You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@druid.apache.org by GitBox <gi...@apache.org> on 2018/07/06 19:08:31 UTC

[GitHub] nishantmonu51 closed pull request #5452: SQL compatible Null Handling Part 2 - Processing Layer and Druid-SQL changes

nishantmonu51 closed pull request #5452: SQL compatible Null Handling Part 2 - Processing Layer and Druid-SQL changes
URL: https://github.com/apache/incubator-druid/pull/5452
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (it would not otherwise be visible, since GitHub hides the original diff once a fork-based PR is merged):

diff --git a/.travis.yml b/.travis.yml
index 4ec30507b4f..db4f10ecbc7 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -33,6 +33,15 @@ matrix:
         - unset _JAVA_OPTIONS
       script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl processing
 
+      # processing module tests with SQL Compatibility enabled
+    - sudo: false
+      env:
+        - NAME="processing module test with SQL Compatibility"
+      install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B
+      before_script:
+        - unset _JAVA_OPTIONS
+      script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -Ddruid.generic.useDefaultValueForNull=false -pl processing
+
       # server module test
     - sudo: false
       env:
@@ -43,6 +52,17 @@ matrix:
       # Server module test is run without the parallel-test option because it's memory sensitive and often fails with that option.
       script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -pl server
 
+      # server module test with SQL Compatibility enabled
+    - sudo: false
+      env:
+        - NAME="server module test with SQL Compatibility enabled"
+      install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B
+      before_script:
+        - unset _JAVA_OPTIONS
+      # Server module test is run without the parallel-test option because it's memory sensitive and often fails with that option.
+      script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -pl server -Ddruid.generic.useDefaultValueForNull=false
+
+
       # other modules test
     - sudo: false
       env:
@@ -53,6 +73,16 @@ matrix:
         - unset _JAVA_OPTIONS
       script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl '!processing,!server'
 
+      # other modules test with SQL Compatibility enabled
+    - sudo: false
+      env:
+        - NAME="other modules test with SQL Compatibility"
+        - AWS_REGION=us-east-1 # set an AWS region for unit tests
+      install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B
+      before_script:
+        - unset _JAVA_OPTIONS
+      script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -Ddruid.generic.useDefaultValueForNull=false -pl '!processing,!server'
+
       # run integration tests
     - sudo: required
       services:
diff --git a/api/src/main/java/io/druid/data/input/Rows.java b/api/src/main/java/io/druid/data/input/Rows.java
index 0ef09e9fa24..e0fbe1a172b 100644
--- a/api/src/main/java/io/druid/data/input/Rows.java
+++ b/api/src/main/java/io/druid/data/input/Rows.java
@@ -23,9 +23,11 @@
 import com.google.common.collect.ImmutableSortedSet;
 import com.google.common.collect.Maps;
 import com.google.common.primitives.Longs;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.parsers.ParseException;
 
+import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -92,10 +94,11 @@
    * @throws NullPointerException if the string is null
    * @throws ParseException       if the column cannot be converted to a number
    */
+  @Nullable
   public static Number objectToNumber(final String name, final Object inputValue)
   {
     if (inputValue == null) {
-      return Rows.LONG_ZERO;
+      return NullHandling.defaultLongValue();
     }
 
     if (inputValue instanceof Number) {
diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java
index 11ffcf007cb..23aeff2d7f9 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java
@@ -21,12 +21,12 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.io.Files;
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.hll.HyperLogLogHash;
 import io.druid.jackson.DefaultObjectMapper;
@@ -557,7 +557,7 @@ public Filter toFilter()
       if (extractionFn == null) {
         return new NoBitmapSelectorFilter(dimension, value);
       } else {
-        final String valueOrNull = Strings.emptyToNull(value);
+        final String valueOrNull = NullHandling.emptyToNullIfNeeded(value);
 
         final DruidPredicateFactory predicateFactory = new DruidPredicateFactory()
         {
diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml
index 32711f95759..e1b3483a0a2 100644
--- a/codestyle/checkstyle.xml
+++ b/codestyle/checkstyle.xml
@@ -171,5 +171,15 @@
       <property name="illegalPattern" value="true"/>
       <property name="message" value="Use java.lang.Primitive.BYTES instead."/>
     </module>
+    <module name="Regexp">
+      <property name="format" value="Strings.emptyToNull"/>
+      <property name="illegalPattern" value="true"/>
+      <property name="message" value="Use io.druid.common.config.NullHandling.emptyToNullIfNeeded instead"/>
+    </module>
+    <module name="Regexp">
+      <property name="format" value="Strings.nullToEmpty"/>
+      <property name="illegalPattern" value="true"/>
+      <property name="message" value="Use io.druid.common.config.NullHandling.nullToEmptyIfNeeded instead"/>
+    </module>
   </module>
 </module>
diff --git a/common/src/main/java/io/druid/math/expr/Evals.java b/common/src/main/java/io/druid/math/expr/Evals.java
index 2b90b58cddf..51d4b8b35c1 100644
--- a/common/src/main/java/io/druid/math/expr/Evals.java
+++ b/common/src/main/java/io/druid/math/expr/Evals.java
@@ -19,7 +19,7 @@
 
 package io.druid.math.expr;
 
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.logger.Logger;
 
 import java.util.Arrays;
@@ -83,6 +83,6 @@ public static boolean asBoolean(double x)
 
   public static boolean asBoolean(String x)
   {
-    return !Strings.isNullOrEmpty(x) && Boolean.valueOf(x);
+    return !NullHandling.isNullOrEquivalent(x) && Boolean.valueOf(x);
   }
 }
diff --git a/common/src/main/java/io/druid/math/expr/Expr.java b/common/src/main/java/io/druid/math/expr/Expr.java
index 0b1deb18874..b7a59f240d4 100644
--- a/common/src/main/java/io/druid/math/expr/Expr.java
+++ b/common/src/main/java/io/druid/math/expr/Expr.java
@@ -272,6 +272,9 @@ public void visit(Visitor visitor)
   public ExprEval eval(ObjectBinding bindings)
   {
     ExprEval ret = expr.eval(bindings);
+    if (NullHandling.sqlCompatible() && (ret.value() == null)) {
+      return ExprEval.of(null);
+    }
     if (ret.type() == ExprType.LONG) {
       return ExprEval.of(-ret.asLong());
     }
@@ -307,6 +310,9 @@ public String toString()
   public ExprEval eval(ObjectBinding bindings)
   {
     ExprEval ret = expr.eval(bindings);
+    if (NullHandling.sqlCompatible() && (ret.value() == null)) {
+      return ExprEval.of(null);
+    }
     // conforming to other boolean-returning binary operators
     ExprType retType = ret.type() == ExprType.DOUBLE ? ExprType.DOUBLE : ExprType.LONG;
     return ExprEval.of(!ret.asBoolean(), retType);
@@ -365,15 +371,21 @@ public ExprEval eval(ObjectBinding bindings)
 
     // Result of any Binary expressions is null if any of the argument is null.
     // e.g "select null * 2 as c;" or "select null + 1 as c;" will return null as per Standard SQL spec.
-    if (NullHandling.sqlCompatible() && (leftVal.isNull() || rightVal.isNull())) {
+    if (NullHandling.sqlCompatible() && (leftVal.value() == null || rightVal.value() == null)) {
       return ExprEval.of(null);
     }
 
     if (leftVal.type() == ExprType.STRING && rightVal.type() == ExprType.STRING) {
       return evalString(leftVal.asString(), rightVal.asString());
     } else if (leftVal.type() == ExprType.LONG && rightVal.type() == ExprType.LONG) {
+      if (NullHandling.sqlCompatible() && (leftVal.isNumericNull() || rightVal.isNumericNull())) {
+        return ExprEval.of(null);
+      }
       return ExprEval.of(evalLong(leftVal.asLong(), rightVal.asLong()));
     } else {
+      if (NullHandling.sqlCompatible() && (leftVal.isNumericNull() || rightVal.isNumericNull())) {
+        return ExprEval.of(null);
+      }
       return ExprEval.of(evalDouble(leftVal.asDouble(), rightVal.asDouble()));
     }
   }
diff --git a/common/src/main/java/io/druid/math/expr/ExprEval.java b/common/src/main/java/io/druid/math/expr/ExprEval.java
index c5f0f6a1c37..344a5dab030 100644
--- a/common/src/main/java/io/druid/math/expr/ExprEval.java
+++ b/common/src/main/java/io/druid/math/expr/ExprEval.java
@@ -19,9 +19,7 @@
 
 package io.druid.math.expr;
 
-import com.google.common.base.Preconditions;
 import com.google.common.primitives.Doubles;
-import com.google.common.primitives.Ints;
 import io.druid.common.config.NullHandling;
 import io.druid.common.guava.GuavaUtils;
 import io.druid.java.util.common.IAE;
@@ -32,7 +30,7 @@
  */
 public abstract class ExprEval<T>
 {
-  public static ExprEval ofLong(Number longValue)
+  public static ExprEval ofLong(@Nullable Number longValue)
   {
     return new LongExprEval(longValue);
   }
@@ -42,7 +40,7 @@ public static ExprEval of(long longValue)
     return new LongExprEval(longValue);
   }
 
-  public static ExprEval ofDouble(Number doubleValue)
+  public static ExprEval ofDouble(@Nullable Number doubleValue)
   {
     return new DoubleExprEval(doubleValue);
   }
@@ -71,7 +69,7 @@ public static ExprEval of(boolean value, ExprType type)
     }
   }
 
-  public static ExprEval bestEffortOf(Object val)
+  public static ExprEval bestEffortOf(@Nullable Object val)
   {
     if (val instanceof ExprEval) {
       return (ExprEval) val;
@@ -85,6 +83,7 @@ public static ExprEval bestEffortOf(Object val)
     return new StringExprEval(val == null ? null : String.valueOf(val));
   }
 
+  @Nullable
   final T value;
 
   private ExprEval(T value)
@@ -99,10 +98,10 @@ public Object value()
     return value;
   }
 
-  public boolean isNull()
-  {
-    return value == null;
-  }
+  /**
+   * returns true if numeric primitive value for this ExprEval is null, otherwise false.
+   */
+  public abstract boolean isNumericNull();
 
   public abstract int asInt();
 
@@ -125,7 +124,7 @@ public String asString()
   private abstract static class NumericExprEval extends ExprEval<Number>
   {
 
-    private NumericExprEval(Number value)
+    private NumericExprEval(@Nullable Number value)
     {
       super(value);
     }
@@ -147,13 +146,19 @@ public final double asDouble()
     {
       return value.doubleValue();
     }
+
+    @Override
+    public boolean isNumericNull()
+    {
+      return value == null;
+    }
   }
 
   private static class DoubleExprEval extends NumericExprEval
   {
-    private DoubleExprEval(Number value)
+    private DoubleExprEval(@Nullable Number value)
     {
-      super(Preconditions.checkNotNull(value, "value"));
+      super(value == null ? NullHandling.defaultDoubleValue() : value);
     }
 
     @Override
@@ -175,7 +180,7 @@ public final ExprEval castTo(ExprType castTo)
         case DOUBLE:
           return this;
         case LONG:
-          return ExprEval.of(asLong());
+          return ExprEval.of(value == null ? null : asLong());
         case STRING:
           return ExprEval.of(asString());
       }
@@ -191,9 +196,9 @@ public Expr toExpr()
 
   private static class LongExprEval extends NumericExprEval
   {
-    private LongExprEval(Number value)
+    private LongExprEval(@Nullable Number value)
     {
-      super(Preconditions.checkNotNull(value, "value"));
+      super(value == null ? NullHandling.defaultLongValue() : value);
     }
 
     @Override
@@ -213,7 +218,7 @@ public final ExprEval castTo(ExprType castTo)
     {
       switch (castTo) {
         case DOUBLE:
-          return ExprEval.of(asDouble());
+          return ExprEval.of(value == null ? null : asDouble());
         case LONG:
           return this;
         case STRING:
@@ -231,6 +236,8 @@ public Expr toExpr()
 
   private static class StringExprEval extends ExprEval<String>
   {
+    private Number numericVal;
+
     private StringExprEval(@Nullable String value)
     {
       super(NullHandling.emptyToNullIfNeeded(value));
@@ -245,36 +252,63 @@ public final ExprType type()
     @Override
     public final int asInt()
     {
-      if (value == null) {
+      Number number = asNumber();
+      if (number == null) {
         assert NullHandling.replaceWithDefault();
         return 0;
       }
-
-      final Integer theInt = Ints.tryParse(value);
-      assert NullHandling.replaceWithDefault() || theInt != null;
-      return theInt == null ? 0 : theInt;
+      return number.intValue();
     }
 
     @Override
     public final long asLong()
     {
-      // GuavaUtils.tryParseLong handles nulls, no need for special null handling here.
-      final Long theLong = GuavaUtils.tryParseLong(value);
-      assert NullHandling.replaceWithDefault() || theLong != null;
-      return theLong == null ? 0L : theLong;
+      Number number = asNumber();
+      if (number == null) {
+        assert NullHandling.replaceWithDefault();
+        return 0L;
+      }
+      return number.longValue();
     }
 
     @Override
     public final double asDouble()
     {
-      if (value == null) {
+      Number number = asNumber();
+      if (number == null) {
         assert NullHandling.replaceWithDefault();
-        return 0.0;
+        return 0.0d;
+      }
+      return number.doubleValue();
+    }
+
+    @Nullable
+    private Number asNumber()
+    {
+      if (value == null) {
+        return null;
       }
+      if (numericVal != null) {
+        // Optimization for non-null case.
+        return numericVal;
+      }
+      Number rv;
+      Long v = GuavaUtils.tryParseLong(value);
+      // Do NOT use ternary operator here, because it makes Java to convert Long to Double
+      if (v != null) {
+        rv = v;
+      } else {
+        rv = Doubles.tryParse(value);
+      }
+
+      numericVal = rv;
+      return rv;
+    }
 
-      final Double theDouble = Doubles.tryParse(value);
-      assert NullHandling.replaceWithDefault() || theDouble != null;
-      return theDouble == null ? 0.0 : theDouble;
+    @Override
+    public boolean isNumericNull()
+    {
+      return asNumber() == null;
     }
 
     @Override
@@ -288,9 +322,9 @@ public final ExprEval castTo(ExprType castTo)
     {
       switch (castTo) {
         case DOUBLE:
-          return ExprEval.of(asDouble());
+          return ExprEval.ofDouble(asNumber());
         case LONG:
-          return ExprEval.of(asLong());
+          return ExprEval.ofLong(asNumber());
         case STRING:
           return this;
       }
diff --git a/common/src/main/java/io/druid/math/expr/Function.java b/common/src/main/java/io/druid/math/expr/Function.java
index 42b7902ba4b..20a5b21b260 100644
--- a/common/src/main/java/io/druid/math/expr/Function.java
+++ b/common/src/main/java/io/druid/math/expr/Function.java
@@ -74,7 +74,7 @@ public ExprEval apply(List<Expr> args, Expr.ObjectBinding bindings)
     @Override
     protected final ExprEval eval(ExprEval param)
     {
-      if (NullHandling.sqlCompatible() && param.isNull()) {
+      if (NullHandling.sqlCompatible() && param.isNumericNull()) {
         return ExprEval.of(null);
       }
       if (param.type() == ExprType.LONG) {
@@ -796,6 +796,9 @@ public String name()
     @Override
     protected ExprEval eval(ExprEval x, ExprEval y)
     {
+      if (NullHandling.sqlCompatible() && x.value() == null) {
+        return ExprEval.of(null);
+      }
       ExprType castTo;
       try {
         castTo = ExprType.valueOf(StringUtils.toUpperCase(y.asString()));
@@ -880,7 +883,7 @@ public ExprEval apply(List<Expr> args, Expr.ObjectBinding bindings)
         throw new IAE("Function[%s] needs 2 arguments", name());
       }
       final ExprEval eval = args.get(0).eval(bindings);
-      return eval.isNull() ? args.get(1).eval(bindings) : eval;
+      return eval.value() == null ? args.get(1).eval(bindings) : eval;
     }
   }
 
@@ -937,7 +940,7 @@ public ExprEval apply(List<Expr> args, Expr.ObjectBinding bindings)
       }
 
       final String arg = args.get(0).eval(bindings).asString();
-      return arg == null ? ExprEval.of(0) : ExprEval.of(arg.length());
+      return arg == null ? ExprEval.ofLong(NullHandling.defaultLongValue()) : ExprEval.of(arg.length());
     }
   }
 
@@ -1094,7 +1097,7 @@ public ExprEval apply(List<Expr> args, Expr.ObjectBinding bindings)
       }
 
       final ExprEval expr = args.get(0).eval(bindings);
-      return ExprEval.of(expr.isNull(), ExprType.LONG);
+      return ExprEval.of(expr.value() == null, ExprType.LONG);
     }
   }
 
@@ -1114,7 +1117,7 @@ public ExprEval apply(List<Expr> args, Expr.ObjectBinding bindings)
       }
 
       final ExprEval expr = args.get(0).eval(bindings);
-      return ExprEval.of(!expr.isNull(), ExprType.LONG);
+      return ExprEval.of(expr.value() != null, ExprType.LONG);
     }
   }
 }
diff --git a/common/src/test/java/io/druid/math/expr/EvalTest.java b/common/src/test/java/io/druid/math/expr/EvalTest.java
index 800bc5740c2..7910f6a9491 100644
--- a/common/src/test/java/io/druid/math/expr/EvalTest.java
+++ b/common/src/test/java/io/druid/math/expr/EvalTest.java
@@ -140,11 +140,10 @@ public void testLongEval()
     Assert.assertEquals(1271055781L, evalLong("unix_timestamp('2010-04-12T07:03:01')", bindings));
     Assert.assertEquals(1271023381L, evalLong("unix_timestamp('2010-04-12T07:03:01+09:00')", bindings));
     Assert.assertEquals(1271023381L, evalLong("unix_timestamp('2010-04-12T07:03:01.419+09:00')", bindings));
-    if (NullHandling.replaceWithDefault()) {
-      Assert.assertEquals("NULL", eval("nvl(if(x == 9223372036854775807, '', 'x'), 'NULL')", bindings).asString());
-    } else {
-      Assert.assertEquals("", eval("nvl(if(x == 9223372036854775807, '', 'x'), 'NULL')", bindings).asString());
-    }
+    Assert.assertEquals(
+        NullHandling.replaceWithDefault() ? "NULL" : "",
+        eval("nvl(if(x == 9223372036854775807, '', 'x'), 'NULL')", bindings).asString()
+    );
     Assert.assertEquals("x", eval("nvl(if(x == 9223372036854775806, '', 'x'), 'NULL')", bindings).asString());
   }
 
diff --git a/common/src/test/java/io/druid/math/expr/FunctionTest.java b/common/src/test/java/io/druid/math/expr/FunctionTest.java
index 9ea6efb2c6c..89b997a116f 100644
--- a/common/src/test/java/io/druid/math/expr/FunctionTest.java
+++ b/common/src/test/java/io/druid/math/expr/FunctionTest.java
@@ -88,7 +88,7 @@ public void testSubstring()
   public void testStrlen()
   {
     assertExpr("strlen(x)", 3L);
-    assertExpr("strlen(nonexistent)", 0L);
+    assertExpr("strlen(nonexistent)", NullHandling.defaultLongValue());
   }
 
   @Test
diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java
index 6ed641d79c9..3703e38b610 100644
--- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java
+++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java
@@ -20,6 +20,7 @@
 package io.druid.query.aggregation.histogram;
 
 import com.google.common.primitives.Longs;
+import io.druid.common.config.NullHandling;
 import io.druid.query.aggregation.Aggregator;
 import io.druid.segment.BaseFloatColumnValueSelector;
 
@@ -59,7 +60,9 @@ public ApproximateHistogramAggregator(
   @Override
   public void aggregate()
   {
-    histogram.offer(selector.getFloat());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      histogram.offer(selector.getFloat());
+    }
   }
 
   @Override
diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java
index ae5904c8cc8..94a032f2440 100644
--- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java
+++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java
@@ -20,6 +20,7 @@
 package io.druid.query.aggregation.histogram;
 
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.guava.Sequence;
@@ -79,10 +80,14 @@ public void testIngestWithNullsIgnoredAndQuery() throws Exception
   @Test
   public void testIngestWithNullsToZeroAndQuery() throws Exception
   {
-    MapBasedRow row = ingestAndQuery(false);
-    Assert.assertEquals(0.0, row.getMetric("index_min").floatValue(), 0.0001);
-    Assert.assertEquals(135.109191, row.getMetric("index_max").floatValue(), 0.0001);
-    Assert.assertEquals(131.428176, row.getMetric("index_quantile").floatValue(), 0.0001);
+    // Nulls are ignored and not replaced with default for SQL compatible null handling.
+    // This is already tested in testIngestWithNullsIgnoredAndQuery()
+    if (NullHandling.replaceWithDefault()) {
+      MapBasedRow row = ingestAndQuery(false);
+      Assert.assertEquals(0.0F, row.getMetric("index_min"));
+      Assert.assertEquals(135.109191, row.getMetric("index_max").floatValue(), 0.0001);
+      Assert.assertEquals(131.428176, row.getMetric("index_quantile").floatValue(), 0.0001);
+    }
   }
 
   private MapBasedRow ingestAndQuery(boolean ignoreNulls) throws Exception
diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java
index e78190cc942..9b82b105ae0 100644
--- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java
+++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java
@@ -23,6 +23,7 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.Druids;
 import io.druid.query.QueryDataSource;
@@ -312,9 +313,12 @@ public void testQuantileOnInnerQuery() throws Exception
 
       // Verify results
       final List<Object[]> results = plannerResult.run().toList();
-      final List<Object[]> expectedResults = ImmutableList.of(
-          new Object[]{7.0, 8.26386833190918}
-      );
+      final List<Object[]> expectedResults;
+      if (NullHandling.replaceWithDefault()) {
+        expectedResults = ImmutableList.of(new Object[]{7.0, 8.26386833190918});
+      } else {
+        expectedResults = ImmutableList.of(new Object[]{5.25, 6.59091854095459});
+      }
       Assert.assertEquals(expectedResults.size(), results.size());
       for (int i = 0; i < expectedResults.size(); i++) {
         Assert.assertArrayEquals(expectedResults.get(i), results.get(i));
diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java
index f76788d4984..6cc099e1045 100644
--- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java
+++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java
@@ -117,6 +117,7 @@
 import io.druid.query.timeseries.TimeseriesQueryQueryToolChest;
 import io.druid.query.timeseries.TimeseriesQueryRunnerFactory;
 import io.druid.query.timeseries.TimeseriesResultValue;
+import io.druid.segment.DimensionHandlerUtils;
 import io.druid.segment.IndexIO;
 import io.druid.segment.QueryableIndex;
 import io.druid.segment.TestHelper;
@@ -2212,7 +2213,7 @@ public long countEvents(final Task task)
     List<Result<TimeseriesResultValue>> results =
         task.getQueryRunner(query).run(wrap(query), ImmutableMap.of()).toList();
 
-    return results.isEmpty() ? 0 : results.get(0).getValue().getLongMetric("rows");
+    return results.isEmpty() ? 0L : DimensionHandlerUtils.nullToZero(results.get(0).getValue().getLongMetric("rows"));
   }
 
   private static byte[] JB(String timestamp, String dim1, String dim2, String dimLong, String dimFloat, String met1)
diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java
index ff0a93c4da2..34036822e6f 100644
--- a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java
+++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java
@@ -35,6 +35,7 @@
 import com.google.common.collect.ImmutableMap;
 import io.druid.guice.annotations.Json;
 import io.druid.java.util.common.IAE;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.UOE;
 import io.druid.java.util.common.jackson.JacksonUtils;
 import io.druid.java.util.common.parsers.CSVParser;
@@ -396,8 +397,8 @@ public TSVFlatDataParser(
           "Must specify more than one column to have a key value pair"
       );
       final DelimitedParser delegate = new DelimitedParser(
-          Strings.emptyToNull(delimiter),
-          Strings.emptyToNull(listDelimiter),
+          StringUtils.emptyToNullNonDruidDataString(delimiter),
+          StringUtils.emptyToNullNonDruidDataString(listDelimiter),
           hasHeaderRow,
           skipHeaderRows
       );
diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java
index f44385bce06..132320e9c0b 100644
--- a/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java
+++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java
@@ -19,15 +19,15 @@
 
 package io.druid.server.lookup.namespace.cache;
 
-import com.google.common.base.Strings;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
-import io.druid.java.util.common.StringUtils;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.concurrent.Execs;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.io.Closer;
 import io.druid.java.util.common.lifecycle.Lifecycle;
 import io.druid.java.util.common.logger.Logger;
@@ -383,7 +383,7 @@ public void testMappingWithoutFilter()
         String key = e.getKey();
         String[] val = e.getValue();
         String field = val[0];
-        Assert.assertEquals("non-null check", Strings.emptyToNull(field), Strings.emptyToNull(map.get(key)));
+        Assert.assertEquals("non-null check", NullHandling.emptyToNullIfNeeded(field), NullHandling.emptyToNullIfNeeded(map.get(key)));
       }
       Assert.assertEquals("null check", null, map.get("baz"));
     }
@@ -413,9 +413,9 @@ public void testMappingWithFilter()
         String filterVal = val[1];
 
         if (filterVal.equals("1")) {
-          Assert.assertEquals("non-null check", Strings.emptyToNull(field), Strings.emptyToNull(map.get(key)));
+          Assert.assertEquals("non-null check", NullHandling.emptyToNullIfNeeded(field), NullHandling.emptyToNullIfNeeded(map.get(key)));
         } else {
-          Assert.assertEquals("non-null check", null, Strings.emptyToNull(map.get(key)));
+          Assert.assertEquals("non-null check", null, NullHandling.emptyToNullIfNeeded(map.get(key)));
         }
       }
     }
diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java
index f924324a9c0..9d6d93b8efd 100644
--- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java
+++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java
@@ -21,11 +21,12 @@
 
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.query.lookup.LookupExtractor;
 import io.druid.server.lookup.cache.loading.LoadingCache;
 
+import javax.annotation.Nullable;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.Callable;
@@ -62,15 +63,19 @@ public LoadingLookup(
 
 
   @Override
-  public String apply(final String key)
+  public String apply(@Nullable final String key)
   {
-    if (key == null) {
+    String keyEquivalent = NullHandling.nullToEmptyIfNeeded(key);
+    if (keyEquivalent == null) {
+      // keyEquivalent is null only for SQL-compatible null handling;
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
       return null;
     }
+
     final String presentVal;
     try {
-      presentVal = loadingCache.get(key, new ApplyCallable(key));
-      return Strings.emptyToNull(presentVal);
+      presentVal = loadingCache.get(keyEquivalent, new ApplyCallable(keyEquivalent));
+      return NullHandling.emptyToNullIfNeeded(presentVal);
     }
     catch (ExecutionException e) {
       LOGGER.debug("value not found for key [%s]", key);
@@ -79,15 +84,18 @@ public String apply(final String key)
   }
 
   @Override
-  public List<String> unapply(final String value)
+  public List<String> unapply(@Nullable final String value)
   {
-    // null value maps to empty list
-    if (value == null) {
+    String valueEquivalent = NullHandling.nullToEmptyIfNeeded(value);
+    if (valueEquivalent == null) {
+      // valueEquivalent is null only for SQL Compatible Null Behavior
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
+      // null value maps to empty list when SQL Compatible
       return Collections.EMPTY_LIST;
     }
     final List<String> retList;
     try {
-      retList = reverseLoadingCache.get(value, new UnapplyCallable(value));
+      retList = reverseLoadingCache.get(valueEquivalent, new UnapplyCallable(valueEquivalent));
       return retList;
     }
     catch (ExecutionException e) {
@@ -131,8 +139,9 @@ public ApplyCallable(String key)
     @Override
     public String call()
     {
+      // When SQL compatible null handling is disabled,
       // avoid returning null and return an empty string to cache it.
-      return Strings.nullToEmpty(dataFetcher.fetch(key));
+      return NullHandling.nullToEmptyIfNeeded(dataFetcher.fetch(key));
     }
   }
 
diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java
index 0abc23b8e22..c8df32a4cb7 100644
--- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java
+++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java
@@ -20,11 +20,11 @@
 package io.druid.server.lookup;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningScheduledExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
 
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.concurrent.Execs;
 import io.druid.java.util.common.ISE;
 import io.druid.java.util.common.logger.Logger;
@@ -33,7 +33,8 @@
 import io.druid.server.lookup.cache.polling.PollingCache;
 import io.druid.server.lookup.cache.polling.PollingCacheFactory;
 
-import javax.validation.constraints.NotNull;
+import javax.annotation.Nullable;
+import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
@@ -107,8 +108,15 @@ public void close()
   }
 
   @Override
-  public String apply(@NotNull String key)
+  @Nullable
+  public String apply(@Nullable String key)
   {
+    String keyEquivalent = NullHandling.nullToEmptyIfNeeded(key);
+    if (keyEquivalent == null) {
+      // keyEquivalent is null only for SQL Compatible Null Behavior
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
+      return null;
+    }
     final CacheRefKeeper cacheRefKeeper = refOfCacheKeeper.get();
     if (cacheRefKeeper == null) {
       throw new ISE("Cache reference is null WTF");
@@ -117,9 +125,9 @@ public String apply(@NotNull String key)
     try {
       if (cache == null) {
         // it must've been closed after swapping while I was getting it.  Try again.
-        return this.apply(key);
+        return this.apply(keyEquivalent);
       }
-      return Strings.emptyToNull((String) cache.get(key));
+      return NullHandling.emptyToNullIfNeeded((String) cache.get(keyEquivalent));
     }
     finally {
       if (cacheRefKeeper != null && cache != null) {
@@ -129,8 +137,16 @@ public String apply(@NotNull String key)
   }
 
   @Override
-  public List<String> unapply(final String value)
+  public List<String> unapply(@Nullable final String value)
   {
+    String valueEquivalent = NullHandling.nullToEmptyIfNeeded(value);
+    if (valueEquivalent == null) {
+      // valueEquivalent is null only for SQL Compatible Null Behavior
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
+      // null value maps to empty list when SQL Compatible
+      return Collections.emptyList();
+    }
+
     CacheRefKeeper cacheRefKeeper = refOfCacheKeeper.get();
     if (cacheRefKeeper == null) {
       throw new ISE("pollingLookup id [%s] is closed", id);
@@ -139,9 +155,9 @@ public String apply(@NotNull String key)
     try {
       if (cache == null) {
         // it must've been closed after swapping while I was getting it.  Try again.
-        return this.unapply(value);
+        return this.unapply(valueEquivalent);
       }
-      return cache.getKeys(value);
+      return cache.getKeys(valueEquivalent);
     }
     finally {
       if (cacheRefKeeper != null && cache != null) {
diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java
index 39e5e33537d..3c2c75e38d4 100644
--- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java
+++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java
@@ -21,8 +21,9 @@
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
+
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.metadata.MetadataStorageConnectorConfig;
@@ -131,7 +132,7 @@ public String fetch(final String key)
     if (pairs.isEmpty()) {
       return null;
     }
-    return Strings.nullToEmpty(pairs.get(0));
+    return NullHandling.nullToEmptyIfNeeded(pairs.get(0));
   }
 
   @Override
diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java
index 66e864ed625..05dfd478aaf 100644
--- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java
+++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/LoadingLookupTest.java
@@ -22,6 +22,7 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.server.lookup.cache.loading.LoadingCache;
 import org.easymock.EasyMock;
 import org.junit.Assert;
@@ -43,13 +44,20 @@
   public void testApplyEmptyOrNull()
   {
     Assert.assertEquals(null, loadingLookup.apply(null));
-    Assert.assertEquals(null, loadingLookup.apply(""));
+    if (NullHandling.sqlCompatible()) {
+      // empty string should also have same behavior
+      Assert.assertEquals(null, loadingLookup.apply(""));
+    }
   }
 
   @Test
   public void testUnapplyNull()
   {
-    Assert.assertEquals(Collections.EMPTY_LIST, loadingLookup.unapply(null));
+    if (NullHandling.sqlCompatible()) {
+      Assert.assertEquals(Collections.emptyList(), loadingLookup.unapply(null));
+    } else {
+      Assert.assertEquals(null, loadingLookup.unapply(null));
+    }
   }
 
   @Test
@@ -68,7 +76,10 @@ public void testUnapplyAll() throws ExecutionException
             .andReturn(Lists.newArrayList("key"))
             .once();
     EasyMock.replay(reverseLookupCache);
-    Assert.assertEquals(ImmutableMap.of("value", Lists.newArrayList("key")), loadingLookup.unapplyAll(ImmutableSet.<String>of("value")));
+    Assert.assertEquals(
+        ImmutableMap.of("value", Lists.newArrayList("key")),
+        loadingLookup.unapplyAll(ImmutableSet.<String>of("value"))
+    );
     EasyMock.verify(reverseLookupCache);
   }
 
diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java
index 8a0ce6e4fef..f94b0b77400 100644
--- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java
+++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java
@@ -21,10 +21,11 @@
 
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import com.google.common.base.Function;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.ISE;
 import io.druid.query.lookup.LookupExtractor;
 import io.druid.server.lookup.cache.polling.OffHeapPollingCache;
@@ -190,7 +191,7 @@ public void testBulkApply()
       public String apply(String input)
       {
         //make sure to rewrite null strings as empty.
-        return Strings.nullToEmpty(input);
+        return NullHandling.nullToEmptyIfNeeded(input);
       }
     }));
   }
@@ -207,7 +208,7 @@ private void assertMapLookup(Map<String, String> map, LookupExtractor lookup)
     for (Map.Entry<String, String> entry : map.entrySet()) {
       String key = entry.getKey();
       String val = entry.getValue();
-      Assert.assertEquals("non-null check", Strings.emptyToNull(val), lookup.apply(key));
+      Assert.assertEquals("non-null check", NullHandling.emptyToNullIfNeeded(val), lookup.apply(key));
     }
   }
 }
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java b/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java
index f80c4c68a58..3c7aa188691 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/InputRowSerde.java
@@ -26,6 +26,7 @@
 import com.google.common.io.ByteArrayDataOutput;
 import com.google.common.io.ByteStreams;
 
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.Rows;
@@ -45,6 +46,7 @@
 import io.druid.segment.serde.ComplexMetrics;
 import org.apache.hadoop.io.WritableUtils;
 
+import javax.annotation.Nullable;
 import java.io.DataInput;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -330,18 +332,22 @@ public static final SerializeResult toBytes(
           }
 
           String t = aggFactory.getTypeName();
-
-          if (t.equals("float")) {
-            out.writeFloat(agg.getFloat());
-          } else if (t.equals("long")) {
-            WritableUtils.writeVLong(out, agg.getLong());
-          } else if (t.equals("double")) {
-            out.writeDouble(agg.getDouble());
+          if (agg.isNull()) {
+            out.writeByte(NullHandling.IS_NULL_BYTE);
           } else {
-            //its a complex metric
-            Object val = agg.get();
-            ComplexMetricSerde serde = getComplexMetricSerde(t);
-            writeBytes(serde.toBytes(val), out);
+            out.writeByte(NullHandling.IS_NOT_NULL_BYTE);
+            if (t.equals("float")) {
+              out.writeFloat(agg.getFloat());
+            } else if (t.equals("long")) {
+              WritableUtils.writeVLong(out, agg.getLong());
+            } else if (t.equals("double")) {
+              out.writeDouble(agg.getDouble());
+            } else {
+              //its a complex metric
+              Object val = agg.get();
+              ComplexMetricSerde serde = getComplexMetricSerde(t);
+              writeBytes(serde.toBytes(val), out);
+            }
           }
         }
       }
@@ -353,10 +359,13 @@ public static final SerializeResult toBytes(
     }
   }
 
-  private static void writeBytes(byte[] value, ByteArrayDataOutput out) throws IOException
+  private static void writeBytes(@Nullable byte[] value, ByteArrayDataOutput out) throws IOException
   {
-    WritableUtils.writeVInt(out, value.length);
-    out.write(value, 0, value.length);
+    int length = value == null ? -1 : value.length;
+    WritableUtils.writeVInt(out, length);
+    if (value != null) {
+      out.write(value, 0, value.length);
+    }
   }
 
   private static void writeString(String value, ByteArrayDataOutput out) throws IOException
@@ -450,6 +459,11 @@ public static final InputRow fromBytes(
       for (int i = 0; i < metricSize; i++) {
         String metric = readString(in);
         String type = getType(metric, aggs, i);
+        byte metricNullability = in.readByte();
+        if (metricNullability == NullHandling.IS_NULL_BYTE) {
+          // metric value is null.
+          continue;
+        }
         if (type.equals("float")) {
           event.put(metric, in.readFloat());
         } else if (type.equals("long")) {
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java
index 70dbeaebe0f..781a3e67133 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java
@@ -295,8 +295,8 @@ public static void injectSystemProperties(Job job)
 
   public static void injectDruidProperties(Configuration configuration, List<String> listOfAllowedPrefix)
   {
-    String mapJavaOpts = Strings.nullToEmpty(configuration.get(MRJobConfig.MAP_JAVA_OPTS));
-    String reduceJavaOpts = Strings.nullToEmpty(configuration.get(MRJobConfig.REDUCE_JAVA_OPTS));
+    String mapJavaOpts = StringUtils.nullToEmptyNonDruidDataString(configuration.get(MRJobConfig.MAP_JAVA_OPTS));
+    String reduceJavaOpts = StringUtils.nullToEmptyNonDruidDataString(configuration.get(MRJobConfig.REDUCE_JAVA_OPTS));
 
     for (String propName : System.getProperties().stringPropertyNames()) {
       for (String prefix : listOfAllowedPrefix) {
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java
index 9bb16aca596..2642c8598af 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/StaticPathSpec.java
@@ -21,11 +21,11 @@
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Sets;
 import io.druid.indexer.HadoopDruidIndexerConfig;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.logger.Logger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -126,7 +126,9 @@ public static void addToMultipleInputs(
   private static void addInputPath(Job job, Iterable<String> pathStrings, Class<? extends InputFormat> inputFormatClass)
   {
     Configuration conf = job.getConfiguration();
-    StringBuilder inputFormats = new StringBuilder(Strings.nullToEmpty(conf.get(MultipleInputs.DIR_FORMATS)));
+    StringBuilder inputFormats = new StringBuilder(
+        StringUtils.nullToEmptyNonDruidDataString(conf.get(MultipleInputs.DIR_FORMATS))
+    );
 
     String[] paths = Iterables.toArray(pathStrings, String.class);
     for (int i = 0; i < paths.length - 1; i++) {
diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java
index 0b72d31a71a..578be78d525 100644
--- a/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java
+++ b/indexing-hadoop/src/test/java/io/druid/indexer/InputRowSerdeTest.java
@@ -21,6 +21,7 @@
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Maps;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.impl.DimensionsSpec;
@@ -83,6 +84,7 @@ public void testSerde()
   {
     // Prepare the mocks & set close() call count expectation to 1
     final Aggregator mockedAggregator = EasyMock.createMock(DoubleSumAggregator.class);
+    EasyMock.expect(mockedAggregator.isNull()).andReturn(false).times(1);
     EasyMock.expect(mockedAggregator.getDouble()).andReturn(0d).times(1);
     mockedAggregator.aggregate();
     EasyMock.expectLastCall().times(1);
@@ -90,6 +92,26 @@ public void testSerde()
     EasyMock.expectLastCall().times(1);
     EasyMock.replay(mockedAggregator);
 
+    final Aggregator mockedNullAggregator = EasyMock.createMock(DoubleSumAggregator.class);
+    EasyMock.expect(mockedNullAggregator.isNull()).andReturn(true).times(1);
+    mockedNullAggregator.aggregate();
+    EasyMock.expectLastCall().times(1);
+    mockedNullAggregator.close();
+    EasyMock.expectLastCall().times(1);
+    EasyMock.replay(mockedNullAggregator);
+
+    final AggregatorFactory mockedAggregatorFactory = EasyMock.createMock(AggregatorFactory.class);
+    EasyMock.expect(mockedAggregatorFactory.factorize(EasyMock.anyObject(ColumnSelectorFactory.class))).andReturn(mockedAggregator);
+    EasyMock.expect(mockedAggregatorFactory.getTypeName()).andReturn("double").anyTimes();
+    EasyMock.expect(mockedAggregatorFactory.getName()).andReturn("mockedAggregator").anyTimes();
+
+    final AggregatorFactory mockedNullAggregatorFactory = EasyMock.createMock(AggregatorFactory.class);
+    EasyMock.expect(mockedNullAggregatorFactory.factorize(EasyMock.anyObject(ColumnSelectorFactory.class))).andReturn(mockedNullAggregator);
+    EasyMock.expect(mockedNullAggregatorFactory.getName()).andReturn("mockedNullAggregator").anyTimes();
+    EasyMock.expect(mockedNullAggregatorFactory.getTypeName()).andReturn("double").anyTimes();
+
+    EasyMock.replay(mockedAggregatorFactory, mockedNullAggregatorFactory);
+
     InputRow in = new MapBasedInputRow(
         timestamp,
         dims,
@@ -102,13 +124,8 @@ public void testSerde()
         new LongSumAggregatorFactory("m2out", "m2"),
         new HyperUniquesAggregatorFactory("m3out", "m3"),
         new LongSumAggregatorFactory("unparseable", "m3"), // Unparseable from String to Long
-        new DoubleSumAggregatorFactory("mockedAggregator", "m4") {
-          @Override
-          public Aggregator factorize(ColumnSelectorFactory metricFactory)
-          {
-            return mockedAggregator;
-          }
-        }
+        mockedAggregatorFactory,
+        mockedNullAggregatorFactory
     };
 
     DimensionsSpec dimensionsSpec = new DimensionsSpec(
@@ -136,13 +153,14 @@ public Aggregator factorize(ColumnSelectorFactory metricFactory)
     Assert.assertEquals(300.1f, out.getRaw("d4"));
     Assert.assertEquals(400.5d, out.getRaw("d5"));
 
-    Assert.assertEquals(0.0f, out.getMetric("agg_non_existing").floatValue(), 0.00001);
+    Assert.assertEquals(NullHandling.defaultDoubleValue(), out.getMetric("agg_non_existing"));
     Assert.assertEquals(5.0f, out.getMetric("m1out").floatValue(), 0.00001);
     Assert.assertEquals(100L, out.getMetric("m2out"));
     Assert.assertEquals(1, ((HyperLogLogCollector) out.getRaw("m3out")).estimateCardinality(), 0.001);
     Assert.assertEquals(0L, out.getMetric("unparseable"));
 
     EasyMock.verify(mockedAggregator);
+    EasyMock.verify(mockedNullAggregator);
   }
 
   @Test
diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java
index c8961157eff..416217e68ec 100644
--- a/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java
+++ b/indexing-service/src/test/java/io/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java
@@ -31,6 +31,7 @@
 import com.google.common.util.concurrent.MoreExecutors;
 import io.druid.client.cache.CacheConfig;
 import io.druid.client.cache.MapCache;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.Firehose;
 import io.druid.data.input.FirehoseFactory;
 import io.druid.data.input.InputRow;
@@ -131,6 +132,7 @@
 import io.druid.timeline.DataSegment;
 import io.druid.timeline.partition.LinearShardSpec;
 import io.druid.timeline.partition.NumberedShardSpec;
+import io.druid.utils.Runnables;
 import org.apache.commons.io.FileUtils;
 import org.easymock.EasyMock;
 import org.joda.time.DateTime;
@@ -223,7 +225,7 @@ public InputRow nextRow()
     @Override
     public Runnable commit()
     {
-      return () -> {};
+      return Runnables.getNoopRunnable();
     }
 
     @Override
@@ -331,7 +333,8 @@ public void testHandoffTimeout() throws Exception
 
     // handoff would timeout, resulting in exception
     TaskStatus status = statusFuture.get();
-    Assert.assertTrue(status.getErrorMsg().contains("java.util.concurrent.TimeoutException: Timeout waiting for task."));
+    Assert.assertTrue(status.getErrorMsg()
+                            .contains("java.util.concurrent.TimeoutException: Timeout waiting for task."));
   }
 
   @Test(timeout = 60_000L)
@@ -367,8 +370,8 @@ public void testBasics() throws Exception
     Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(2, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(2, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -429,8 +432,8 @@ public void testLateData() throws Exception
     Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(2, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(2, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -494,8 +497,8 @@ public void testMaxRowsPerSegment() throws Exception
     Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(2000, sumMetric(task, null, "rows"));
-    Assert.assertEquals(2000, sumMetric(task, null, "met1"));
+    Assert.assertEquals(2000, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(2000, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -562,10 +565,14 @@ public void testTransformSpec() throws Exception
     Assert.assertEquals(0, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(2, sumMetric(task, null, "rows"));
-    Assert.assertEquals(2, sumMetric(task, new SelectorDimFilter("dim1t", "foofoo", null), "rows"));
-    Assert.assertEquals(0, sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(2, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(2, sumMetric(task, new SelectorDimFilter("dim1t", "foofoo", null), "rows").longValue());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0, sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "rows").longValue());
+    } else {
+      Assert.assertNull(sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "rows"));
+    }
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -620,7 +627,8 @@ public void testReportParseExceptionsOnBadMetric() throws Exception
 
     // Wait for the task to finish.
     TaskStatus status = statusFuture.get();
-    Assert.assertTrue(status.getErrorMsg().contains("java.lang.RuntimeException: Max parse exceptions exceeded, terminating task..."));
+    Assert.assertTrue(status.getErrorMsg()
+                            .contains("java.lang.RuntimeException: Max parse exceptions exceeded, terminating task..."));
 
     IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData();
 
@@ -639,7 +647,15 @@ public void testNoReportParseExceptions() throws Exception
   {
     expectPublishedSegments(1);
 
-    final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(null, TransformSpec.NONE, false, 0, true, null, 1);
+    final AppenderatorDriverRealtimeIndexTask task = makeRealtimeTask(
+        null,
+        TransformSpec.NONE,
+        false,
+        0,
+        true,
+        null,
+        1
+    );
     final ListenableFuture<TaskStatus> statusFuture = runTask(task);
 
     // Wait for firehose to show up, it starts off null.
@@ -683,8 +699,8 @@ public void testNoReportParseExceptions() throws Exception
     Assert.assertEquals(2, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(3, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(3, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -750,7 +766,18 @@ public void testMultipleParseExceptionsSuccess() throws Exception
             ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "met1", "foo"),
 
             // Bad long dim- will count as processed, but bad dims will get default values
-            ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "dimLong", "notnumber", "dimFloat", "notnumber", "met1", "foo"),
+            ImmutableMap.of(
+                "t",
+                1521251960729L,
+                "dim1",
+                "foo",
+                "dimLong",
+                "notnumber",
+                "dimFloat",
+                "notnumber",
+                "met1",
+                "foo"
+            ),
 
             // Bad row- will be unparseable.
             ImmutableMap.of("dim1", "foo", "met1", 2.0, FAIL_DIM, "x"),
@@ -775,8 +802,8 @@ public void testMultipleParseExceptionsSuccess() throws Exception
     Assert.assertEquals(2, task.getRowIngestionMeters().getUnparseable());
 
     // Do some queries.
-    Assert.assertEquals(4, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(4, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     awaitHandoffs();
 
@@ -852,7 +879,18 @@ public void testMultipleParseExceptionsFailure() throws Exception
             ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "met1", "foo"),
 
             // Bad long dim- will count as processed, but bad dims will get default values
-            ImmutableMap.of("t", 1521251960729L, "dim1", "foo", "dimLong", "notnumber", "dimFloat", "notnumber", "met1", "foo"),
+            ImmutableMap.of(
+                "t",
+                1521251960729L,
+                "dim1",
+                "foo",
+                "dimLong",
+                "notnumber",
+                "dimFloat",
+                "notnumber",
+                "met1",
+                "foo"
+            ),
 
             // Bad row- will be unparseable.
             ImmutableMap.of("dim1", "foo", "met1", 2.0, FAIL_DIM, "x"),
@@ -943,7 +981,7 @@ public void testRestore() throws Exception
       }
 
       // Do a query, at this point the previous data should be loaded.
-      Assert.assertEquals(1, sumMetric(task2, null, "rows"));
+      Assert.assertEquals(1, sumMetric(task2, null, "rows").longValue());
 
       final TestFirehose firehose = (TestFirehose) task2.getFirehose();
 
@@ -961,7 +999,7 @@ public void testRestore() throws Exception
       publishedSegment = Iterables.getOnlyElement(publishedSegments);
 
       // Do a query.
-      Assert.assertEquals(2, sumMetric(task2, null, "rows"));
+      Assert.assertEquals(2, sumMetric(task2, null, "rows").longValue());
 
       awaitHandoffs();
 
@@ -1018,7 +1056,7 @@ public void testRestoreAfterHandoffAttemptDuringShutdown() throws Exception
       publishedSegment = Iterables.getOnlyElement(publishedSegments);
 
       // Do a query.
-      Assert.assertEquals(1, sumMetric(task1, null, "rows"));
+      Assert.assertEquals(1, sumMetric(task1, null, "rows").longValue());
 
       // Trigger graceful shutdown.
       task1.stopGracefully();
@@ -1137,7 +1175,8 @@ public void testRestoreCorruptData() throws Exception
 
       IngestionStatsAndErrorsTaskReportData reportData = getTaskReportData();
       Assert.assertEquals(expectedMetrics, reportData.getRowStats());
-      Assert.assertTrue(status.getErrorMsg().contains("java.lang.IllegalArgumentException\n\tat java.nio.Buffer.position"));
+      Assert.assertTrue(status.getErrorMsg()
+                              .contains("java.lang.IllegalArgumentException\n\tat java.nio.Buffer.position"));
     }
   }
 
@@ -1466,7 +1505,7 @@ public void close()
     );
   }
 
-  public long sumMetric(final Task task, final DimFilter filter, final String metric)
+  public Long sumMetric(final Task task, final DimFilter filter, final String metric)
   {
     // Do a query.
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
@@ -1482,7 +1521,12 @@ public long sumMetric(final Task task, final DimFilter filter, final String metr
 
     List<Result<TimeseriesResultValue>> results =
         task.getQueryRunner(query).run(QueryPlus.wrap(query), ImmutableMap.of()).toList();
-    return results.isEmpty() ? 0 : results.get(0).getValue().getLongMetric(metric);
+
+    if (results.isEmpty()) {
+      return 0L;
+    } else {
+      return results.get(0).getValue().getLongMetric(metric);
+    }
   }
 
   private IngestionStatsAndErrorsTaskReportData getTaskReportData() throws IOException
diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java
index b8448602de3..46d94b17e5c 100644
--- a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java
+++ b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java
@@ -32,6 +32,7 @@
 import com.google.common.util.concurrent.MoreExecutors;
 import io.druid.client.cache.CacheConfig;
 import io.druid.client.cache.MapCache;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.Firehose;
 import io.druid.data.input.FirehoseFactory;
 import io.druid.data.input.InputRow;
@@ -119,6 +120,7 @@
 import io.druid.server.coordination.DataSegmentServerAnnouncer;
 import io.druid.server.coordination.ServerType;
 import io.druid.timeline.DataSegment;
+import io.druid.utils.Runnables;
 import org.easymock.EasyMock;
 import org.hamcrest.CoreMatchers;
 import org.joda.time.DateTime;
@@ -205,7 +207,7 @@ public InputRow nextRow()
     @Override
     public Runnable commit()
     {
-      return () -> {};
+      return Runnables.getNoopRunnable();
     }
 
     @Override
@@ -351,8 +353,8 @@ public void testBasics() throws Exception
     Assert.assertEquals(0, task.getMetrics().unparseable());
 
     // Do some queries.
-    Assert.assertEquals(2, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(2, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     // Simulate handoff.
     for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) {
@@ -420,10 +422,15 @@ public void testTransformSpec() throws Exception
     Assert.assertEquals(0, task.getMetrics().unparseable());
 
     // Do some queries.
-    Assert.assertEquals(1, sumMetric(task, null, "rows"));
-    Assert.assertEquals(1, sumMetric(task, new SelectorDimFilter("dim1t", "foofoo", null), "rows"));
-    Assert.assertEquals(0, sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "rows"));
-    Assert.assertEquals(1, sumMetric(task, null, "met1"));
+    Assert.assertEquals(1, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(1, sumMetric(task, new SelectorDimFilter("dim1t", "foofoo", null), "rows").longValue());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0, sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "rows").longValue());
+    } else {
+      Assert.assertNull(sumMetric(task, new SelectorDimFilter("dim1t", "barbar", null), "rows"));
+
+    }
+    Assert.assertEquals(1, sumMetric(task, null, "met1").longValue());
 
     // Simulate handoff.
     for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) {
@@ -538,8 +545,8 @@ public void testNoReportParseExceptions() throws Exception
     Assert.assertEquals(2, task.getMetrics().unparseable());
 
     // Do some queries.
-    Assert.assertEquals(3, sumMetric(task, null, "rows"));
-    Assert.assertEquals(3, sumMetric(task, null, "met1"));
+    Assert.assertEquals(3, sumMetric(task, null, "rows").longValue());
+    Assert.assertEquals(3, sumMetric(task, null, "met1").longValue());
 
     // Simulate handoff.
     for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) {
@@ -611,7 +618,7 @@ public void testRestore() throws Exception
       }
 
       // Do a query, at this point the previous data should be loaded.
-      Assert.assertEquals(1, sumMetric(task2, null, "rows"));
+      Assert.assertEquals(1, sumMetric(task2, null, "rows").longValue());
 
       final TestFirehose firehose = (TestFirehose) task2.getFirehose();
 
@@ -632,7 +639,7 @@ public void testRestore() throws Exception
       publishedSegment = Iterables.getOnlyElement(mdc.getPublished());
 
       // Do a query.
-      Assert.assertEquals(2, sumMetric(task2, null, "rows"));
+      Assert.assertEquals(2, sumMetric(task2, null, "rows").longValue());
 
       // Simulate handoff.
       for (Map.Entry<SegmentDescriptor, Pair<Executor, Runnable>> entry : handOffCallbacks.entrySet()) {
@@ -693,7 +700,7 @@ public void testRestoreAfterHandoffAttemptDuringShutdown() throws Exception
       publishedSegment = Iterables.getOnlyElement(mdc.getPublished());
 
       // Do a query.
-      Assert.assertEquals(1, sumMetric(task1, null, "rows"));
+      Assert.assertEquals(1, sumMetric(task1, null, "rows").longValue());
 
       // Trigger graceful shutdown.
       task1.stopGracefully();
@@ -1081,7 +1088,7 @@ public void close()
     return toolboxFactory.build(task);
   }
 
-  public long sumMetric(final Task task, final DimFilter filter, final String metric)
+  public Long sumMetric(final Task task, final DimFilter filter, final String metric)
   {
     // Do a query.
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
@@ -1097,6 +1104,10 @@ public long sumMetric(final Task task, final DimFilter filter, final String metr
 
     List<Result<TimeseriesResultValue>> results =
         task.getQueryRunner(query).run(QueryPlus.wrap(query), ImmutableMap.of()).toList();
-    return results.isEmpty() ? 0 : results.get(0).getValue().getLongMetric(metric);
+    if (results.isEmpty()) {
+      return 0L;
+    } else {
+      return results.get(0).getValue().getLongMetric(metric);
+    }
   }
 }
diff --git a/java-util/pom.xml b/java-util/pom.xml
index 62360df1c56..be831561ef6 100644
--- a/java-util/pom.xml
+++ b/java-util/pom.xml
@@ -53,6 +53,10 @@
             <groupId>org.skife.config</groupId>
             <artifactId>config-magic</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.google.inject</groupId>
+            <artifactId>guice</artifactId>
+        </dependency>
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
diff --git a/common/src/main/java/io/druid/common/config/NullHandling.java b/java-util/src/main/java/io/druid/common/config/NullHandling.java
similarity index 96%
rename from common/src/main/java/io/druid/common/config/NullHandling.java
rename to java-util/src/main/java/io/druid/common/config/NullHandling.java
index 762f8213214..af5acfbbcf9 100644
--- a/common/src/main/java/io/druid/common/config/NullHandling.java
+++ b/java-util/src/main/java/io/druid/common/config/NullHandling.java
@@ -41,12 +41,13 @@
   public static final Double ZERO_DOUBLE = 0.0d;
   public static final Float ZERO_FLOAT = 0.0f;
   public static final Long ZERO_LONG = 0L;
+  public static final byte IS_NULL_BYTE = (byte) 1;
+  public static final byte IS_NOT_NULL_BYTE = (byte) 0;
 
   /**
    * INSTANCE is injected using static injection to avoid adding JacksonInject annotations all over the code.
    * See io.druid.guice.NullHandlingModule for details.
    * It does not take effect in all unit tests since we don't use Guice Injection.
-   * For tests default system property is supposed to be used only in tests
    */
   @Inject
   private static NullValueHandlingConfig INSTANCE = new NullValueHandlingConfig(
diff --git a/common/src/main/java/io/druid/common/config/NullValueHandlingConfig.java b/java-util/src/main/java/io/druid/common/config/NullValueHandlingConfig.java
similarity index 100%
rename from common/src/main/java/io/druid/common/config/NullValueHandlingConfig.java
rename to java-util/src/main/java/io/druid/common/config/NullValueHandlingConfig.java
diff --git a/java-util/src/main/java/io/druid/java/util/common/StringUtils.java b/java-util/src/main/java/io/druid/java/util/common/StringUtils.java
index 16830e1b20f..ad025a5d73d 100644
--- a/java-util/src/main/java/io/druid/java/util/common/StringUtils.java
+++ b/java-util/src/main/java/io/druid/java/util/common/StringUtils.java
@@ -19,6 +19,7 @@
 
 package io.druid.java.util.common;
 
+import com.google.common.base.Strings;
 import com.google.common.base.Throwables;
 
 import javax.annotation.Nullable;
@@ -172,4 +173,36 @@ private static String removeChar(String s, char c, int firstOccurranceIndex)
     }
     return sb.toString();
   }
+
+  /**
+   * Returns the given string if it is non-null; the empty string otherwise.
+   * This method should only be used at places where null to empty conversion is
+   * irrelevant to null handling of the data.
+   *
+   * @param string the string to test and possibly return
+   * @return {@code string} itself if it is non-null; {@code ""} if it is null
+   */
+  public static String nullToEmptyNonDruidDataString(@Nullable String string)
+  {
+    //CHECKSTYLE.OFF: Regexp
+    return Strings.nullToEmpty(string);
+    //CHECKSTYLE.ON: Regexp
+  }
+
+  /**
+   * Returns the given string if it is nonempty; {@code null} otherwise.
+   * This method should only be used at places where empty to null conversion is
+   * irrelevant to null handling of the data.
+   *
+   * @param string the string to test and possibly return
+   * @return {@code string} itself if it is nonempty; {@code null} if it is
+   *     empty or null
+   */
+  @Nullable
+  public static String emptyToNullNonDruidDataString(@Nullable String string)
+  {
+    //CHECKSTYLE.OFF: Regexp
+    return Strings.emptyToNull(string);
+    //CHECKSTYLE.ON: Regexp
+  }
 }
diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java b/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java
index e3975ca15d4..bd062c41b9e 100644
--- a/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java
+++ b/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java
@@ -21,8 +21,8 @@
 
 import com.google.common.base.Function;
 import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import org.joda.time.DateTimeZone;
 
@@ -60,10 +60,10 @@
     return (input) -> {
       if (input != null && input.contains(listDelimiter)) {
         return StreamSupport.stream(listSplitter.split(input).spliterator(), false)
-                            .map(Strings::emptyToNull)
+                            .map(NullHandling::emptyToNullIfNeeded)
                             .collect(Collectors.toList());
       } else {
-        return Strings.emptyToNull(input);
+        return NullHandling.emptyToNullIfNeeded(input);
       }
     };
   }
diff --git a/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java b/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java
index 235728bc3c3..712ba9b9830 100644
--- a/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java
+++ b/java-util/src/main/java/io/druid/java/util/http/client/NettyHttpClient.java
@@ -20,7 +20,6 @@
 package io.druid.java.util.http.client;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.Multimap;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
@@ -140,8 +139,7 @@ public void stop()
     } else {
       channel = channelFuture.getChannel();
     }
-
-    final String urlFile = Strings.nullToEmpty(url.getFile());
+    final String urlFile = StringUtils.nullToEmptyNonDruidDataString(url.getFile());
     final HttpRequest httpRequest = new DefaultHttpRequest(
         HttpVersion.HTTP_1_1,
         method,
diff --git a/processing/src/main/java/io/druid/guice/GuiceInjectors.java b/processing/src/main/java/io/druid/guice/GuiceInjectors.java
index f3bac2362ce..8adaae4dd25 100644
--- a/processing/src/main/java/io/druid/guice/GuiceInjectors.java
+++ b/processing/src/main/java/io/druid/guice/GuiceInjectors.java
@@ -42,6 +42,7 @@
         new JacksonModule(),
         new PropertiesModule(Arrays.asList("common.runtime.properties", "runtime.properties")),
         new ConfigModule(),
+        new NullHandlingModule(),
         new Module()
         {
           @Override
diff --git a/processing/src/main/java/io/druid/guice/NullHandlingModule.java b/processing/src/main/java/io/druid/guice/NullHandlingModule.java
new file mode 100644
index 00000000000..b552cb11b53
--- /dev/null
+++ b/processing/src/main/java/io/druid/guice/NullHandlingModule.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.guice;
+
+import com.google.inject.Binder;
+import com.google.inject.Module;
+import io.druid.common.config.NullHandling;
+import io.druid.common.config.NullValueHandlingConfig;
+
+/**
+ */
+public class NullHandlingModule implements Module
+{
+  @Override
+  public void configure(Binder binder)
+  {
+    JsonConfigProvider.bind(binder, "druid.generic", NullValueHandlingConfig.class);
+    binder.requestStaticInjection(NullHandling.class);
+  }
+}
diff --git a/processing/src/main/java/io/druid/query/DefaultQueryMetrics.java b/processing/src/main/java/io/druid/query/DefaultQueryMetrics.java
index 8e13ae5bead..e82a8c80a35 100644
--- a/processing/src/main/java/io/druid/query/DefaultQueryMetrics.java
+++ b/processing/src/main/java/io/druid/query/DefaultQueryMetrics.java
@@ -21,11 +21,11 @@
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableMap;
+import io.druid.collections.bitmap.BitmapFactory;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.emitter.service.ServiceEmitter;
 import io.druid.java.util.emitter.service.ServiceMetricEvent;
-import io.druid.collections.bitmap.BitmapFactory;
 import io.druid.query.filter.Filter;
 import org.joda.time.Interval;
 
@@ -118,7 +118,7 @@ public void duration(QueryType query)
   @Override
   public void queryId(QueryType query)
   {
-    setDimension(DruidMetrics.ID, Strings.nullToEmpty(query.getId()));
+    setDimension(DruidMetrics.ID, StringUtils.nullToEmptyNonDruidDataString(query.getId()));
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregateCombiner.java b/processing/src/main/java/io/druid/query/aggregation/AggregateCombiner.java
index 797f4484383..8330a8d2dd1 100644
--- a/processing/src/main/java/io/druid/query/aggregation/AggregateCombiner.java
+++ b/processing/src/main/java/io/druid/query/aggregation/AggregateCombiner.java
@@ -19,6 +19,7 @@
 
 package io.druid.query.aggregation;
 
+import io.druid.guice.annotations.ExtensionPoint;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
 import io.druid.segment.ColumnValueSelector;
 
@@ -39,6 +40,7 @@
  * @see DoubleAggregateCombiner
  * @see ObjectAggregateCombiner
  */
+@ExtensionPoint
 public interface AggregateCombiner<T> extends ColumnValueSelector<T>
 {
   /**
diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java
index ccd88442c81..9ca278414ec 100644
--- a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java
@@ -36,6 +36,8 @@
  * AggregatorFactory is a strategy (in the terms of Design Patterns) that represents column aggregation, e. g. min,
  * max, sum of metric columns, or cardinality of dimension columns (see {@link
  * io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory}).
+ * Implementations of {@link AggregatorFactory} which need to support nullable aggregations are encouraged
+ * to extend {@link NullableAggregatorFactory}.
  */
 @ExtensionPoint
 public abstract class AggregatorFactory implements Cacheable
@@ -59,7 +61,8 @@
    *
    * @return an object representing the combination of lhs and rhs, this can be a new object or a mutation of the inputs
    */
-  public abstract Object combine(Object lhs, Object rhs);
+  @Nullable
+  public abstract Object combine(@Nullable Object lhs, @Nullable Object rhs);
 
   /**
    * Creates an AggregateCombiner to fold rollup aggregation results from serveral "rows" of different indexes during
@@ -126,7 +129,8 @@ public AggregatorFactory getMergingFactory(AggregatorFactory other) throws Aggre
    *
    * @return the finalized value that should be returned for the initial query
    */
-  public abstract Object finalizeComputation(Object object);
+  @Nullable
+  public abstract Object finalizeComputation(@Nullable Object object);
 
   public abstract String getName();
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java b/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java
index 833eed38560..1bd9a3e1f16 100644
--- a/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java
+++ b/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java
@@ -168,7 +168,7 @@ public static BaseFloatColumnValueSelector makeColumnValueSelectorWithFloatDefau
         public float getFloat()
         {
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull() ? nullValue : (float) exprEval.asDouble();
+          return exprEval.isNumericNull() ? nullValue : (float) exprEval.asDouble();
         }
 
         @Override
@@ -181,7 +181,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
         public boolean isNull()
         {
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull();
+          return exprEval == null || exprEval.isNumericNull();
         }
       }
       return new ExpressionFloatColumnSelector();
@@ -209,7 +209,7 @@ public static BaseLongColumnValueSelector makeColumnValueSelectorWithLongDefault
         public long getLong()
         {
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull() ? nullValue : exprEval.asLong();
+          return exprEval.isNumericNull() ? nullValue : exprEval.asLong();
         }
 
         @Override
@@ -222,7 +222,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
         public boolean isNull()
         {
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull();
+          return exprEval == null || exprEval.isNumericNull();
         }
       }
       return new ExpressionLongColumnSelector();
@@ -250,7 +250,7 @@ public static BaseDoubleColumnValueSelector makeColumnValueSelectorWithDoubleDef
         public double getDouble()
         {
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull() ? nullValue : exprEval.asDouble();
+          return exprEval.isNumericNull() ? nullValue : exprEval.asDouble();
         }
 
         @Override
@@ -263,7 +263,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
         public boolean isNull()
         {
           final ExprEval exprEval = baseSelector.getObject();
-          return exprEval.isNull();
+          return exprEval == null || exprEval.isNumericNull();
         }
       }
       return new ExpressionDoubleColumnSelector();
diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java
index 1031213bd5f..6a08c248d0e 100644
--- a/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseDoubleColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,25 +53,41 @@ public DoubleMaxAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseDoubleColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new DoubleMaxAggregator(getDoubleColumnSelector(metricFactory, Double.NEGATIVE_INFINITY));
+    return getDoubleColumnSelector(
+        metricFactory,
+        Double.NEGATIVE_INFINITY
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseDoubleColumnValueSelector selector)
   {
-    return new DoubleMaxBufferAggregator(getDoubleColumnSelector(metricFactory, Double.NEGATIVE_INFINITY));
+    return new DoubleMaxAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseDoubleColumnValueSelector selector)
   {
+    return new DoubleMaxBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
+  {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return DoubleMaxAggregator.combineValues(lhs, rhs);
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     return new DoubleMaxAggregateCombiner();
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java
index 618b7ce59a2..3607ff8879e 100644
--- a/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseDoubleColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,25 +53,41 @@ public DoubleMinAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseDoubleColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new DoubleMinAggregator(getDoubleColumnSelector(metricFactory, Double.POSITIVE_INFINITY));
+    return getDoubleColumnSelector(
+        metricFactory,
+        Double.POSITIVE_INFINITY
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseDoubleColumnValueSelector selector)
   {
-    return new DoubleMinBufferAggregator(getDoubleColumnSelector(metricFactory, Double.POSITIVE_INFINITY));
+    return new DoubleMinAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseDoubleColumnValueSelector selector)
   {
+    return new DoubleMinBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
+  {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return DoubleMinAggregator.combineValues(lhs, rhs);
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     return new DoubleMinAggregateCombiner();
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java
index 738427582d4..bebc4ea9a20 100644
--- a/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseDoubleColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,25 +53,41 @@ public DoubleSumAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseDoubleColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new DoubleSumAggregator(getDoubleColumnSelector(metricFactory, 0.0));
+    return getDoubleColumnSelector(
+        metricFactory,
+        0.0d
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseDoubleColumnValueSelector selector)
   {
-    return new DoubleSumBufferAggregator(getDoubleColumnSelector(metricFactory, 0.0));
+    return new DoubleSumAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseDoubleColumnValueSelector selector)
   {
+    return new DoubleSumBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
+  {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return DoubleSumAggregator.combineValues(lhs, rhs);
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     return new DoubleSumAggregateCombiner();
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java
index 7d2743e66a1..0bb755804d3 100644
--- a/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseFloatColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,25 +53,41 @@ public FloatMaxAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseFloatColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new FloatMaxAggregator(getFloatColumnSelector(metricFactory, Float.NEGATIVE_INFINITY));
+    return getFloatColumnSelector(
+        metricFactory,
+        Float.NEGATIVE_INFINITY
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseFloatColumnValueSelector selector)
   {
-    return new FloatMaxBufferAggregator(getFloatColumnSelector(metricFactory, Float.NEGATIVE_INFINITY));
+    return new FloatMaxAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseFloatColumnValueSelector selector)
   {
+    return new FloatMaxBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
+  {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return FloatMaxAggregator.combineValues(lhs, rhs);
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     return new DoubleMaxAggregateCombiner();
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java
index 9fae8ec64d1..59228c23efc 100644
--- a/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseFloatColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,25 +53,41 @@ public FloatMinAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseFloatColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new FloatMinAggregator(getFloatColumnSelector(metricFactory, Float.POSITIVE_INFINITY));
+    return getFloatColumnSelector(
+        metricFactory,
+        Float.POSITIVE_INFINITY
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseFloatColumnValueSelector selector)
   {
-    return new FloatMinBufferAggregator(getFloatColumnSelector(metricFactory, Float.POSITIVE_INFINITY));
+    return new FloatMinAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseFloatColumnValueSelector selector)
   {
+    return new FloatMinBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
+  {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return FloatMinAggregator.combineValues(lhs, rhs);
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     return new DoubleMinAggregateCombiner();
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java
index 38a039e3143..aec5de4149e 100644
--- a/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseFloatColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,25 +53,41 @@ public FloatSumAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseFloatColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new FloatSumAggregator(getFloatColumnSelector(metricFactory, 0.0f));
+    return getFloatColumnSelector(
+        metricFactory,
+        0.0f
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseFloatColumnValueSelector selector)
   {
-    return new FloatSumBufferAggregator(getFloatColumnSelector(metricFactory, 0.0f));
+    return new FloatSumAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseFloatColumnValueSelector selector)
   {
+    return new FloatSumBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
+  {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return FloatSumAggregator.combineValues(lhs, rhs);
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     return new DoubleSumAggregateCombiner();
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java
index bc437c5acb0..9b6ddc9f7be 100644
--- a/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseLongColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,25 +53,41 @@ public LongMaxAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseLongColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new LongMaxAggregator(getLongColumnSelector(metricFactory, Long.MIN_VALUE));
+    return getLongColumnSelector(
+        metricFactory,
+        Long.MIN_VALUE
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseLongColumnValueSelector selector)
   {
-    return new LongMaxBufferAggregator(getLongColumnSelector(metricFactory, Long.MIN_VALUE));
+    return new LongMaxAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseLongColumnValueSelector selector)
   {
+    return new LongMaxBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
+  {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return LongMaxAggregator.combineValues(lhs, rhs);
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     return new LongMaxAggregateCombiner();
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java
index 3b037fb94ca..9dd83d85634 100644
--- a/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseLongColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,25 +53,41 @@ public LongMinAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseLongColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new LongMinAggregator(getLongColumnSelector(metricFactory, Long.MAX_VALUE));
+    return getLongColumnSelector(
+        metricFactory,
+        Long.MAX_VALUE
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  public Aggregator factorize(ColumnSelectorFactory metricFactory, BaseLongColumnValueSelector selector)
   {
-    return new LongMinBufferAggregator(getLongColumnSelector(metricFactory, Long.MAX_VALUE));
+    return new LongMinAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseLongColumnValueSelector selector)
   {
+    return new LongMinBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
+  {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return LongMinAggregator.combineValues(lhs, rhs);
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     return new LongMinAggregateCombiner();
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java
index e37dc987f6d..58137b22554 100644
--- a/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java
@@ -24,8 +24,10 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import io.druid.java.util.common.StringUtils;
 import io.druid.math.expr.ExprMacroTable;
+import io.druid.segment.BaseLongColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
@@ -51,25 +53,41 @@ public LongSumAggregatorFactory(String name, String fieldName)
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected BaseLongColumnValueSelector selector(ColumnSelectorFactory metricFactory)
   {
-    return new LongSumAggregator(getLongColumnSelector(metricFactory, 0L));
+    return getLongColumnSelector(
+        metricFactory,
+        0L
+    );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, BaseLongColumnValueSelector selector)
   {
-    return new LongSumBufferAggregator(getLongColumnSelector(metricFactory, 0L));
+    return new LongSumAggregator(selector);
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, BaseLongColumnValueSelector selector)
   {
+    return new LongSumBufferAggregator(selector);
+  }
+
+  @Override
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
+  {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return LongSumAggregator.combineValues(lhs, rhs);
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     return new LongSumAggregateCombiner();
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/NullableAggregateCombiner.java b/processing/src/main/java/io/druid/query/aggregation/NullableAggregateCombiner.java
new file mode 100644
index 00000000000..f233defb4c3
--- /dev/null
+++ b/processing/src/main/java/io/druid/query/aggregation/NullableAggregateCombiner.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.aggregation;
+
+import io.druid.guice.annotations.PublicApi;
+import io.druid.segment.BaseNullableColumnValueSelector;
+import io.druid.segment.ColumnValueSelector;
+
+import javax.annotation.Nullable;
+
+/**
+ * The result of a NullableAggregateCombiner will be null if all the values to be combined are null values or no values are combined at all.
+ * If any of the value is non-null, the result would be the value of the delegate combiner.
+ * Note that the delegate combiner is not required to perform check for {@link BaseNullableColumnValueSelector#isNull()} on the selector as only non-null values
+ * will be passed to the delegate combiner. This class is only used when SQL compatible null handling is enabled.
+ */
+@PublicApi
+public final class NullableAggregateCombiner<T> implements AggregateCombiner<T>
+{
+  private boolean isNullResult = true;
+
+  private final AggregateCombiner<T> delegate;
+
+  public NullableAggregateCombiner(AggregateCombiner<T> delegate)
+  {
+    this.delegate = delegate;
+  }
+
+  @Override
+  public void reset(ColumnValueSelector selector)
+  {
+    if (selector.isNull()) {
+      isNullResult = true;
+    } else {
+      isNullResult = false;
+      delegate.reset(selector);
+    }
+  }
+
+  @Override
+  public void fold(ColumnValueSelector selector)
+  {
+    boolean isNotNull = !selector.isNull();
+    if (isNotNull) {
+      if (isNullResult) {
+        isNullResult = false;
+        delegate.reset(selector);
+      } else {
+        delegate.fold(selector);
+      }
+    }
+  }
+
+  @Override
+  public float getFloat()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return primitive float for Null Value");
+    }
+    return delegate.getFloat();
+  }
+
+  @Override
+  public double getDouble()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return double for Null Value");
+    }
+    return delegate.getDouble();
+  }
+
+  @Override
+  public long getLong()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return long for Null Value");
+    }
+    return delegate.getLong();
+  }
+
+  @Override
+  public boolean isNull()
+  {
+    return isNullResult || delegate.isNull();
+  }
+
+  @Nullable
+  @Override
+  public T getObject()
+  {
+    return isNullResult ? null : delegate.getObject();
+  }
+
+  @Override
+  public Class classOfObject()
+  {
+    return delegate.classOfObject();
+  }
+}
diff --git a/processing/src/main/java/io/druid/query/aggregation/NullableAggregator.java b/processing/src/main/java/io/druid/query/aggregation/NullableAggregator.java
new file mode 100644
index 00000000000..6b8ddc3dcec
--- /dev/null
+++ b/processing/src/main/java/io/druid/query/aggregation/NullableAggregator.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.aggregation;
+
+import io.druid.guice.annotations.PublicApi;
+import io.druid.segment.BaseNullableColumnValueSelector;
+
+import javax.annotation.Nullable;
+
+/**
+ * The result of a NullableAggregator will be null if all the values to be aggregated are null values
+ * or no values are aggregated at all. If any of the value is non-null, the result would be the aggregated
+ * value of the delegate aggregator. Note that the delegate aggregator is not required to perform check for
+ * {@link BaseNullableColumnValueSelector#isNull()} on the selector as only non-null values will be passed
+ * to the delegate aggregator. This class is only used when SQL compatible null handling is enabled.
+ */
+@PublicApi
+public final class NullableAggregator implements Aggregator
+{
+  private final Aggregator delegate;
+  private final BaseNullableColumnValueSelector selector;
+  private boolean isNullResult = true;
+
+  public NullableAggregator(Aggregator delegate, BaseNullableColumnValueSelector selector)
+  {
+    this.delegate = delegate;
+    this.selector = selector;
+  }
+
+  @Override
+  public void aggregate()
+  {
+    boolean isNotNull = !selector.isNull();
+    if (isNotNull) {
+      if (isNullResult) {
+        isNullResult = false;
+      }
+      delegate.aggregate();
+    }
+  }
+
+  @Override
+  @Nullable
+  public Object get()
+  {
+    if (isNullResult) {
+      return null;
+    }
+    return delegate.get();
+  }
+
+  @Override
+  public float getFloat()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return float for Null Value");
+    }
+    return delegate.getFloat();
+  }
+
+  @Override
+  public long getLong()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return long for Null Value");
+    }
+    return delegate.getLong();
+  }
+
+  @Override
+  public double getDouble()
+  {
+    if (isNullResult) {
+      throw new IllegalStateException("Cannot return double for Null Value");
+    }
+    return delegate.getDouble();
+  }
+
+  @Override
+  public boolean isNull()
+  {
+    return isNullResult || delegate.isNull();
+  }
+
+  @Override
+  public void close()
+  {
+    delegate.close();
+  }
+}
diff --git a/processing/src/main/java/io/druid/query/aggregation/NullableAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/NullableAggregatorFactory.java
new file mode 100644
index 00000000000..623ca2f021c
--- /dev/null
+++ b/processing/src/main/java/io/druid/query/aggregation/NullableAggregatorFactory.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.aggregation;
+
+
+import io.druid.common.config.NullHandling;
+import io.druid.guice.annotations.ExtensionPoint;
+import io.druid.segment.BaseNullableColumnValueSelector;
+import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
+
+/**
+ * Abstract class with functionality to wrap {@link Aggregator}, {@link BufferAggregator} and {@link AggregateCombiner}
+ * to support nullable aggregations for SQL compatibility. Implementations of {@link AggregatorFactory} which need to
+ * support nullable aggregations are encouraged to extend this class.
+ */
+@ExtensionPoint
+public abstract class NullableAggregatorFactory<T extends BaseNullableColumnValueSelector> extends AggregatorFactory
+{
+  @Override
+  public final Aggregator factorize(ColumnSelectorFactory metricFactory)
+  {
+    T selector = selector(metricFactory);
+    Aggregator aggregator = factorize(metricFactory, selector);
+    return NullHandling.replaceWithDefault() ? aggregator : new NullableAggregator(aggregator, selector);
+  }
+
+  @Override
+  public final BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  {
+    T selector = selector(metricFactory);
+    BufferAggregator aggregator = factorizeBuffered(metricFactory, selector);
+    return NullHandling.replaceWithDefault() ? aggregator : new NullableBufferAggregator(aggregator, selector);
+  }
+
+  @Override
+  public final AggregateCombiner makeAggregateCombiner()
+  {
+    AggregateCombiner combiner = makeAggregateCombiner2();
+    return NullHandling.replaceWithDefault() ? combiner : new NullableAggregateCombiner(combiner);
+  }
+
+  @Override
+  public final int getMaxIntermediateSize()
+  {
+    return getMaxIntermediateSize2() + (NullHandling.replaceWithDefault() ? 0 : Byte.BYTES);
+  }
+
+  // ---- ABSTRACT METHODS BELOW ------
+
+  /**
+   * Creates a {@link ColumnValueSelector} for the aggregated column.
+   *
+   * @see ColumnValueSelector
+   */
+  protected abstract T selector(ColumnSelectorFactory metricFactory);
+
+  /**
+   * Creates an {@link Aggregator} to aggregate values from several rows, by using the provided selector.
+   * @param metricFactory metricFactory
+   * @param selector {@link ColumnValueSelector} for the column to aggregate.
+   *
+   * @see Aggregator
+   */
+  protected abstract Aggregator factorize(ColumnSelectorFactory metricFactory, T selector);
+
+  /**
+   * Creates an {@link BufferAggregator} to aggregate values from several rows into a ByteBuffer.
+   * @param metricFactory metricFactory
+   * @param selector {@link ColumnValueSelector} for the column to aggregate.
+   *
+   * @see BufferAggregator
+   */
+  protected abstract BufferAggregator factorizeBuffered(
+      ColumnSelectorFactory metricFactory,
+      T selector
+  );
+
+  /**
+   * Creates an {@link AggregateCombiner} to fold rollup aggregation results from several "rows" of different indexes
+   * during index merging. AggregateCombiner implements the same logic as {@link #combine}, with the difference that it
+   * uses {@link ColumnValueSelector} and its subinterfaces to get inputs and implements {@code ColumnValueSelector}
+   * to provide output.
+   *
+   * @see AggregateCombiner
+   * @see io.druid.segment.IndexMerger
+   */
+  protected abstract AggregateCombiner makeAggregateCombiner2();
+
+  /**
+   * Returns the maximum size that this aggregator will require in bytes for intermediate storage of results.
+   *
+   * @return the maximum number of bytes that an aggregator of this type will require for intermediate result storage.
+   */
+  protected abstract int getMaxIntermediateSize2();
+
+}
diff --git a/processing/src/main/java/io/druid/query/aggregation/NullableBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/NullableBufferAggregator.java
new file mode 100644
index 00000000000..ea66f7f5cf5
--- /dev/null
+++ b/processing/src/main/java/io/druid/query/aggregation/NullableBufferAggregator.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.aggregation;
+
+import io.druid.common.config.NullHandling;
+import io.druid.guice.annotations.PublicApi;
+import io.druid.segment.BaseNullableColumnValueSelector;
+
+import javax.annotation.Nullable;
+import java.nio.ByteBuffer;
+
+/**
+ * The result of a NullableBufferAggregator will be null if all the values to be aggregated are null values or no values are aggregated at all.
+ * If any of the value is non-null, the result would be the aggregated value of the delegate aggregator.
+ * Note that the delegate aggregator is not required to perform check for {@link BaseNullableColumnValueSelector#isNull()} on the selector as only non-null values
+ * will be passed to the delegate aggregator. This class is only used when SQL compatible null handling is enabled.
+ */
+@PublicApi
+public final class NullableBufferAggregator implements BufferAggregator
+{
+
+  private final BufferAggregator delegate;
+  private final BaseNullableColumnValueSelector selector;
+
+  public NullableBufferAggregator(BufferAggregator delegate, BaseNullableColumnValueSelector selector)
+  {
+    this.delegate = delegate;
+    this.selector = selector;
+  }
+
+  @Override
+  public void init(ByteBuffer buf, int position)
+  {
+    buf.put(position, NullHandling.IS_NULL_BYTE);
+    delegate.init(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public void aggregate(ByteBuffer buf, int position)
+  {
+    boolean isNotNull = !selector.isNull();
+    if (isNotNull) {
+      if (buf.get(position) == NullHandling.IS_NULL_BYTE) {
+        buf.put(position, NullHandling.IS_NOT_NULL_BYTE);
+      }
+      delegate.aggregate(buf, position + Byte.BYTES);
+    }
+  }
+
+  @Override
+  @Nullable
+  public Object get(ByteBuffer buf, int position)
+  {
+    if (buf.get(position) == NullHandling.IS_NULL_BYTE) {
+      return null;
+    }
+    return delegate.get(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public float getFloat(ByteBuffer buf, int position)
+  {
+    if (buf.get(position) == NullHandling.IS_NULL_BYTE) {
+      throw new IllegalStateException("Cannot return float for Null Value");
+    }
+    return delegate.getFloat(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public long getLong(ByteBuffer buf, int position)
+  {
+    if (buf.get(position) == NullHandling.IS_NULL_BYTE) {
+      throw new IllegalStateException("Cannot return long for Null Value");
+    }
+    return delegate.getLong(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public double getDouble(ByteBuffer buf, int position)
+  {
+    if (buf.get(position) == NullHandling.IS_NULL_BYTE) {
+      throw new IllegalStateException("Cannot return double for Null Value");
+    }
+    return delegate.getDouble(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public boolean isNull(ByteBuffer buf, int position)
+  {
+    return buf.get(position) == NullHandling.IS_NULL_BYTE || delegate.isNull(buf, position + Byte.BYTES);
+  }
+
+  @Override
+  public void close()
+  {
+    delegate.close();
+  }
+}
diff --git a/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java
index 171d4f39ef1..3d789d1d3da 100644
--- a/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java
@@ -22,6 +22,7 @@
 import io.druid.guice.annotations.ExtensionPoint;
 import io.druid.java.util.common.Cacheable;
 
+import javax.annotation.Nullable;
 import java.util.Comparator;
 import java.util.Map;
 import java.util.Set;
@@ -36,6 +37,7 @@
 
   Comparator getComparator();
 
+  @Nullable
   Object compute(Map<String, Object> combinedAggregators);
 
   String getName();
diff --git a/processing/src/main/java/io/druid/query/aggregation/SimpleDoubleAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/SimpleDoubleAggregatorFactory.java
index af40149fa2c..d62de90c5ed 100644
--- a/processing/src/main/java/io/druid/query/aggregation/SimpleDoubleAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/SimpleDoubleAggregatorFactory.java
@@ -28,12 +28,13 @@
 import io.druid.segment.ColumnSelectorFactory;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
 
-public abstract class SimpleDoubleAggregatorFactory extends AggregatorFactory
+public abstract class SimpleDoubleAggregatorFactory extends NullableAggregatorFactory<BaseDoubleColumnValueSelector>
 {
   protected final String name;
   protected final String fieldName;
@@ -91,7 +92,7 @@ public String getTypeName()
   }
 
   @Override
-  public int getMaxIntermediateSize()
+  public int getMaxIntermediateSize2()
   {
     return Double.BYTES;
   }
@@ -103,7 +104,8 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
     return object;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/SimpleFloatAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/SimpleFloatAggregatorFactory.java
index 65699616228..c51f317f05e 100644
--- a/processing/src/main/java/io/druid/query/aggregation/SimpleFloatAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/SimpleFloatAggregatorFactory.java
@@ -27,12 +27,13 @@
 import io.druid.segment.BaseFloatColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
 
-public abstract class SimpleFloatAggregatorFactory extends AggregatorFactory
+public abstract class SimpleFloatAggregatorFactory extends NullableAggregatorFactory<BaseFloatColumnValueSelector>
 {
   protected final String name;
   protected final String fieldName;
@@ -85,7 +86,7 @@ public String getTypeName()
   }
 
   @Override
-  public int getMaxIntermediateSize()
+  public int getMaxIntermediateSize2()
   {
     return Float.BYTES;
   }
@@ -97,7 +98,8 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
     return object;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/SimpleLongAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/SimpleLongAggregatorFactory.java
index 04c286fe6ee..915f1457586 100644
--- a/processing/src/main/java/io/druid/query/aggregation/SimpleLongAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/SimpleLongAggregatorFactory.java
@@ -27,12 +27,13 @@
 import io.druid.segment.BaseLongColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
 
+import javax.annotation.Nullable;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
 
-public abstract class SimpleLongAggregatorFactory extends AggregatorFactory
+public abstract class SimpleLongAggregatorFactory extends NullableAggregatorFactory<BaseLongColumnValueSelector>
 {
   protected final String name;
   protected final String fieldName;
@@ -81,7 +82,7 @@ public String getTypeName()
   }
 
   @Override
-  public int getMaxIntermediateSize()
+  public int getMaxIntermediateSize2()
   {
     return Long.BYTES;
   }
@@ -93,7 +94,8 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
     return object;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java
index 5fd15ae30c1..8f72f513815 100644
--- a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java
@@ -20,23 +20,33 @@
 package io.druid.query.aggregation.cardinality.types;
 
 import com.google.common.hash.Hasher;
+import io.druid.common.config.NullHandling;
 import io.druid.hll.HyperLogLogCollector;
 import io.druid.query.aggregation.cardinality.CardinalityAggregator;
 import io.druid.segment.BaseDoubleColumnValueSelector;
 
-
+/**
+ * If performance of this class appears to be a bottleneck for somebody,
+ * one simple way to improve it is to split it into two different classes,
+ * one that is used when {@link NullHandling#replaceWithDefault()} is false,
+ * and one - when it's true, moving this computation out of the tight loop
+ */
 public class DoubleCardinalityAggregatorColumnSelectorStrategy
     implements CardinalityAggregatorColumnSelectorStrategy<BaseDoubleColumnValueSelector>
 {
   @Override
-  public void hashRow(BaseDoubleColumnValueSelector dimSelector, Hasher hasher)
+  public void hashRow(BaseDoubleColumnValueSelector selector, Hasher hasher)
   {
-    hasher.putDouble(dimSelector.getDouble());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      hasher.putDouble(selector.getDouble());
+    }
   }
 
   @Override
-  public void hashValues(BaseDoubleColumnValueSelector dimSelector, HyperLogLogCollector collector)
+  public void hashValues(BaseDoubleColumnValueSelector selector, HyperLogLogCollector collector)
   {
-    collector.add(CardinalityAggregator.hashFn.hashLong(Double.doubleToLongBits(dimSelector.getDouble())).asBytes());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      collector.add(CardinalityAggregator.hashFn.hashLong(Double.doubleToLongBits(selector.getDouble())).asBytes());
+    }
   }
 }
diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java
index b46261c7b15..ffa9d764727 100644
--- a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java
@@ -20,22 +20,33 @@
 package io.druid.query.aggregation.cardinality.types;
 
 import com.google.common.hash.Hasher;
+import io.druid.common.config.NullHandling;
 import io.druid.hll.HyperLogLogCollector;
 import io.druid.query.aggregation.cardinality.CardinalityAggregator;
 import io.druid.segment.BaseFloatColumnValueSelector;
 
+/**
+ * If performance of this class appears to be a bottleneck for somebody,
+ * one simple way to improve it is to split it into two different classes,
+ * one that is used when {@link NullHandling#replaceWithDefault()} is false,
+ * and one - when it's true, moving this computation out of the tight loop
+ */
 public class FloatCardinalityAggregatorColumnSelectorStrategy
     implements CardinalityAggregatorColumnSelectorStrategy<BaseFloatColumnValueSelector>
 {
   @Override
   public void hashRow(BaseFloatColumnValueSelector selector, Hasher hasher)
   {
-    hasher.putFloat(selector.getFloat());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      hasher.putFloat(selector.getFloat());
+    }
   }
 
   @Override
   public void hashValues(BaseFloatColumnValueSelector selector, HyperLogLogCollector collector)
   {
-    collector.add(CardinalityAggregator.hashFn.hashInt(Float.floatToIntBits(selector.getFloat())).asBytes());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      collector.add(CardinalityAggregator.hashFn.hashInt(Float.floatToIntBits(selector.getFloat())).asBytes());
+    }
   }
 }
diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java
index a666ed64e1a..904de5b118f 100644
--- a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java
@@ -20,22 +20,33 @@
 package io.druid.query.aggregation.cardinality.types;
 
 import com.google.common.hash.Hasher;
+import io.druid.common.config.NullHandling;
 import io.druid.hll.HyperLogLogCollector;
 import io.druid.query.aggregation.cardinality.CardinalityAggregator;
 import io.druid.segment.BaseLongColumnValueSelector;
 
+/**
+ * If performance of this class appears to be a bottleneck for somebody,
+ * one simple way to improve it is to split it into two different classes,
+ * one that is used when {@link NullHandling#replaceWithDefault()} is false,
+ * and one - when it's true, moving this computation out of the tight loop
+ */
 public class LongCardinalityAggregatorColumnSelectorStrategy
     implements CardinalityAggregatorColumnSelectorStrategy<BaseLongColumnValueSelector>
 {
   @Override
-  public void hashRow(BaseLongColumnValueSelector dimSelector, Hasher hasher)
+  public void hashRow(BaseLongColumnValueSelector selector, Hasher hasher)
   {
-    hasher.putLong(dimSelector.getLong());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      hasher.putLong(selector.getLong());
+    }
   }
 
   @Override
-  public void hashValues(BaseLongColumnValueSelector dimSelector, HyperLogLogCollector collector)
+  public void hashValues(BaseLongColumnValueSelector selector, HyperLogLogCollector collector)
   {
-    collector.add(CardinalityAggregator.hashFn.hashLong(dimSelector.getLong()).asBytes());
+    if (NullHandling.replaceWithDefault() || !selector.isNull()) {
+      collector.add(CardinalityAggregator.hashFn.hashLong(selector.getLong()).asBytes());
+    }
   }
 }
diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java
index 390b44d7b27..e133ba61061 100644
--- a/processing/src/main/java/io/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java
@@ -20,6 +20,7 @@
 package io.druid.query.aggregation.cardinality.types;
 
 import com.google.common.hash.Hasher;
+import io.druid.common.config.NullHandling;
 import io.druid.hll.HyperLogLogCollector;
 import io.druid.query.aggregation.cardinality.CardinalityAggregator;
 import io.druid.segment.DimensionSelector;
@@ -40,20 +41,34 @@ public void hashRow(DimensionSelector dimSelector, Hasher hasher)
     // nothing to add to hasher if size == 0, only handle size == 1 and size != 0 cases.
     if (size == 1) {
       final String value = dimSelector.lookupName(row.get(0));
-      hasher.putUnencodedChars(nullToSpecial(value));
+      if (NullHandling.replaceWithDefault() || value != null) {
+        hasher.putUnencodedChars(nullToSpecial(value));
+      }
     } else if (size != 0) {
+      boolean hasNonNullValue = false;
       final String[] values = new String[size];
       for (int i = 0; i < size; ++i) {
         final String value = dimSelector.lookupName(row.get(i));
+        // SQL standard spec does not count null values,
+        // Skip counting null values when we are not replacing null with default value.
+        // A special value for null in case null handling is configured to use empty string for null.
+        if (NullHandling.sqlCompatible() && !hasNonNullValue && value != null) {
+          hasNonNullValue = true;
+        }
         values[i] = nullToSpecial(value);
       }
-      // Values need to be sorted to ensure consistent multi-value ordering across different segments
-      Arrays.sort(values);
-      for (int i = 0; i < size; ++i) {
-        if (i != 0) {
-          hasher.putChar(CARDINALITY_AGG_SEPARATOR);
+      // SQL standard spec does not count null values,
+      // Skip counting null values when we are not replacing null with default value.
+      // A special value for null in case null handling is configured to use empty string for null.
+      if (NullHandling.replaceWithDefault() || hasNonNullValue) {
+        // Values need to be sorted to ensure consistent multi-value ordering across different segments
+        Arrays.sort(values);
+        for (int i = 0; i < size; ++i) {
+          if (i != 0) {
+            hasher.putChar(CARDINALITY_AGG_SEPARATOR);
+          }
+          hasher.putUnencodedChars(values[i]);
         }
-        hasher.putUnencodedChars(values[i]);
       }
     }
   }
@@ -65,7 +80,12 @@ public void hashValues(DimensionSelector dimSelector, HyperLogLogCollector colle
     for (int i = 0, rowSize = row.size(); i < rowSize; i++) {
       int index = row.get(i);
       final String value = dimSelector.lookupName(index);
-      collector.add(CardinalityAggregator.hashFn.hashUnencodedChars(nullToSpecial(value)).asBytes());
+      // SQL standard spec does not count null values,
+      // Skip counting null values when we are not replacing null with default value.
+      // A special value for null in case null handling is configured to use empty string for null.
+      if (NullHandling.replaceWithDefault() || value != null) {
+        collector.add(CardinalityAggregator.hashFn.hashUnencodedChars(nullToSpecial(value)).asBytes());
+      }
     }
   }
 
diff --git a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java
index bc725cc0769..209352a490e 100644
--- a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java
@@ -32,11 +32,13 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -44,7 +46,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class DoubleFirstAggregatorFactory extends AggregatorFactory
+public class DoubleFirstAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
   public static final Comparator VALUE_COMPARATOR = (o1, o2) -> Doubles.compare(
       ((SerializablePair<Long, Double>) o1).rhs,
@@ -75,20 +77,26 @@ public DoubleFirstAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new DoubleFirstAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new DoubleFirstBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -99,13 +107,20 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return TIME_COMPARATOR.compare(lhs, rhs) <= 0 ? lhs : rhs;
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     throw new UOE("DoubleFirstAggregatorFactory is not supported during ingestion for rollup");
   }
@@ -116,9 +131,8 @@ public AggregatorFactory getCombiningFactory()
     return new DoubleFirstAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new DoubleFirstAggregator(null, null)
         {
           @Override
@@ -134,9 +148,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new DoubleFirstBufferAggregator(null, null)
         {
           @Override
@@ -174,9 +187,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Double>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Double>) object).rhs;
   }
 
   @Override
@@ -219,7 +233,7 @@ public String getTypeName()
   }
 
   @Override
-  public int getMaxIntermediateSize()
+  public int getMaxIntermediateSize2()
   {
     return Long.BYTES + Double.BYTES;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java
index fe66d280ca5..af35d369908 100644
--- a/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java
@@ -32,11 +32,13 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -44,7 +46,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class FloatFirstAggregatorFactory extends AggregatorFactory
+public class FloatFirstAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
   public static final Comparator VALUE_COMPARATOR = (o1, o2) -> Doubles.compare(
       ((SerializablePair<Long, Float>) o1).rhs,
@@ -73,20 +75,26 @@ public FloatFirstAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new FloatFirstAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new FloatFirstBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -97,13 +105,20 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return TIME_COMPARATOR.compare(lhs, rhs) <= 0 ? lhs : rhs;
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     throw new UOE("FloatFirstAggregatorFactory is not supported during ingestion for rollup");
   }
@@ -114,9 +129,8 @@ public AggregatorFactory getCombiningFactory()
     return new FloatFirstAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new FloatFirstAggregator(null, null)
         {
           @Override
@@ -132,9 +146,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new FloatFirstBufferAggregator(null, null)
         {
           @Override
@@ -172,9 +185,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Float>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Float>) object).rhs;
   }
 
   @Override
@@ -214,7 +228,7 @@ public String getTypeName()
   }
 
   @Override
-  public int getMaxIntermediateSize()
+  public int getMaxIntermediateSize2()
   {
     return Long.BYTES + Float.BYTES;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java
index a2780e94e6a..cafb33f8ab2 100644
--- a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java
@@ -23,26 +23,28 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.primitives.Longs;
-import io.druid.java.util.common.StringUtils;
 import io.druid.collections.SerializablePair;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.UOE;
 import io.druid.query.aggregation.AggregateCombiner;
 import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 
-public class LongFirstAggregatorFactory extends AggregatorFactory
+public class LongFirstAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
   public static final Comparator VALUE_COMPARATOR = (o1, o2) -> Longs.compare(
       ((SerializablePair<Long, Long>) o1).rhs,
@@ -66,20 +68,26 @@ public LongFirstAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new LongFirstAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new LongFirstBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -90,13 +98,20 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (lhs == null) {
+      return rhs;
+    }
+    if (rhs == null) {
+      return lhs;
+    }
     return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) <= 0 ? lhs : rhs;
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     throw new UOE("LongFirstAggregatorFactory is not supported during ingestion for rollup");
   }
@@ -107,9 +122,8 @@ public AggregatorFactory getCombiningFactory()
     return new LongFirstAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new LongFirstAggregator(null, null)
         {
           @Override
@@ -125,9 +139,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new LongFirstBufferAggregator(null, null)
         {
           @Override
@@ -165,9 +178,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Long>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Long>) object).rhs;
   }
 
   @Override
@@ -207,7 +221,7 @@ public String getTypeName()
   }
 
   @Override
-  public int getMaxIntermediateSize()
+  public int getMaxIntermediateSize2()
   {
     return Long.BYTES * 2;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java
index ed011684f06..4e5344e8f4d 100644
--- a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java
@@ -30,13 +30,15 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory;
 import io.druid.query.aggregation.first.LongFirstAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -44,7 +46,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class DoubleLastAggregatorFactory extends AggregatorFactory
+public class DoubleLastAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
 
   private final String fieldName;
@@ -65,20 +67,26 @@ public DoubleLastAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new DoubleLastAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new DoubleLastBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -89,13 +97,20 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) > 0 ? lhs : rhs;
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     throw new UOE("DoubleLastAggregatorFactory is not supported during ingestion for rollup");
   }
@@ -106,9 +121,8 @@ public AggregatorFactory getCombiningFactory()
     return new DoubleLastAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new DoubleLastAggregator(null, null)
         {
           @Override
@@ -124,9 +138,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new DoubleLastBufferAggregator(null, null)
         {
           @Override
@@ -164,9 +177,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Double>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Double>) object).rhs;
   }
 
   @Override
@@ -210,7 +224,7 @@ public String getTypeName()
   }
 
   @Override
-  public int getMaxIntermediateSize()
+  public int getMaxIntermediateSize2()
   {
     return Long.BYTES + Double.BYTES;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java
index 9dce9c8949a..5740a18b413 100644
--- a/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java
@@ -30,13 +30,15 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.aggregation.first.FloatFirstAggregatorFactory;
 import io.druid.query.aggregation.first.LongFirstAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -44,7 +46,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class FloatLastAggregatorFactory extends AggregatorFactory
+public class FloatLastAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
 
   private final String fieldName;
@@ -63,20 +65,26 @@ public FloatLastAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new FloatLastAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new FloatLastBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -87,13 +95,20 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return FloatFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) > 0 ? lhs : rhs;
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     throw new UOE("FloatLastAggregatorFactory is not supported during ingestion for rollup");
   }
@@ -104,9 +119,8 @@ public AggregatorFactory getCombiningFactory()
     return new FloatLastAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new FloatLastAggregator(null, null)
         {
           @Override
@@ -122,9 +136,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new FloatLastBufferAggregator(null, null)
         {
           @Override
@@ -162,9 +175,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Float>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Float>) object).rhs;
   }
 
   @Override
@@ -205,7 +219,7 @@ public String getTypeName()
   }
 
   @Override
-  public int getMaxIntermediateSize()
+  public int getMaxIntermediateSize2()
   {
     return Long.BYTES + Float.BYTES;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java
index 4bee9e9a665..70c5a970f6a 100644
--- a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java
+++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java
@@ -22,21 +22,23 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import io.druid.java.util.common.StringUtils;
 import io.druid.collections.SerializablePair;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.UOE;
 import io.druid.query.aggregation.AggregateCombiner;
 import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.NullableAggregatorFactory;
 import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory;
 import io.druid.query.aggregation.first.LongFirstAggregatorFactory;
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
-import io.druid.segment.BaseObjectColumnValueSelector;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.column.Column;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -44,7 +46,7 @@
 import java.util.Map;
 import java.util.Objects;
 
-public class LongLastAggregatorFactory extends AggregatorFactory
+public class LongLastAggregatorFactory extends NullableAggregatorFactory<ColumnValueSelector>
 {
   private final String fieldName;
   private final String name;
@@ -62,20 +64,26 @@ public LongLastAggregatorFactory(
   }
 
   @Override
-  public Aggregator factorize(ColumnSelectorFactory metricFactory)
+  protected ColumnValueSelector selector(ColumnSelectorFactory metricFactory)
+  {
+    return metricFactory.makeColumnValueSelector(fieldName);
+  }
+
+  @Override
+  protected Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new LongLastAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
   @Override
-  public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+  protected BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
   {
     return new LongLastBufferAggregator(
         metricFactory.makeColumnValueSelector(Column.TIME_COLUMN_NAME),
-        metricFactory.makeColumnValueSelector(fieldName)
+        selector
     );
   }
 
@@ -86,13 +94,20 @@ public Comparator getComparator()
   }
 
   @Override
-  public Object combine(Object lhs, Object rhs)
+  @Nullable
+  public Object combine(@Nullable Object lhs, @Nullable Object rhs)
   {
+    if (rhs == null) {
+      return lhs;
+    }
+    if (lhs == null) {
+      return rhs;
+    }
     return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) > 0 ? lhs : rhs;
   }
 
   @Override
-  public AggregateCombiner makeAggregateCombiner()
+  public AggregateCombiner makeAggregateCombiner2()
   {
     throw new UOE("LongLastAggregatorFactory is not supported during ingestion for rollup");
   }
@@ -103,9 +118,8 @@ public AggregatorFactory getCombiningFactory()
     return new LongLastAggregatorFactory(name, name)
     {
       @Override
-      public Aggregator factorize(ColumnSelectorFactory metricFactory)
+      public Aggregator factorize(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new LongLastAggregator(null, null)
         {
           @Override
@@ -121,9 +135,8 @@ public void aggregate()
       }
 
       @Override
-      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
+      public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory, ColumnValueSelector selector)
       {
-        final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name);
         return new LongLastBufferAggregator(null, null)
         {
           @Override
@@ -161,9 +174,10 @@ public Object deserialize(Object object)
   }
 
   @Override
-  public Object finalizeComputation(Object object)
+  @Nullable
+  public Object finalizeComputation(@Nullable Object object)
   {
-    return ((SerializablePair<Long, Long>) object).rhs;
+    return object == null ? null : ((SerializablePair<Long, Long>) object).rhs;
   }
 
   @Override
@@ -203,7 +217,7 @@ public String getTypeName()
   }
 
   @Override
-  public int getMaxIntermediateSize()
+  public int getMaxIntermediateSize2()
   {
     return Long.BYTES * 2;
   }
diff --git a/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java
index 5e2bd5b1014..ecb70da4bf1 100644
--- a/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java
@@ -24,6 +24,7 @@
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.IAE;
 import io.druid.query.Queries;
 import io.druid.query.aggregation.AggregatorFactory;
@@ -109,11 +110,21 @@ public Comparator getComparator()
   public Object compute(Map<String, Object> values)
   {
     Iterator<PostAggregator> fieldsIter = fields.iterator();
-    double retVal = 0.0;
+    Double retVal = NullHandling.defaultDoubleValue();
     if (fieldsIter.hasNext()) {
-      retVal = ((Number) fieldsIter.next().compute(values)).doubleValue();
+      Number nextVal = (Number) fieldsIter.next().compute(values);
+      if (nextVal == null) {
+        // As per SQL standard, if any of the values is null, arithmetic operators will return null.
+        return null;
+      }
+      retVal = nextVal.doubleValue();
       while (fieldsIter.hasNext()) {
-        retVal = op.compute(retVal, ((Number) fieldsIter.next().compute(values)).doubleValue());
+        nextVal = (Number) fieldsIter.next().compute(values);
+        if (nextVal == null) {
+          // As per SQL standard, if any of the values is null, arithmetic operators will return null.
+          return null;
+        }
+        retVal = op.compute(retVal, (nextVal).doubleValue());
       }
     }
     return retVal;
@@ -268,7 +279,7 @@ static Ops lookup(String fn)
     /**
      * Ensures the following order: numeric > NaN > Infinite.
      *
-     * The name may be referenced via Ordering.valueOf(String) in the constructor {@link
+     * The name may be referenced via {@link #valueOf(String)} in the constructor {@link
      * ArithmeticPostAggregator#ArithmeticPostAggregator(String, String, List, String)}.
      */
     @SuppressWarnings("unused")
diff --git a/processing/src/main/java/io/druid/query/aggregation/post/DoubleGreatestPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/DoubleGreatestPostAggregator.java
index 584d022675b..6f67c3a9f3e 100644
--- a/processing/src/main/java/io/druid/query/aggregation/post/DoubleGreatestPostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/post/DoubleGreatestPostAggregator.java
@@ -23,6 +23,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.query.Queries;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
@@ -37,14 +38,9 @@
 
 public class DoubleGreatestPostAggregator implements PostAggregator
 {
-  private static final Comparator COMPARATOR = new Comparator()
-  {
-    @Override
-    public int compare(Object o, Object o1)
-    {
-      return ((Double) o).compareTo((Double) o1);
-    }
-  };
+  private static final Comparator<Number> COMPARATOR = Comparator.nullsFirst(
+      Comparator.comparingDouble(Number::doubleValue)
+  );
 
   private final String name;
   private final List<PostAggregator> fields;
@@ -81,13 +77,15 @@ public Comparator getComparator()
   public Object compute(Map<String, Object> values)
   {
     Iterator<PostAggregator> fieldsIter = fields.iterator();
-    double retVal = Double.NEGATIVE_INFINITY;
-    if (fieldsIter.hasNext()) {
-      retVal = ((Number) fieldsIter.next().compute(values)).doubleValue();
-      while (fieldsIter.hasNext()) {
-        double other = ((Number) fieldsIter.next().compute(values)).doubleValue();
-        if (other > retVal) {
-          retVal = other;
+    Double retVal = NullHandling.replaceWithDefault() ? Double.NEGATIVE_INFINITY : null;
+    while (fieldsIter.hasNext()) {
+      Number nextVal = ((Number) fieldsIter.next().compute(values));
+      // Ignore NULL values and return the greatest out of non-null values.
+      if (nextVal != null && (retVal == null || COMPARATOR.compare(nextVal, retVal) > 0)) {
+        if (nextVal instanceof Double) {
+          retVal = (Double) nextVal;
+        } else {
+          retVal = nextVal.doubleValue();
         }
       }
     }
diff --git a/processing/src/main/java/io/druid/query/aggregation/post/DoubleLeastPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/DoubleLeastPostAggregator.java
index f66c81d9e76..da59c261dc5 100644
--- a/processing/src/main/java/io/druid/query/aggregation/post/DoubleLeastPostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/post/DoubleLeastPostAggregator.java
@@ -23,6 +23,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.query.Queries;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
@@ -37,14 +38,7 @@
 
 public class DoubleLeastPostAggregator implements PostAggregator
 {
-  private static final Comparator COMPARATOR = new Comparator()
-  {
-    @Override
-    public int compare(Object o, Object o1)
-    {
-      return ((Double) o).compareTo((Double) o1);
-    }
-  };
+  private static final Comparator<Number> COMPARATOR = Comparator.nullsLast(Comparator.comparingDouble(Number::doubleValue));
 
   private final String name;
   private final List<PostAggregator> fields;
@@ -81,13 +75,15 @@ public Comparator getComparator()
   public Object compute(Map<String, Object> values)
   {
     Iterator<PostAggregator> fieldsIter = fields.iterator();
-    double retVal = Double.POSITIVE_INFINITY;
-    if (fieldsIter.hasNext()) {
-      retVal = ((Number) fieldsIter.next().compute(values)).doubleValue();
-      while (fieldsIter.hasNext()) {
-        double other = ((Number) fieldsIter.next().compute(values)).doubleValue();
-        if (other < retVal) {
-          retVal = other;
+    Double retVal = NullHandling.replaceWithDefault() ? Double.POSITIVE_INFINITY : null;
+    while (fieldsIter.hasNext()) {
+      Number nextVal = ((Number) fieldsIter.next().compute(values));
+      // Ignore NULL values and return the least out of non-null values.
+      if (nextVal != null && (retVal == null || COMPARATOR.compare(nextVal, retVal) < 0)) {
+        if (nextVal instanceof Double) {
+          retVal = (Double) nextVal;
+        } else {
+          retVal = nextVal.doubleValue();
         }
       }
     }
diff --git a/processing/src/main/java/io/druid/query/aggregation/post/LongGreatestPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/LongGreatestPostAggregator.java
index 695568c4dad..4c02961fb31 100644
--- a/processing/src/main/java/io/druid/query/aggregation/post/LongGreatestPostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/post/LongGreatestPostAggregator.java
@@ -23,7 +23,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
-import com.google.common.primitives.Longs;
+import io.druid.common.config.NullHandling;
 import io.druid.query.Queries;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
@@ -38,14 +38,7 @@
 
 public class LongGreatestPostAggregator implements PostAggregator
 {
-  private static final Comparator COMPARATOR = new Comparator()
-  {
-    @Override
-    public int compare(Object o, Object o1)
-    {
-      return Longs.compare(((Number) o).longValue(), ((Number) o1).longValue());
-    }
-  };
+  private static final Comparator<Number> COMPARATOR = Comparator.nullsFirst(Comparator.comparingLong(Number::longValue));
 
   private final String name;
   private final List<PostAggregator> fields;
@@ -82,13 +75,15 @@ public Comparator getComparator()
   public Object compute(Map<String, Object> values)
   {
     Iterator<PostAggregator> fieldsIter = fields.iterator();
-    long retVal = Long.MIN_VALUE;
-    if (fieldsIter.hasNext()) {
-      retVal = ((Number) fieldsIter.next().compute(values)).longValue();
-      while (fieldsIter.hasNext()) {
-        long other = ((Number) fieldsIter.next().compute(values)).longValue();
-        if (other > retVal) {
-          retVal = other;
+    Long retVal = NullHandling.replaceWithDefault() ? Long.MIN_VALUE : null;
+    while (fieldsIter.hasNext()) {
+      Number nextVal = ((Number) fieldsIter.next().compute(values));
+      // Ignore NULL values and return the greatest out of non-null values.
+      if (nextVal != null && (retVal == null || COMPARATOR.compare(nextVal, retVal) > 0)) {
+        if (nextVal instanceof Long) {
+          retVal = (Long) nextVal;
+        } else {
+          retVal = nextVal.longValue();
         }
       }
     }
diff --git a/processing/src/main/java/io/druid/query/aggregation/post/LongLeastPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/LongLeastPostAggregator.java
index f14bcd8228f..de1a2cd6bd3 100644
--- a/processing/src/main/java/io/druid/query/aggregation/post/LongLeastPostAggregator.java
+++ b/processing/src/main/java/io/druid/query/aggregation/post/LongLeastPostAggregator.java
@@ -23,7 +23,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
-import com.google.common.primitives.Longs;
+import io.druid.common.config.NullHandling;
 import io.druid.query.Queries;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
@@ -38,14 +38,7 @@
 
 public class LongLeastPostAggregator implements PostAggregator
 {
-  private static final Comparator COMPARATOR = new Comparator()
-  {
-    @Override
-    public int compare(Object o, Object o1)
-    {
-      return Longs.compare(((Number) o).longValue(), ((Number) o1).longValue());
-    }
-  };
+  private static final Comparator<Number> COMPARATOR = Comparator.nullsLast(Comparator.comparingLong(Number::longValue));
 
   private final String name;
   private final List<PostAggregator> fields;
@@ -82,13 +75,15 @@ public Comparator getComparator()
   public Object compute(Map<String, Object> values)
   {
     Iterator<PostAggregator> fieldsIter = fields.iterator();
-    long retVal = Long.MAX_VALUE;
-    if (fieldsIter.hasNext()) {
-      retVal = ((Number) fieldsIter.next().compute(values)).longValue();
-      while (fieldsIter.hasNext()) {
-        long other = ((Number) fieldsIter.next().compute(values)).longValue();
-        if (other < retVal) {
-          retVal = other;
+    Long retVal = NullHandling.replaceWithDefault() ? Long.MAX_VALUE : null;
+    while (fieldsIter.hasNext()) {
+      Number nextVal = ((Number) fieldsIter.next().compute(values));
+      // Ignore NULL values and return the least out of non-null values.
+      if (nextVal != null && (retVal == null || COMPARATOR.compare(nextVal, retVal) < 0)) {
+        if (nextVal instanceof Long) {
+          retVal = (Long) nextVal;
+        } else {
+          retVal = nextVal.longValue();
         }
       }
     }
diff --git a/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java b/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java
index c7ab3873b57..3fb3e852d67 100644
--- a/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java
+++ b/processing/src/main/java/io/druid/query/dimension/ListFilteredDimensionSpec.java
@@ -23,7 +23,7 @@
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
 import com.google.common.base.Predicates;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.filter.DimFilterUtils;
 import io.druid.segment.DimensionSelector;
@@ -106,7 +106,7 @@ private DimensionSelector filterWhiteList(DimensionSelector selector)
       }
     } else {
       for (int i = 0; i < selectorCardinality; i++) {
-        if (values.contains(Strings.nullToEmpty(selector.lookupName(i)))) {
+        if (values.contains(NullHandling.nullToEmptyIfNeeded(selector.lookupName(i)))) {
           forwardMapping.put(i, count);
           reverseMapping[count++] = i;
         }
@@ -137,7 +137,7 @@ public boolean apply(@Nullable String input)
     forwardMapping.defaultReturnValue(-1);
     final int[] reverseMapping = new int[maxPossibleFilteredCardinality];
     for (int i = 0; i < selectorCardinality; i++) {
-      if (!values.contains(Strings.nullToEmpty(selector.lookupName(i)))) {
+      if (!values.contains(NullHandling.nullToEmptyIfNeeded(selector.lookupName(i)))) {
         forwardMapping.put(i, count);
         reverseMapping[count++] = i;
       }
diff --git a/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java b/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java
index 4ebd407083f..ce11703476b 100644
--- a/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java
+++ b/processing/src/main/java/io/druid/query/dimension/RegexFilteredDimensionSpec.java
@@ -22,7 +22,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.filter.DimFilterUtils;
 import io.druid.segment.DimensionSelector;
@@ -76,7 +76,7 @@ public DimensionSelector decorate(final DimensionSelector selector)
             @Override
             public boolean apply(@Nullable String input)
             {
-              return compiledRegex.matcher(Strings.nullToEmpty(input)).matches();
+              return compiledRegex.matcher(NullHandling.nullToEmptyIfNeeded(input)).matches();
             }
           }
       );
@@ -86,7 +86,8 @@ public boolean apply(@Nullable String input)
     final Int2IntOpenHashMap forwardMapping = new Int2IntOpenHashMap();
     forwardMapping.defaultReturnValue(-1);
     for (int i = 0; i < selectorCardinality; i++) {
-      if (compiledRegex.matcher(Strings.nullToEmpty(selector.lookupName(i))).matches()) {
+      String val = NullHandling.nullToEmptyIfNeeded(selector.lookupName(i));
+      if (val != null && compiledRegex.matcher(val).matches()) {
         forwardMapping.put(i, count++);
       }
     }
diff --git a/processing/src/main/java/io/druid/query/expression/ExprUtils.java b/processing/src/main/java/io/druid/query/expression/ExprUtils.java
index 38ac85f415e..dd0a166e5e8 100644
--- a/processing/src/main/java/io/druid/query/expression/ExprUtils.java
+++ b/processing/src/main/java/io/druid/query/expression/ExprUtils.java
@@ -23,6 +23,7 @@
 import io.druid.java.util.common.IAE;
 import io.druid.java.util.common.granularity.PeriodGranularity;
 import io.druid.math.expr.Expr;
+import org.apache.commons.lang.StringUtils;
 import org.joda.time.Chronology;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -73,7 +74,14 @@ public static PeriodGranularity toPeriodGranularity(
     } else {
       Chronology chronology = timeZone == null ? ISOChronology.getInstanceUTC() : ISOChronology.getInstance(timeZone);
       final Object value = originArg.eval(bindings).value();
-      origin = value != null ? new DateTime(value, chronology) : null;
+      if (value instanceof String && StringUtils.isEmpty((String) value)) {
+        // We get a blank string here, when sql compatible null handling is enabled
+        // and expression contains empty string for for origin
+        // e.g timestamp_floor(\"__time\",'PT1M','','UTC')
+        origin = null;
+      } else {
+        origin = value != null ? new DateTime(value, chronology) : null;
+      }
     }
 
     return new PeriodGranularity(period, origin, timeZone);
diff --git a/processing/src/main/java/io/druid/query/expression/LikeExprMacro.java b/processing/src/main/java/io/druid/query/expression/LikeExprMacro.java
index c60400be33e..d03ea8d13c7 100644
--- a/processing/src/main/java/io/druid/query/expression/LikeExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/LikeExprMacro.java
@@ -19,7 +19,7 @@
 
 package io.druid.query.expression;
 
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.IAE;
 import io.druid.math.expr.Expr;
 import io.druid.math.expr.ExprEval;
@@ -63,7 +63,7 @@ public Expr apply(final List<Expr> args)
     }
 
     final LikeDimFilter.LikeMatcher likeMatcher = LikeDimFilter.LikeMatcher.from(
-        Strings.nullToEmpty((String) patternExpr.getLiteralValue()),
+        NullHandling.nullToEmptyIfNeeded((String) patternExpr.getLiteralValue()),
         escapeChar
     );
 
diff --git a/processing/src/main/java/io/druid/query/expression/RegexpExtractExprMacro.java b/processing/src/main/java/io/druid/query/expression/RegexpExtractExprMacro.java
index f9a4273a05a..940630c250c 100644
--- a/processing/src/main/java/io/druid/query/expression/RegexpExtractExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/RegexpExtractExprMacro.java
@@ -19,7 +19,7 @@
 
 package io.druid.query.expression;
 
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.IAE;
 import io.druid.math.expr.Expr;
 import io.druid.math.expr.ExprEval;
@@ -63,9 +63,10 @@ public Expr apply(final List<Expr> args)
       @Override
       public ExprEval eval(final ObjectBinding bindings)
       {
-        final Matcher matcher = pattern.matcher(Strings.nullToEmpty(arg.eval(bindings).asString()));
+        String s = arg.eval(bindings).asString();
+        final Matcher matcher = pattern.matcher(NullHandling.nullToEmptyIfNeeded(s));
         final String retVal = matcher.find() ? matcher.group(index) : null;
-        return ExprEval.of(Strings.emptyToNull(retVal));
+        return ExprEval.of(NullHandling.emptyToNullIfNeeded(retVal));
       }
 
       @Override
diff --git a/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java
index 48ce083e730..0bba43ebcf4 100644
--- a/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java
@@ -67,6 +67,11 @@ public TimestampCeilExpr(final List<Expr> args)
     @Override
     public ExprEval eval(final ObjectBinding bindings)
     {
+      ExprEval eval = arg.eval(bindings);
+      if (eval.isNumericNull()) {
+        // Return null if the argument is null.
+        return ExprEval.of(null);
+      }
       return ExprEval.of(granularity.bucketEnd(DateTimes.utc(arg.eval(bindings).asLong())).getMillis());
     }
 
diff --git a/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java
index bb1b2ca5f24..bdea9147c0f 100644
--- a/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java
@@ -88,7 +88,12 @@ public Expr apply(final List<Expr> args)
       @Override
       public ExprEval eval(final ObjectBinding bindings)
       {
-        final DateTime dateTime = new DateTime(arg.eval(bindings).asLong(), chronology);
+        Object val = arg.eval(bindings).value();
+        if (val == null) {
+          // Return null if the argument is null.
+          return ExprEval.of(null);
+        }
+        final DateTime dateTime = new DateTime(val, chronology);
         switch (unit) {
           case EPOCH:
             return ExprEval.of(dateTime.getMillis() / 1000);
diff --git a/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java
index 984fb8592c0..bde9339eb49 100644
--- a/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java
@@ -92,7 +92,12 @@ public PeriodGranularity getGranularity()
     @Override
     public ExprEval eval(final ObjectBinding bindings)
     {
-      return ExprEval.of(granularity.bucketStart(DateTimes.utc(arg.eval(bindings).asLong())).getMillis());
+      ExprEval eval = arg.eval(bindings);
+      if (eval.isNumericNull()) {
+        // Return null if the argument is null.
+        return ExprEval.of(null);
+      }
+      return ExprEval.of(granularity.bucketStart(DateTimes.utc(eval.asLong())).getMillis());
     }
 
     @Override
diff --git a/processing/src/main/java/io/druid/query/expression/TimestampFormatExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampFormatExprMacro.java
index 71af26d8633..d4063c49ca0 100644
--- a/processing/src/main/java/io/druid/query/expression/TimestampFormatExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/TimestampFormatExprMacro.java
@@ -74,6 +74,11 @@ public Expr apply(final List<Expr> args)
       @Override
       public ExprEval eval(final ObjectBinding bindings)
       {
+        ExprEval eval = arg.eval(bindings);
+        if (eval.isNumericNull()) {
+          // Return null if the argument is null.
+          return ExprEval.of(null);
+        }
         return ExprEval.of(formatter.print(arg.eval(bindings).asLong()));
       }
 
diff --git a/processing/src/main/java/io/druid/query/expression/TrimExprMacro.java b/processing/src/main/java/io/druid/query/expression/TrimExprMacro.java
index b026f267baa..ebaf6f42dd8 100644
--- a/processing/src/main/java/io/druid/query/expression/TrimExprMacro.java
+++ b/processing/src/main/java/io/druid/query/expression/TrimExprMacro.java
@@ -113,7 +113,7 @@ public ExprEval eval(final ObjectBinding bindings)
     {
       final ExprEval stringEval = stringExpr.eval(bindings);
 
-      if (chars.length == 0 || stringEval.isNull()) {
+      if (chars.length == 0 || stringEval.value() == null) {
         return stringEval;
       }
 
@@ -176,13 +176,13 @@ public ExprEval eval(final ObjectBinding bindings)
     {
       final ExprEval stringEval = stringExpr.eval(bindings);
 
-      if (stringEval.isNull()) {
+      if (stringEval.value() == null) {
         return stringEval;
       }
 
       final ExprEval charsEval = charsExpr.eval(bindings);
 
-      if (charsEval.isNull()) {
+      if (charsEval.value() == null) {
         return stringEval;
       }
 
diff --git a/processing/src/main/java/io/druid/query/extraction/FunctionalExtraction.java b/processing/src/main/java/io/druid/query/extraction/FunctionalExtraction.java
index bd762574efd..1bacaab5ec5 100644
--- a/processing/src/main/java/io/druid/query/extraction/FunctionalExtraction.java
+++ b/processing/src/main/java/io/druid/query/extraction/FunctionalExtraction.java
@@ -21,7 +21,7 @@
 
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 
 import javax.annotation.Nullable;
 
@@ -52,9 +52,9 @@ public FunctionalExtraction(
   )
   {
     this.retainMissingValue = retainMissingValue;
-    this.replaceMissingValueWith = Strings.emptyToNull(replaceMissingValueWith);
+    this.replaceMissingValueWith = NullHandling.emptyToNullIfNeeded(replaceMissingValueWith);
     Preconditions.checkArgument(
-        !(this.retainMissingValue && !Strings.isNullOrEmpty(this.replaceMissingValueWith)),
+        !(this.retainMissingValue && !(this.replaceMissingValueWith == null)),
         "Cannot specify a [replaceMissingValueWith] and set [retainMissingValue] to true"
     );
 
@@ -69,7 +69,7 @@ public FunctionalExtraction(
         public String apply(@Nullable String dimValue)
         {
           final String retval = extractionFunction.apply(dimValue);
-          return Strings.isNullOrEmpty(retval) ? Strings.emptyToNull(dimValue) : retval;
+          return NullHandling.isNullOrEquivalent(retval) ? NullHandling.emptyToNullIfNeeded(dimValue) : retval;
         }
       };
     } else {
@@ -79,8 +79,10 @@ public String apply(@Nullable String dimValue)
         @Override
         public String apply(@Nullable String dimValue)
         {
-          final String retval = extractionFunction.apply(dimValue);
-          return Strings.isNullOrEmpty(retval) ? FunctionalExtraction.this.replaceMissingValueWith : retval;
+          final String retval = NullHandling.emptyToNullIfNeeded(extractionFunction.apply(dimValue));
+          return retval == null
+                 ? FunctionalExtraction.this.replaceMissingValueWith
+                 : retval;
         }
       };
     }
diff --git a/processing/src/main/java/io/druid/query/extraction/IdentityExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/IdentityExtractionFn.java
index 1bf700feaaf..0967bdfac0a 100644
--- a/processing/src/main/java/io/druid/query/extraction/IdentityExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/IdentityExtractionFn.java
@@ -19,7 +19,7 @@
 
 package io.druid.query.extraction;
 
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 
 import javax.annotation.Nullable;
 
@@ -42,14 +42,14 @@ private IdentityExtractionFn()
   @Nullable
   public String apply(@Nullable Object value)
   {
-    return value == null ? null : Strings.emptyToNull(value.toString());
+    return value == null ? null : NullHandling.emptyToNullIfNeeded(value.toString());
   }
 
   @Override
   @Nullable
   public String apply(@Nullable String value)
   {
-    return Strings.emptyToNull(value);
+    return NullHandling.emptyToNullIfNeeded(value);
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java
index 1d4e479acd8..fa7929f8ce7 100644
--- a/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/JavaScriptExtractionFn.java
@@ -24,7 +24,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.js.JavaScriptConfig;
 import org.mozilla.javascript.Context;
@@ -113,7 +113,7 @@ public boolean isInjective()
   public String apply(@Nullable Object value)
   {
     checkAndCompileScript();
-    return Strings.emptyToNull(fn.apply(value));
+    return NullHandling.emptyToNullIfNeeded(fn.apply(value));
   }
 
   /**
@@ -139,7 +139,7 @@ private void checkAndCompileScript()
   @Nullable
   public String apply(@Nullable String value)
   {
-    return this.apply((Object) Strings.emptyToNull(value));
+    return this.apply((Object) NullHandling.emptyToNullIfNeeded(value));
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java
index da2a8e3bc70..90648d6a9e8 100644
--- a/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/LowerExtractionFn.java
@@ -21,7 +21,7 @@
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 
 import javax.annotation.Nullable;
@@ -52,7 +52,7 @@ public LowerExtractionFn(@JsonProperty("locale") String localeString)
   @Override
   public String apply(@Nullable String key)
   {
-    if (Strings.isNullOrEmpty(key)) {
+    if (NullHandling.isNullOrEquivalent(key)) {
       return null;
     }
     return key.toLowerCase(locale);
@@ -73,7 +73,7 @@ public ExtractionType getExtractionType()
   @Override
   public byte[] getCacheKey()
   {
-    byte[] localeBytes = StringUtils.toUtf8(Strings.nullToEmpty(localeString));
+    byte[] localeBytes = StringUtils.toUtf8(StringUtils.nullToEmptyNonDruidDataString(localeString));
     return ByteBuffer.allocate(2 + localeBytes.length)
                      .put(ExtractionCacheHelper.CACHE_TYPE_ID_LOWER)
                      .put((byte) 0XFF)
diff --git a/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java b/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java
index b818bd8cd50..2585e51e703 100644
--- a/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java
+++ b/processing/src/main/java/io/druid/query/extraction/MapLookupExtractor.java
@@ -23,21 +23,20 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
 import com.google.common.base.Strings;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.lookup.LookupExtractor;
 
 import javax.annotation.Nullable;
-import javax.validation.constraints.NotNull;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 @JsonTypeName("map")
 public class MapLookupExtractor extends LookupExtractor
@@ -64,22 +63,32 @@ public MapLookupExtractor(
 
   @Nullable
   @Override
-  public String apply(@NotNull String val)
+  public String apply(@Nullable String key)
   {
-    return map.get(val);
+    String keyEquivalent = NullHandling.nullToEmptyIfNeeded(key);
+    if (keyEquivalent == null) {
+      // keyEquivalent is null only for SQL Compatible Null Behavior
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
+      return null;
+    }
+    return NullHandling.emptyToNullIfNeeded(map.get(keyEquivalent));
   }
 
   @Override
-  public List<String> unapply(final String value)
+  public List<String> unapply(@Nullable final String value)
   {
-    return Lists.newArrayList(Maps.filterKeys(map, new Predicate<String>()
-    {
-      @Override public boolean apply(@Nullable String key)
-      {
-        return map.get(key).equals(Strings.nullToEmpty(value));
-      }
-    }).keySet());
-
+    String valueToLookup = NullHandling.nullToEmptyIfNeeded(value);
+    if (valueToLookup == null) {
+      // valueToLookup is null only for SQL Compatible Null Behavior
+      // otherwise null will be replaced with empty string in nullToEmptyIfNeeded above.
+      // null value maps to empty list when SQL Compatible
+      return Collections.emptyList();
+    }
+    return map.entrySet()
+              .stream()
+              .filter(entry -> entry.getValue().equals(valueToLookup))
+              .map(entry -> entry.getKey())
+              .collect(Collectors.toList());
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java
index 4d121be031f..5c1054afad9 100644
--- a/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java
@@ -22,7 +22,7 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 
 import javax.annotation.Nullable;
@@ -62,7 +62,7 @@ public MatchingDimExtractionFn(
   @Override
   public String apply(@Nullable String dimValue)
   {
-    if (Strings.isNullOrEmpty(dimValue)) {
+    if (NullHandling.isNullOrEquivalent(dimValue)) {
       // We'd return null whether or not the pattern matched
       return null;
     }
diff --git a/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java
index 3a1b0a5cc29..feab409d4ba 100644
--- a/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java
@@ -22,8 +22,8 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.primitives.Ints;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 
 import javax.annotation.Nullable;
@@ -106,13 +106,14 @@ public RegexDimExtractionFn(
   public String apply(@Nullable String dimValue)
   {
     final String retVal;
-    final Matcher matcher = pattern.matcher(Strings.nullToEmpty(dimValue));
-    if (matcher.find()) {
+    String val = NullHandling.nullToEmptyIfNeeded(dimValue);
+    final Matcher matcher = val == null ? null : pattern.matcher(val);
+    if (matcher != null && matcher.find()) {
       retVal = matcher.group(index);
     } else {
       retVal = replaceMissingValue ? replaceMissingValueWith : dimValue;
     }
-    return Strings.emptyToNull(retVal);
+    return NullHandling.emptyToNullIfNeeded(retVal);
   }
 
   @JsonProperty("expr")
diff --git a/processing/src/main/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFn.java
index 1f005523925..3e30daa6f5f 100644
--- a/processing/src/main/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/SearchQuerySpecDimExtractionFn.java
@@ -22,7 +22,7 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.query.search.SearchQuerySpec;
 
 import javax.annotation.Nullable;
@@ -64,7 +64,7 @@ public SearchQuerySpec getSearchQuerySpec()
   @Override
   public String apply(@Nullable String dimValue)
   {
-    return searchQuerySpec.accept(dimValue) ? Strings.emptyToNull(dimValue) : null;
+    return searchQuerySpec.accept(dimValue) ? NullHandling.emptyToNullIfNeeded(dimValue) : null;
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java
index 168cad01ea2..979fe96e991 100644
--- a/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java
@@ -107,7 +107,7 @@ public String apply(@Nullable String value)
         value = "";
       }
     }
-    return Strings.emptyToNull(StringUtils.format(format, value));
+    return io.druid.common.config.NullHandling.emptyToNullIfNeeded(StringUtils.format(format, value));
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/extraction/StrlenExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/StrlenExtractionFn.java
index 5ad88fa38fc..f2379089db9 100644
--- a/processing/src/main/java/io/druid/query/extraction/StrlenExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/StrlenExtractionFn.java
@@ -20,6 +20,7 @@
 package io.druid.query.extraction;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
+import io.druid.common.config.NullHandling;
 
 import javax.annotation.Nullable;
 
@@ -40,6 +41,9 @@ public static StrlenExtractionFn instance()
   @Override
   public String apply(@Nullable String value)
   {
+    if (NullHandling.sqlCompatible() && value == null) {
+      return null;
+    }
     return String.valueOf(value == null ? 0 : value.length());
   }
 
diff --git a/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java
index 75b251f18fa..c980aaa920f 100644
--- a/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java
@@ -22,7 +22,7 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 
 import javax.annotation.Nullable;
@@ -63,7 +63,7 @@ public SubstringDimExtractionFn(
   @Override
   public String apply(@Nullable String dimValue)
   {
-    if (Strings.isNullOrEmpty(dimValue)) {
+    if (NullHandling.isNullOrEquivalent(dimValue)) {
       return null;
     }
 
diff --git a/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java
index e731b3f3d0d..064e519e148 100644
--- a/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java
@@ -22,8 +22,8 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.ibm.icu.text.SimpleDateFormat;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.StringUtils;
 import org.joda.time.DateTime;
@@ -120,7 +120,7 @@ public TimeDimExtractionFn(
   @Override
   public String apply(@Nullable String dimValue)
   {
-    if (Strings.isNullOrEmpty(dimValue)) {
+    if (NullHandling.isNullOrEquivalent(dimValue)) {
       return null;
     }
 
diff --git a/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java
index 34368f7f0fd..10a7c81835f 100644
--- a/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/UpperExtractionFn.java
@@ -21,7 +21,7 @@
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
-import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 
 import javax.annotation.Nullable;
@@ -51,7 +51,7 @@ public UpperExtractionFn(@JsonProperty("locale") String localeString)
   @Override
   public String apply(@Nullable String key)
   {
-    if (Strings.isNullOrEmpty(key)) {
+    if (NullHandling.isNullOrEquivalent(key)) {
       return null;
     }
     return key.toUpperCase(locale);
@@ -72,7 +72,7 @@ public ExtractionType getExtractionType()
   @Override
   public byte[] getCacheKey()
   {
-    byte[] localeBytes = StringUtils.toUtf8(Strings.nullToEmpty(localeString));
+    byte[] localeBytes = StringUtils.toUtf8(StringUtils.nullToEmptyNonDruidDataString(localeString));
     return ByteBuffer.allocate(2 + localeBytes.length)
                      .put(ExtractionCacheHelper.CACHE_TYPE_ID_UPPER)
                      .put((byte) 0XFF)
diff --git a/processing/src/main/java/io/druid/query/filter/DoubleValueMatcherColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/filter/DoubleValueMatcherColumnSelectorStrategy.java
index f41e6c6a024..7ff0aa2d352 100644
--- a/processing/src/main/java/io/druid/query/filter/DoubleValueMatcherColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/filter/DoubleValueMatcherColumnSelectorStrategy.java
@@ -22,7 +22,6 @@
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
 import io.druid.segment.BaseDoubleColumnValueSelector;
 import io.druid.segment.DimensionHandlerUtils;
-import io.druid.segment.filter.BooleanValueMatcher;
 
 
 public class DoubleValueMatcherColumnSelectorStrategy
@@ -33,7 +32,7 @@ public ValueMatcher makeValueMatcher(final BaseDoubleColumnValueSelector selecto
   {
     final Double matchVal = DimensionHandlerUtils.convertObjectToDouble(value);
     if (matchVal == null) {
-      return BooleanValueMatcher.of(false);
+      return ValueMatcher.nullValueMatcher(selector);
     }
 
     final long matchValLongBits = Double.doubleToLongBits(matchVal);
@@ -65,6 +64,9 @@ public ValueMatcher makeValueMatcher(
       @Override
       public boolean matches()
       {
+        if (selector.isNull()) {
+          return predicate.applyNull();
+        }
         return predicate.applyDouble(selector.getDouble());
       }
 
@@ -80,13 +82,11 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
   @Override
   public ValueGetter makeValueGetter(final BaseDoubleColumnValueSelector selector)
   {
-    return new ValueGetter()
-    {
-      @Override
-      public String[] get()
-      {
-        return new String[]{Double.toString(selector.getDouble())};
+    return () -> {
+      if (selector.isNull()) {
+        return null;
       }
+      return new String[]{Double.toString(selector.getDouble())};
     };
   }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/DruidDoublePredicate.java b/processing/src/main/java/io/druid/query/filter/DruidDoublePredicate.java
index c28b8d37687..d3ab89c36c8 100644
--- a/processing/src/main/java/io/druid/query/filter/DruidDoublePredicate.java
+++ b/processing/src/main/java/io/druid/query/filter/DruidDoublePredicate.java
@@ -19,12 +19,38 @@
 
 package io.druid.query.filter;
 
-
+/**
+ * Note: this is not a {@link io.druid.guice.annotations.PublicApi} or an
+ * {@link io.druid.guice.annotations.ExtensionPoint} of Druid.
+ */
+// All implementations are currently lambda expressions and intellij inspections wrongly complain about unused variables.
+// SuppressWarnings can be removed once https://youtrack.jetbrains.com/issue/IDEA-191743 is resolved.
+@SuppressWarnings("unused")
 public interface DruidDoublePredicate
 {
   DruidDoublePredicate ALWAYS_FALSE = input -> false;
 
   DruidDoublePredicate ALWAYS_TRUE = input -> true;
 
+  DruidDoublePredicate MATCH_NULL_ONLY = new DruidDoublePredicate()
+  {
+    @Override
+    public boolean applyDouble(double input)
+    {
+      return false;
+    }
+
+    @Override
+    public boolean applyNull()
+    {
+      return true;
+    }
+  };
+
   boolean applyDouble(double input);
+
+  default boolean applyNull()
+  {
+    return false;
+  }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/DruidFloatPredicate.java b/processing/src/main/java/io/druid/query/filter/DruidFloatPredicate.java
index 2aa2674dd24..77dfb306ac4 100644
--- a/processing/src/main/java/io/druid/query/filter/DruidFloatPredicate.java
+++ b/processing/src/main/java/io/druid/query/filter/DruidFloatPredicate.java
@@ -20,11 +20,36 @@
 package io.druid.query.filter;
 
 /**
- * FloatPredicate is only supported in Java 8+, so use this to avoid boxing when a float predicate is needed.
+ * Note: this is not a {@link io.druid.guice.annotations.PublicApi} or an
+ * {@link io.druid.guice.annotations.ExtensionPoint} of Druid.
  */
+// All implementations are currently lambda expressions and intellij inspections wrongly complain about unused variables.
+// SuppressWarnings can be removed once https://youtrack.jetbrains.com/issue/IDEA-191743 is resolved.
+@SuppressWarnings("unused")
 public interface DruidFloatPredicate
 {
   DruidFloatPredicate ALWAYS_FALSE = input -> false;
 
+  DruidFloatPredicate MATCH_NULL_ONLY = new DruidFloatPredicate()
+  {
+    @Override
+    public boolean applyFloat(float input)
+    {
+      return false;
+    }
+
+    @Override
+    public boolean applyNull()
+    {
+      return true;
+    }
+  };
+
+
   boolean applyFloat(float input);
+
+  default boolean applyNull()
+  {
+    return false;
+  }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/DruidLongPredicate.java b/processing/src/main/java/io/druid/query/filter/DruidLongPredicate.java
index 2afca5c40e1..ecf15c03e04 100644
--- a/processing/src/main/java/io/druid/query/filter/DruidLongPredicate.java
+++ b/processing/src/main/java/io/druid/query/filter/DruidLongPredicate.java
@@ -20,7 +20,8 @@
 package io.druid.query.filter;
 
 /**
- * LongPredicate is only supported in Java 8+, so use this to avoid boxing when a long predicate is needed.
+ * Note: this is not a {@link io.druid.guice.annotations.PublicApi} or an
+ * {@link io.druid.guice.annotations.ExtensionPoint} of Druid.
  */
 public interface DruidLongPredicate
 {
@@ -28,5 +29,25 @@
 
   DruidLongPredicate ALWAYS_TRUE = input -> true;
 
+  DruidLongPredicate MATCH_NULL_ONLY = new DruidLongPredicate()
+  {
+    @Override
+    public boolean applyLong(long input)
+    {
+      return false;
+    }
+
+    @Override
+    public boolean applyNull()
+    {
+      return true;
+    }
+  };
+
   boolean applyLong(long input);
+
+  default boolean applyNull()
+  {
+    return false;
+  }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/FloatValueMatcherColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/filter/FloatValueMatcherColumnSelectorStrategy.java
index af7a2b215ed..55ae117ece3 100644
--- a/processing/src/main/java/io/druid/query/filter/FloatValueMatcherColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/filter/FloatValueMatcherColumnSelectorStrategy.java
@@ -22,7 +22,6 @@
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
 import io.druid.segment.BaseFloatColumnValueSelector;
 import io.druid.segment.DimensionHandlerUtils;
-import io.druid.segment.filter.BooleanValueMatcher;
 
 public class FloatValueMatcherColumnSelectorStrategy
     implements ValueMatcherColumnSelectorStrategy<BaseFloatColumnValueSelector>
@@ -32,7 +31,7 @@ public ValueMatcher makeValueMatcher(final BaseFloatColumnValueSelector selector
   {
     final Float matchVal = DimensionHandlerUtils.convertObjectToFloat(value);
     if (matchVal == null) {
-      return BooleanValueMatcher.of(false);
+      return ValueMatcher.nullValueMatcher(selector);
     }
 
     final int matchValIntBits = Float.floatToIntBits(matchVal);
@@ -64,6 +63,9 @@ public ValueMatcher makeValueMatcher(
       @Override
       public boolean matches()
       {
+        if (selector.isNull()) {
+          return predicate.applyNull();
+        }
         return predicate.applyFloat(selector.getFloat());
       }
 
@@ -79,13 +81,11 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
   @Override
   public ValueGetter makeValueGetter(final BaseFloatColumnValueSelector selector)
   {
-    return new ValueGetter()
-    {
-      @Override
-      public String[] get()
-      {
-        return new String[]{Float.toString(selector.getFloat())};
+    return () -> {
+      if (selector.isNull()) {
+        return null;
       }
+      return new String[]{Float.toString(selector.getFloat())};
     };
   }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/InDimFilter.java b/processing/src/main/java/io/druid/query/filter/InDimFilter.java
index 0eddc923b3d..e551364c1de 100644
--- a/processing/src/main/java/io/druid/query/filter/InDimFilter.java
+++ b/processing/src/main/java/io/druid/query/filter/InDimFilter.java
@@ -21,12 +21,10 @@
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Function;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 import com.google.common.base.Supplier;
-import com.google.common.collect.ImmutableSortedSet;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
@@ -34,7 +32,9 @@
 import com.google.common.collect.TreeRangeSet;
 import com.google.common.primitives.Doubles;
 import com.google.common.primitives.Floats;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
+import io.druid.java.util.common.guava.Comparators;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.query.lookup.LookupExtractionFn;
 import io.druid.query.lookup.LookupExtractor;
@@ -53,6 +53,8 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
 
 public class InDimFilter implements DimFilter
 {
@@ -60,7 +62,8 @@
   // Hashing threshold is not applied to String for now, String still uses ImmutableSortedSet
   public static final int NUMERIC_HASHING_THRESHOLD = 16;
 
-  private final ImmutableSortedSet<String> values;
+  // Values can contain `null` object
+  private final SortedSet<String> values;
   private final String dimension;
   private final ExtractionFn extractionFn;
   private final Supplier<DruidLongPredicate> longPredicateSupplier;
@@ -76,19 +79,11 @@ public InDimFilter(
   {
     Preconditions.checkNotNull(dimension, "dimension can not be null");
     Preconditions.checkArgument(values != null && !values.isEmpty(), "values can not be null or empty");
-    this.values = ImmutableSortedSet.copyOf(
-        Iterables.transform(
-            values, new Function<String, String>()
-            {
-              @Override
-              public String apply(String input)
-              {
-                return Strings.nullToEmpty(input);
-              }
 
-            }
-        )
-    );
+    this.values = new TreeSet<>(Comparators.naturalNullsFirst());
+    for (String value : values) {
+      this.values.add(NullHandling.emptyToNullIfNeeded(value));
+    }
     this.dimension = dimension;
     this.extractionFn = extractionFn;
     this.longPredicateSupplier = getLongPredicateSupplier();
@@ -121,14 +116,21 @@ public ExtractionFn getExtractionFn()
     final byte[][] valuesBytes = new byte[values.size()][];
     int valuesBytesSize = 0;
     int index = 0;
+    boolean hasNull = false;
     for (String value : values) {
+      if (value == null) {
+        hasNull = true;
+      }
+      //CHECKSTYLE.OFF: Regexp
+      // Strings.nullToEmpty is safe to use here as we have encoded nullability in hasNull flag.
       valuesBytes[index] = StringUtils.toUtf8(Strings.nullToEmpty(value));
+      //CHECKSTYLE.ON: Regexp
       valuesBytesSize += valuesBytes[index].length + 1;
       ++index;
     }
     byte[] extractionFnBytes = extractionFn == null ? new byte[0] : extractionFn.getCacheKey();
 
-    ByteBuffer filterCacheKey = ByteBuffer.allocate(3
+    ByteBuffer filterCacheKey = ByteBuffer.allocate(5
                                                     + dimensionBytes.length
                                                     + valuesBytesSize
                                                     + extractionFnBytes.length)
@@ -136,6 +138,8 @@ public ExtractionFn getExtractionFn()
                                           .put(dimensionBytes)
                                           .put(DimFilterUtils.STRING_SEPARATOR)
                                           .put(extractionFnBytes)
+                                          .put(DimFilterUtils.STRING_SEPARATOR)
+                                          .put(hasNull ? NullHandling.IS_NULL_BYTE : NullHandling.IS_NOT_NULL_BYTE)
                                           .put(DimFilterUtils.STRING_SEPARATOR);
     for (byte[] bytes : valuesBytes) {
       filterCacheKey.put(bytes)
@@ -167,7 +171,7 @@ private InDimFilter optimizeLookup()
         // We cannot do an unapply()-based optimization if the selector value
         // and the replaceMissingValuesWith value are the same, since we have to match on
         // all values that are not present in the lookup.
-        final String convertedValue = Strings.emptyToNull(value);
+        final String convertedValue = NullHandling.emptyToNullIfNeeded(value);
         if (!exFn.isRetainMissingValue() && Objects.equals(convertedValue, exFn.getReplaceMissingValueWith())) {
           return this;
         }
@@ -177,7 +181,7 @@ private InDimFilter optimizeLookup()
         // there may be row values that match the selector value but are not included
         // in the lookup map. Match on the selector value as well.
         // If the selector value is overwritten in the lookup map, don't add selector value to keys.
-        if (exFn.isRetainMissingValue() && lookup.apply(convertedValue) == null) {
+        if (exFn.isRetainMissingValue() && NullHandling.isNullOrEquivalent(lookup.apply(convertedValue))) {
           keys.add(convertedValue);
         }
       }
@@ -212,7 +216,15 @@ public Filter toFilter()
     }
     RangeSet<String> retSet = TreeRangeSet.create();
     for (String value : values) {
-      retSet.add(Range.singleton(Strings.nullToEmpty(value)));
+      String valueEquivalent = NullHandling.nullToEmptyIfNeeded(value);
+      if (valueEquivalent == null) {
+        // Case when SQL compatible null handling is enabled
+        // Range.singleton(null) is invalid, so use the fact that
+        // only null values are less than empty string.
+        retSet.add(Range.lessThan(""));
+      } else {
+        retSet.add(Range.singleton(valueEquivalent));
+      }
     }
     return retSet;
   }
@@ -269,8 +281,9 @@ public String toString()
       builder.append(")");
     }
 
-    builder.append(" IN (").append(Joiner.on(", ").join(values)).append(")");
-
+    builder.append(" IN (")
+           .append(Joiner.on(", ").join(Iterables.transform(values, input -> StringUtils.nullToEmptyNonDruidDataString(input))))
+           .append(")");
     return builder.toString();
   }
 
diff --git a/processing/src/main/java/io/druid/query/filter/LikeDimFilter.java b/processing/src/main/java/io/druid/query/filter/LikeDimFilter.java
index 31cb3b32af3..1fde914bef7 100644
--- a/processing/src/main/java/io/druid/query/filter/LikeDimFilter.java
+++ b/processing/src/main/java/io/druid/query/filter/LikeDimFilter.java
@@ -23,11 +23,11 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
 import com.google.common.collect.RangeSet;
 import com.google.common.collect.Sets;
 import com.google.common.io.BaseEncoding;
 import com.google.common.primitives.Chars;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.segment.data.Indexed;
@@ -100,7 +100,7 @@ private LikeMatcher(
     )
     {
       this.suffixMatch = Preconditions.checkNotNull(suffixMatch, "suffixMatch");
-      this.prefix = Strings.nullToEmpty(prefix);
+      this.prefix = NullHandling.nullToEmptyIfNeeded(prefix);
       this.pattern = Preconditions.checkNotNull(pattern, "pattern");
     }
 
@@ -153,7 +153,8 @@ private static void addPatternCharacter(final StringBuilder patternBuilder, fina
 
     public boolean matches(@Nullable final String s)
     {
-      return pattern.matcher(Strings.nullToEmpty(s)).matches();
+      String val = NullHandling.nullToEmptyIfNeeded(s);
+      return val != null && pattern.matcher(val).matches();
     }
 
     /**
@@ -167,7 +168,7 @@ public boolean matchesSuffixOnly(final Indexed<String> strings, final int i)
         return true;
       } else if (suffixMatch == SuffixMatch.MATCH_EMPTY) {
         final String s = strings.get(i);
-        return (s == null ? 0 : s.length()) == prefix.length();
+        return s == null ? matches(null) : s.length() == prefix.length();
       } else {
         // suffixMatch is MATCH_PATTERN
         final String s = strings.get(i);
@@ -183,23 +184,9 @@ public DruidPredicateFactory predicateFactory(final ExtractionFn extractionFn)
         public Predicate<String> makeStringPredicate()
         {
           if (extractionFn != null) {
-            return new Predicate<String>()
-            {
-              @Override
-              public boolean apply(String input)
-              {
-                return matches(extractionFn.apply(input));
-              }
-            };
+            return input -> matches(extractionFn.apply(input));
           } else {
-            return new Predicate<String>()
-            {
-              @Override
-              public boolean apply(String input)
-              {
-                return matches(input);
-              }
-            };
+            return input -> matches(input);
           }
         }
 
diff --git a/processing/src/main/java/io/druid/query/filter/LongValueMatcherColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/filter/LongValueMatcherColumnSelectorStrategy.java
index 15a62bd5063..c810eac208e 100644
--- a/processing/src/main/java/io/druid/query/filter/LongValueMatcherColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/filter/LongValueMatcherColumnSelectorStrategy.java
@@ -22,7 +22,6 @@
 import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
 import io.druid.segment.BaseLongColumnValueSelector;
 import io.druid.segment.DimensionHandlerUtils;
-import io.druid.segment.filter.BooleanValueMatcher;
 
 public class LongValueMatcherColumnSelectorStrategy
     implements ValueMatcherColumnSelectorStrategy<BaseLongColumnValueSelector>
@@ -32,7 +31,7 @@ public ValueMatcher makeValueMatcher(final BaseLongColumnValueSelector selector,
   {
     final Long matchVal = DimensionHandlerUtils.convertObjectToLong(value);
     if (matchVal == null) {
-      return BooleanValueMatcher.of(false);
+      return ValueMatcher.nullValueMatcher(selector);
     }
     final long matchValLong = matchVal;
     return new ValueMatcher()
@@ -63,6 +62,9 @@ public ValueMatcher makeValueMatcher(
       @Override
       public boolean matches()
       {
+        if (selector.isNull()) {
+          return predicate.applyNull();
+        }
         return predicate.applyLong(selector.getLong());
       }
 
@@ -78,13 +80,11 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
   @Override
   public ValueGetter makeValueGetter(final BaseLongColumnValueSelector selector)
   {
-    return new ValueGetter()
-    {
-      @Override
-      public String[] get()
-      {
-        return new String[]{Long.toString(selector.getLong())};
+    return () -> {
+      if (selector.isNull()) {
+        return null;
       }
+      return new String[]{Long.toString(selector.getLong())};
     };
   }
 }
diff --git a/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java b/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java
index ebb6fa4fe3a..fdf69173ca5 100644
--- a/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java
+++ b/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java
@@ -24,21 +24,22 @@
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
 import com.google.common.base.Predicates;
-import com.google.common.base.Strings;
-import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
 import com.google.common.collect.Sets;
 import com.google.common.collect.TreeRangeSet;
 import com.google.common.primitives.Doubles;
 import com.google.common.primitives.Floats;
+import io.druid.common.config.NullHandling;
 import io.druid.common.guava.GuavaUtils;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.segment.filter.DimensionPredicateFilter;
 import io.druid.segment.filter.SelectorFilter;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
+import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Objects;
 
@@ -47,6 +48,8 @@
 public class SelectorDimFilter implements DimFilter
 {
   private final String dimension;
+
+  @Nullable
   private final String value;
   private final ExtractionFn extractionFn;
 
@@ -66,7 +69,7 @@ public SelectorDimFilter(
     Preconditions.checkArgument(dimension != null, "dimension must not be null");
 
     this.dimension = dimension;
-    this.value = Strings.nullToEmpty(value);
+    this.value = NullHandling.emptyToNullIfNeeded(value);
     this.extractionFn = extractionFn;
   }
 
@@ -77,10 +80,12 @@ public SelectorDimFilter(
     byte[] valueBytes = (value == null) ? new byte[]{} : StringUtils.toUtf8(value);
     byte[] extractionFnBytes = extractionFn == null ? new byte[0] : extractionFn.getCacheKey();
 
-    return ByteBuffer.allocate(3 + dimensionBytes.length + valueBytes.length + extractionFnBytes.length)
+    return ByteBuffer.allocate(5 + dimensionBytes.length + valueBytes.length + extractionFnBytes.length)
                      .put(DimFilterUtils.SELECTOR_CACHE_ID)
+                     .put(DimFilterUtils.STRING_SEPARATOR)
                      .put(dimensionBytes)
                      .put(DimFilterUtils.STRING_SEPARATOR)
+                     .put(value == null ? NullHandling.IS_NULL_BYTE : NullHandling.IS_NOT_NULL_BYTE)
                      .put(valueBytes)
                      .put(DimFilterUtils.STRING_SEPARATOR)
                      .put(extractionFnBytes)
@@ -90,7 +95,7 @@ public SelectorDimFilter(
   @Override
   public DimFilter optimize()
   {
-    return new InDimFilter(dimension, ImmutableList.of(value), extractionFn).optimize();
+    return new InDimFilter(dimension, Arrays.asList(value), extractionFn).optimize();
   }
 
   @Override
@@ -99,14 +104,13 @@ public Filter toFilter()
     if (extractionFn == null) {
       return new SelectorFilter(dimension, value);
     } else {
-      final String valueOrNull = Strings.emptyToNull(value);
 
       final DruidPredicateFactory predicateFactory = new DruidPredicateFactory()
       {
         @Override
         public Predicate<String> makeStringPredicate()
         {
-          return Predicates.equalTo(valueOrNull);
+          return Predicates.equalTo(value);
         }
 
         @Override
@@ -190,7 +194,14 @@ public boolean equals(Object o)
       return null;
     }
     RangeSet<String> retSet = TreeRangeSet.create();
-    retSet.add(Range.singleton(Strings.nullToEmpty(value)));
+    String valueEquivalent = NullHandling.nullToEmptyIfNeeded(value);
+    if (valueEquivalent == null) {
+      // Case when SQL compatible null handling is enabled
+      // Nulls are less than empty String in segments
+      retSet.add(Range.lessThan(""));
+    } else {
+      retSet.add(Range.singleton(valueEquivalent));
+    }
     return retSet;
   }
 
@@ -219,6 +230,10 @@ private void initLongPredicate()
       if (longPredicate != null) {
         return;
       }
+      if (value == null) {
+        longPredicate = DruidLongPredicate.MATCH_NULL_ONLY;
+        return;
+      }
       final Long valueAsLong = GuavaUtils.tryParseLong(value);
       if (valueAsLong == null) {
         longPredicate = DruidLongPredicate.ALWAYS_FALSE;
@@ -239,6 +254,11 @@ private void initFloatPredicate()
       if (floatPredicate != null) {
         return;
       }
+
+      if (value == null) {
+        floatPredicate = DruidFloatPredicate.MATCH_NULL_ONLY;
+        return;
+      }
       final Float valueAsFloat = Floats.tryParse(value);
 
       if (valueAsFloat == null) {
@@ -259,6 +279,10 @@ private void initDoublePredicate()
       if (druidDoublePredicate != null) {
         return;
       }
+      if (value == null) {
+        druidDoublePredicate = DruidDoublePredicate.MATCH_NULL_ONLY;
+        return;
+      }
       final Double aDouble = Doubles.tryParse(value);
 
       if (aDouble == null) {
diff --git a/processing/src/main/java/io/druid/query/filter/StringValueMatcherColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/filter/StringValueMatcherColumnSelectorStrategy.java
index e380b1162ee..aa3dfe14aa4 100644
--- a/processing/src/main/java/io/druid/query/filter/StringValueMatcherColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/filter/StringValueMatcherColumnSelectorStrategy.java
@@ -20,7 +20,6 @@
 package io.druid.query.filter;
 
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
 import io.druid.segment.DimensionSelector;
 import io.druid.segment.data.IndexedInts;
 import io.druid.segment.filter.BooleanValueMatcher;
@@ -28,19 +27,11 @@
 public class StringValueMatcherColumnSelectorStrategy implements ValueMatcherColumnSelectorStrategy<DimensionSelector>
 {
   private static final String[] NULL_VALUE = new String[]{null};
-  private static final ValueGetter NULL_VALUE_GETTER = new ValueGetter()
-  {
-    @Override
-    public String[] get()
-    {
-      return NULL_VALUE;
-    }
-  };
+  private static final ValueGetter NULL_VALUE_GETTER = () -> NULL_VALUE;
 
   @Override
   public ValueMatcher makeValueMatcher(final DimensionSelector selector, String value)
   {
-    value = Strings.emptyToNull(value);
     if (selector.getValueCardinality() == 0) {
       return BooleanValueMatcher.of(value == null);
     } else {
@@ -68,22 +59,17 @@ public ValueGetter makeValueGetter(final DimensionSelector selector)
     if (selector.getValueCardinality() == 0) {
       return NULL_VALUE_GETTER;
     } else {
-      return new ValueGetter()
-      {
-        @Override
-        public String[] get()
-        {
-          final IndexedInts row = selector.getRow();
-          final int size = row.size();
-          if (size == 0) {
-            return NULL_VALUE;
-          } else {
-            String[] values = new String[size];
-            for (int i = 0; i < size; ++i) {
-              values[i] = Strings.emptyToNull(selector.lookupName(row.get(i)));
-            }
-            return values;
+      return () -> {
+        final IndexedInts row = selector.getRow();
+        final int size = row.size();
+        if (size == 0) {
+          return NULL_VALUE;
+        } else {
+          String[] values = new String[size];
+          for (int i = 0; i < size; ++i) {
+            values[i] = selector.lookupName(row.get(i));
           }
+          return values;
         }
       };
     }
diff --git a/processing/src/main/java/io/druid/query/filter/ValueGetter.java b/processing/src/main/java/io/druid/query/filter/ValueGetter.java
index 72000a4756d..ff4caf36edc 100644
--- a/processing/src/main/java/io/druid/query/filter/ValueGetter.java
+++ b/processing/src/main/java/io/druid/query/filter/ValueGetter.java
@@ -19,6 +19,8 @@
 
 package io.druid.query.filter;
 
+import javax.annotation.Nullable;
+
 /**
  */
 public interface ValueGetter
@@ -27,5 +29,7 @@
   // converted to strings. We should also add functions
   // for these and modify ColumnComparisonFilter to handle
 // comparing Long and Float columns to each other.
+  // Returns null when the underlying Long/Float value is null.
+  @Nullable
   String[] get();
 }
diff --git a/processing/src/main/java/io/druid/query/filter/ValueMatcher.java b/processing/src/main/java/io/druid/query/filter/ValueMatcher.java
index cad255474d8..955e01b0dde 100644
--- a/processing/src/main/java/io/druid/query/filter/ValueMatcher.java
+++ b/processing/src/main/java/io/druid/query/filter/ValueMatcher.java
@@ -21,6 +21,8 @@
 
 import io.druid.query.monomorphicprocessing.CalledFromHotLoop;
 import io.druid.query.monomorphicprocessing.HotLoopCallee;
+import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
+import io.druid.segment.BaseNullableColumnValueSelector;
 
 /**
  */
@@ -28,4 +30,23 @@
 {
   @CalledFromHotLoop
   boolean matches();
+
+  // Utility method to match null values.
+  static ValueMatcher nullValueMatcher(BaseNullableColumnValueSelector selector)
+  {
+    return new ValueMatcher()
+    {
+      @Override
+      public boolean matches()
+      {
+        return selector.isNull();
+      }
+
+      @Override
+      public void inspectRuntimeShape(RuntimeShapeInspector inspector)
+      {
+        inspector.visit("selector", selector);
+      }
+    };
+  }
 }
diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
index ade15e96cf6..3ebf1d3826b 100644
--- a/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
+++ b/processing/src/main/java/io/druid/query/groupby/GroupByQuery.java
@@ -58,6 +58,7 @@
 import io.druid.query.ordering.StringComparators;
 import io.druid.query.spec.LegacySegmentSpec;
 import io.druid.query.spec.QuerySegmentSpec;
+import io.druid.segment.DimensionHandlerUtils;
 import io.druid.segment.VirtualColumn;
 import io.druid.segment.VirtualColumns;
 import io.druid.segment.column.Column;
@@ -533,19 +534,19 @@ private static int compareDims(List<DimensionSpec> dimensions, Row lhs, Row rhs)
     for (DimensionSpec dimension : dimensions) {
       final int dimCompare;
       if (dimension.getOutputType() == ValueType.LONG) {
-        dimCompare = Long.compare(
-            ((Number) lhs.getRaw(dimension.getOutputName())).longValue(),
-            ((Number) rhs.getRaw(dimension.getOutputName())).longValue()
+        dimCompare = Comparators.<Long>naturalNullsFirst().compare(
+            DimensionHandlerUtils.convertObjectToLong(lhs.getRaw(dimension.getOutputName())),
+            DimensionHandlerUtils.convertObjectToLong(rhs.getRaw(dimension.getOutputName()))
         );
       } else if (dimension.getOutputType() == ValueType.FLOAT) {
-        dimCompare = Float.compare(
-            ((Number) lhs.getRaw(dimension.getOutputName())).floatValue(),
-            ((Number) rhs.getRaw(dimension.getOutputName())).floatValue()
+        dimCompare = Comparators.<Float>naturalNullsFirst().compare(
+            DimensionHandlerUtils.convertObjectToFloat(lhs.getRaw(dimension.getOutputName())),
+            DimensionHandlerUtils.convertObjectToFloat(rhs.getRaw(dimension.getOutputName()))
         );
       } else if (dimension.getOutputType() == ValueType.DOUBLE) {
-        dimCompare = Double.compare(
-            ((Number) lhs.getRaw(dimension.getOutputName())).doubleValue(),
-            ((Number) rhs.getRaw(dimension.getOutputName())).doubleValue()
+        dimCompare = Comparators.<Double>naturalNullsFirst().compare(
+            DimensionHandlerUtils.convertObjectToDouble(lhs.getRaw(dimension.getOutputName())),
+            DimensionHandlerUtils.convertObjectToDouble(rhs.getRaw(dimension.getOutputName()))
         );
       } else {
         dimCompare = ((Ordering) Comparators.naturalNullsFirst()).compare(
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java
index 8e79e9e73d6..f9063b55258 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java
@@ -20,12 +20,12 @@
 package io.druid.query.groupby.epinephelinae;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Maps;
 import io.druid.collections.NonBlockingPool;
 import io.druid.collections.ResourceHolder;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.data.input.Row;
 import io.druid.java.util.common.DateTimes;
@@ -45,6 +45,7 @@
 import io.druid.query.groupby.epinephelinae.column.GroupByColumnSelectorPlus;
 import io.druid.query.groupby.epinephelinae.column.GroupByColumnSelectorStrategy;
 import io.druid.query.groupby.epinephelinae.column.LongGroupByColumnSelectorStrategy;
+import io.druid.query.groupby.epinephelinae.column.NullableValueGroupByColumnSelectorStrategy;
 import io.druid.query.groupby.epinephelinae.column.StringGroupByColumnSelectorStrategy;
 import io.druid.query.groupby.strategy.GroupByStrategyV2;
 import io.druid.segment.ColumnValueSelector;
@@ -124,8 +125,8 @@ private GroupByQueryEngineV2()
 
     final ResourceHolder<ByteBuffer> bufferHolder = intermediateResultsBufferPool.take();
 
-    final String fudgeTimestampString = Strings.emptyToNull(
-        query.getContextValue(GroupByStrategyV2.CTX_KEY_FUDGE_TIMESTAMP, "")
+    final String fudgeTimestampString = NullHandling.emptyToNullIfNeeded(
+        query.getContextValue(GroupByStrategyV2.CTX_KEY_FUDGE_TIMESTAMP, null)
     );
 
     final DateTime fudgeTimestamp = fudgeTimestampString == null
@@ -250,15 +251,24 @@ public GroupByColumnSelectorStrategy makeColumnSelectorStrategy(
             return new DictionaryBuildingStringGroupByColumnSelectorStrategy();
           }
         case LONG:
-          return new LongGroupByColumnSelectorStrategy();
+          return makeNullableStrategy(new LongGroupByColumnSelectorStrategy());
         case FLOAT:
-          return new FloatGroupByColumnSelectorStrategy();
+          return makeNullableStrategy(new FloatGroupByColumnSelectorStrategy());
         case DOUBLE:
-          return new DoubleGroupByColumnSelectorStrategy();
+          return makeNullableStrategy(new DoubleGroupByColumnSelectorStrategy());
         default:
           throw new IAE("Cannot create query type helper from invalid type [%s]", type);
       }
     }
+
+    private GroupByColumnSelectorStrategy makeNullableStrategy(GroupByColumnSelectorStrategy delegate)
+    {
+      if (NullHandling.sqlCompatible()) {
+        return new NullableValueGroupByColumnSelectorStrategy(delegate);
+      } else {
+        return delegate;
+      }
+    }
   }
 
   private abstract static class GroupByEngineIterator<KeyType> implements Iterator<Row>, Closeable
@@ -546,7 +556,8 @@ protected void putToMap(ByteBuffer key, Map<String, Object> map)
         selectorPlus.getColumnSelectorStrategy().processValueFromGroupingKey(
             selectorPlus,
             key,
-            map
+            map,
+            selectorPlus.getKeyBufferPosition()
         );
       }
     }
@@ -686,19 +697,16 @@ private static void convertRowTypesToOutputTypes(List<DimensionSpec> dimensionSp
           (dimName, baseVal) -> {
             switch (outputType) {
               case STRING:
-                baseVal = baseVal == null ? "" : baseVal.toString();
+                baseVal = DimensionHandlerUtils.convertObjectToString(baseVal);
                 break;
               case LONG:
                 baseVal = DimensionHandlerUtils.convertObjectToLong(baseVal);
-                baseVal = baseVal == null ? 0L : baseVal;
                 break;
               case FLOAT:
                 baseVal = DimensionHandlerUtils.convertObjectToFloat(baseVal);
-                baseVal = baseVal == null ? 0.f : baseVal;
                 break;
               case DOUBLE:
                 baseVal = DimensionHandlerUtils.convertObjectToDouble(baseVal);
-                baseVal = baseVal == null ? 0.d : baseVal;
                 break;
               default:
                 throw new IAE("Unsupported type: " + outputType);
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
index 722f7b4692d..db8706f7620 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
@@ -23,7 +23,6 @@
 import com.fasterxml.jackson.annotation.JsonValue;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.base.Supplier;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -31,6 +30,7 @@
 import com.google.common.primitives.Longs;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import io.druid.collections.ReferenceCountingResourceHolder;
+import io.druid.common.config.NullHandling;
 import io.druid.common.utils.IntArrayUtils;
 import io.druid.data.input.MapBasedRow;
 import io.druid.data.input.Row;
@@ -39,6 +39,7 @@
 import io.druid.java.util.common.Pair;
 import io.druid.java.util.common.granularity.AllGranularity;
 import io.druid.java.util.common.guava.Accumulator;
+import io.druid.java.util.common.guava.Comparators;
 import io.druid.query.BaseQuery;
 import io.druid.query.ColumnSelectorPlus;
 import io.druid.query.aggregation.AggregatorFactory;
@@ -438,7 +439,7 @@ public Row apply(Grouper.Entry<RowBasedKey> entry)
               Object dimVal = entry.getKey().getKey()[i];
               theMap.put(
                   query.getDimensions().get(i - dimStart).getOutputName(),
-                  dimVal instanceof String ? Strings.emptyToNull((String) dimVal) : dimVal
+                  dimVal instanceof String ? NullHandling.emptyToNullIfNeeded((String) dimVal) : dimVal
               );
             }
 
@@ -527,18 +528,11 @@ public String toString()
     @Override
     public Supplier<Comparable> makeInputRawSupplier(DimensionSelector selector)
     {
-      return new Supplier<Comparable>()
-      {
-        @Override
-        public Comparable get()
-        {
-          final String value;
-          IndexedInts index = selector.getRow();
-          value = index.size() == 0
-                  ? ""
-                  : selector.lookupName(index.get(0));
-          return Strings.nullToEmpty(value);
-        }
+      return () -> {
+        IndexedInts index = selector.getRow();
+        return index.size() == 0
+               ? null
+               : selector.lookupName(index.get(0));
       };
     }
   }
@@ -605,28 +599,19 @@ public InputRawSupplierColumnSelectorStrategy makeColumnSelectorStrategy(
       type = type == null ? ValueType.STRING : type;
       switch (type) {
         case STRING:
-          functions[i] = input -> input == null ? "" : input.toString();
+          functions[i] = input -> DimensionHandlerUtils.convertObjectToString(input);
           break;
 
         case LONG:
-          functions[i] = input -> {
-            final Long val = DimensionHandlerUtils.convertObjectToLong(input);
-            return val == null ? 0L : val;
-          };
+          functions[i] = input -> DimensionHandlerUtils.convertObjectToLong(input);
           break;
 
         case FLOAT:
-          functions[i] = input -> {
-            final Float val = DimensionHandlerUtils.convertObjectToFloat(input);
-            return val == null ? 0.f : val;
-          };
+          functions[i] = input -> DimensionHandlerUtils.convertObjectToFloat(input);
           break;
 
         case DOUBLE:
-          functions[i] = input -> {
-            Double val = DimensionHandlerUtils.convertObjectToDouble(input);
-            return val == null ? 0.0 : val;
-          };
+          functions[i] = input -> DimensionHandlerUtils.convertObjectToDouble(input);
           break;
         default:
           throw new IAE("invalid type: [%s]", type);
@@ -871,7 +856,10 @@ public int compare(Grouper.Entry<RowBasedKey> entry1, Grouper.Entry<RowBasedKey>
     private static int compareDimsInRows(RowBasedKey key1, RowBasedKey key2, int dimStart)
     {
       for (int i = dimStart; i < key1.getKey().length; i++) {
-        final int cmp = ((Comparable) key1.getKey()[i]).compareTo(key2.getKey()[i]);
+        final int cmp = Comparators.<Comparable>naturalNullsFirst().compare(
+            (Comparable) key1.getKey()[i],
+            (Comparable) key2.getKey()[i]
+        );
         if (cmp != 0) {
           return cmp;
         }
@@ -920,9 +908,10 @@ private static int compareDimsInRowsWithAggs(
 
         if (isNumericField.get(i) && comparator.equals(StringComparators.NUMERIC)) {
           // use natural comparison
-          cmp = lhs.compareTo(rhs);
+          cmp = Comparators.<Comparable>naturalNullsFirst().compare(lhs, rhs);
         } else {
-          cmp = comparator.compare(lhs.toString(), rhs.toString());
+          cmp = comparator.compare(DimensionHandlerUtils.convertObjectToString(lhs),
+                                   DimensionHandlerUtils.convertObjectToString(rhs));
         }
 
         if (cmp != 0) {
@@ -934,9 +923,10 @@ private static int compareDimsInRowsWithAggs(
     }
   }
 
-  static long estimateStringKeySize(String key)
+  static long estimateStringKeySize(@Nullable String key)
   {
-    return (long) key.length() * Character.BYTES + ROUGH_OVERHEAD_PER_DICTIONARY_ENTRY;
+    long length = key == null ? 0 : key.length();
+    return length * Character.BYTES + ROUGH_OVERHEAD_PER_DICTIONARY_ENTRY;
   }
 
   private static class RowBasedKeySerde implements Grouper.KeySerde<RowBasedGrouperHelper.RowBasedKey>
@@ -1024,7 +1014,7 @@ private void initializeRankOfDictionaryIds()
       rankOfDictionaryIds = IntStream.range(0, dictionarySize).toArray();
       IntArrays.quickSort(
           rankOfDictionaryIds,
-          (i1, i2) -> dictionary.get(i1).compareTo(dictionary.get(i2))
+          (i1, i2) -> Comparators.<String>naturalNullsFirst().compare(dictionary.get(i1), dictionary.get(i2))
       );
 
       IntArrayUtils.inverse(rankOfDictionaryIds);
@@ -1208,7 +1198,7 @@ public int compare(ByteBuffer lhsBuffer, ByteBuffer rhsBuffer, int lhsPosition,
               throw new IAE("Cannot order by a non-numeric aggregator[%s]", orderSpec);
             }
 
-            serdeHelper = makeNumericSerdeHelper(valueType, aggOffset, true, stringComparator);
+            serdeHelper = makeNullHandlingNumericserdeHelper(valueType, aggOffset, true, stringComparator);
 
             orderByHelpers.add(serdeHelper);
             needsReverses.add(needsReverse);
@@ -1393,12 +1383,34 @@ private RowBasedKeySerdeHelper makeSerdeHelper(
         case LONG:
         case FLOAT:
         case DOUBLE:
-          return makeNumericSerdeHelper(valueType, keyBufferPosition, pushLimitDown, stringComparator);
+          return makeNullHandlingNumericserdeHelper(valueType, keyBufferPosition, pushLimitDown, stringComparator);
         default:
           throw new IAE("invalid type: %s", valueType);
       }
     }
 
+    private RowBasedKeySerdeHelper makeNullHandlingNumericserdeHelper(
+        ValueType valueType,
+        int keyBufferPosition,
+        boolean pushLimitDown,
+        @Nullable StringComparator stringComparator
+    )
+    {
+      if (NullHandling.sqlCompatible()) {
+        return new NullableRowBasedKeySerdeHelper(
+            makeNumericSerdeHelper(
+                valueType,
+                keyBufferPosition + Byte.BYTES,
+                pushLimitDown,
+                stringComparator
+            ),
+            keyBufferPosition
+        );
+      } else {
+        return makeNumericSerdeHelper(valueType, keyBufferPosition, pushLimitDown, stringComparator);
+      }
+    }
+
     private RowBasedKeySerdeHelper makeNumericSerdeHelper(
         ValueType valueType,
         int keyBufferPosition,
@@ -1581,7 +1593,7 @@ public int getKeyBufferValueSize()
       @Override
       public boolean putToKeyBuffer(RowBasedKey key, int idx)
       {
-        keyBuffer.putLong((Long) key.getKey()[idx]);
+        keyBuffer.putLong(DimensionHandlerUtils.nullToZero((Long) key.getKey()[idx]));
         return true;
       }
 
@@ -1632,7 +1644,7 @@ public int getKeyBufferValueSize()
       @Override
       public boolean putToKeyBuffer(RowBasedKey key, int idx)
       {
-        keyBuffer.putFloat((Float) key.getKey()[idx]);
+        keyBuffer.putFloat(DimensionHandlerUtils.nullToZero((Float) key.getKey()[idx]));
         return true;
       }
 
@@ -1684,7 +1696,7 @@ public int getKeyBufferValueSize()
       @Override
       public boolean putToKeyBuffer(RowBasedKey key, int idx)
       {
-        keyBuffer.putDouble((Double) key.getKey()[idx]);
+        keyBuffer.putDouble(DimensionHandlerUtils.nullToZero((Double) key.getKey()[idx]));
         return true;
       }
 
@@ -1700,6 +1712,78 @@ public BufferComparator getBufferComparator()
         return bufferComparator;
       }
     }
+
+    // This class is only used when SQL compatible null handling is enabled.
+    private class NullableRowBasedKeySerdeHelper implements RowBasedKeySerdeHelper
+    {
+      private final RowBasedKeySerdeHelper delegate;
+      private final int keyBufferPosition;
+      private final BufferComparator comparator;
+
+      NullableRowBasedKeySerdeHelper(RowBasedKeySerdeHelper delegate, int keyBufferPosition)
+      {
+        this.delegate = delegate;
+        this.keyBufferPosition = keyBufferPosition;
+        BufferComparator delegateBufferComparator = this.delegate.getBufferComparator();
+        this.comparator = (lhsBuffer, rhsBuffer, lhsPosition, rhsPosition) -> {
+          boolean isLhsNull = (lhsBuffer.get(lhsPosition + keyBufferPosition) == NullHandling.IS_NULL_BYTE);
+          boolean isRhsNull = (rhsBuffer.get(rhsPosition + keyBufferPosition) == NullHandling.IS_NULL_BYTE);
+          if (isLhsNull && isRhsNull) {
+            // Both are null
+            return 0;
+          }
+          // only lhs is null
+          if (isLhsNull) {
+            return -1;
+          }
+          // only rhs is null
+          if (isRhsNull) {
+            return 1;
+          }
+          return delegateBufferComparator.compare(
+              lhsBuffer,
+              rhsBuffer,
+              lhsPosition,
+              rhsPosition
+          );
+        };
+      }
+
+      @Override
+      public int getKeyBufferValueSize()
+      {
+        return delegate.getKeyBufferValueSize() + Byte.BYTES;
+      }
+
+      @Override
+      public boolean putToKeyBuffer(RowBasedKey key, int idx)
+      {
+        Object val = key.getKey()[idx];
+        if (val == null) {
+          keyBuffer.put(NullHandling.IS_NULL_BYTE);
+        } else {
+          keyBuffer.put(NullHandling.IS_NOT_NULL_BYTE);
+        }
+        delegate.putToKeyBuffer(key, idx);
+        return true;
+      }
+
+      @Override
+      public void getFromByteBuffer(ByteBuffer buffer, int initialOffset, int dimValIdx, Comparable[] dimValues)
+      {
+        if (buffer.get(initialOffset + keyBufferPosition) == NullHandling.IS_NULL_BYTE) {
+          dimValues[dimValIdx] = null;
+        } else {
+          delegate.getFromByteBuffer(buffer, initialOffset, dimValIdx, dimValues);
+        }
+      }
+
+      @Override
+      public BufferComparator getBufferComparator()
+      {
+        return comparator;
+      }
+    }
   }
 
   private static int compareDimsInBuffersForNullFudgeTimestamp(
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java
index 2fd6fb93b3f..d9db08985cb 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java
@@ -21,6 +21,7 @@
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.DimensionSelector;
 import io.druid.segment.data.ArrayBasedIndexedInts;
@@ -47,9 +48,14 @@
   }
 
   @Override
-  public void processValueFromGroupingKey(GroupByColumnSelectorPlus selectorPlus, ByteBuffer key, Map<String, Object> resultMap)
+  public void processValueFromGroupingKey(
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
+  )
   {
-    final int id = key.getInt(selectorPlus.getKeyBufferPosition());
+    final int id = key.getInt(keyBufferPosition);
 
     // GROUP_BY_MISSING_VALUE is used to indicate empty rows, which are omitted from the result map.
     if (id != GROUP_BY_MISSING_VALUE) {
@@ -59,7 +65,7 @@ public void processValueFromGroupingKey(GroupByColumnSelectorPlus selectorPlus,
           value
       );
     } else {
-      resultMap.put(selectorPlus.getOutputName(), "");
+      resultMap.put(selectorPlus.getOutputName(), NullHandling.defaultStringValue());
     }
   }
 
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DoubleGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DoubleGroupByColumnSelectorStrategy.java
index ae0250c6f92..bcbd50b71fa 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DoubleGroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/DoubleGroupByColumnSelectorStrategy.java
@@ -21,7 +21,9 @@
 
 
 import io.druid.segment.ColumnValueSelector;
+import io.druid.segment.DimensionHandlerUtils;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Map;
 
@@ -35,10 +37,13 @@ public int getGroupingKeySize()
 
   @Override
   public void processValueFromGroupingKey(
-      GroupByColumnSelectorPlus selectorPlus, ByteBuffer key, Map<String, Object> resultMap
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
   )
   {
-    final double val = key.getDouble(selectorPlus.getKeyBufferPosition());
+    final double val = key.getDouble(keyBufferPosition);
     resultMap.put(selectorPlus.getOutputName(), val);
   }
 
@@ -55,17 +60,21 @@ public Object getOnlyValue(ColumnValueSelector selector)
   }
 
   @Override
-  public void writeToKeyBuffer(int keyBufferPosition, Object obj, ByteBuffer keyBuffer)
+  public void writeToKeyBuffer(int keyBufferPosition, @Nullable Object obj, ByteBuffer keyBuffer)
   {
-    keyBuffer.putDouble(keyBufferPosition, (Double) obj);
+    keyBuffer.putDouble(keyBufferPosition, DimensionHandlerUtils.nullToZero((Double) obj));
   }
 
   @Override
   public void initGroupingKeyColumnValue(
-      int keyBufferPosition, int columnIndex, Object rowObj, ByteBuffer keyBuffer, int[] stack
+      int keyBufferPosition,
+      int columnIndex,
+      Object rowObj,
+      ByteBuffer keyBuffer,
+      int[] stack
   )
   {
-    keyBuffer.putDouble(keyBufferPosition, (Double) rowObj);
+    writeToKeyBuffer(keyBufferPosition, rowObj, keyBuffer);
     stack[columnIndex] = 1;
   }
 
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/FloatGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/FloatGroupByColumnSelectorStrategy.java
index 226e50f3229..3b5996e0ebd 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/FloatGroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/FloatGroupByColumnSelectorStrategy.java
@@ -20,7 +20,9 @@
 package io.druid.query.groupby.epinephelinae.column;
 
 import io.druid.segment.ColumnValueSelector;
+import io.druid.segment.DimensionHandlerUtils;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Map;
 
@@ -35,10 +37,13 @@ public int getGroupingKeySize()
 
   @Override
   public void processValueFromGroupingKey(
-      GroupByColumnSelectorPlus selectorPlus, ByteBuffer key, Map<String, Object> resultMap
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
   )
   {
-    final float val = key.getFloat(selectorPlus.getKeyBufferPosition());
+    final float val = key.getFloat(keyBufferPosition);
     resultMap.put(selectorPlus.getOutputName(), val);
   }
 
@@ -55,17 +60,21 @@ public Object getOnlyValue(ColumnValueSelector selector)
   }
 
   @Override
-  public void writeToKeyBuffer(int keyBufferPosition, Object obj, ByteBuffer keyBuffer)
+  public void writeToKeyBuffer(int keyBufferPosition, @Nullable Object obj, ByteBuffer keyBuffer)
   {
-    keyBuffer.putFloat(keyBufferPosition, (Float) obj);
+    keyBuffer.putFloat(keyBufferPosition, DimensionHandlerUtils.nullToZero((Float) obj));
   }
 
   @Override
   public void initGroupingKeyColumnValue(
-      int keyBufferPosition, int columnIndex, Object rowObj, ByteBuffer keyBuffer, int[] stack
+      int keyBufferPosition,
+      int columnIndex,
+      Object rowObj,
+      ByteBuffer keyBuffer,
+      int[] stack
   )
   {
-    keyBuffer.putFloat(keyBufferPosition, (Float) rowObj);
+    writeToKeyBuffer(keyBufferPosition, rowObj, keyBuffer);
     stack[columnIndex] = 1;
   }
 
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/GroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/GroupByColumnSelectorStrategy.java
index a50cc504d06..9fd2450e8ca 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/GroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/GroupByColumnSelectorStrategy.java
@@ -59,11 +59,13 @@
    * @param selectorPlus dimension info containing the key offset, value selector, and dimension spec
    * @param resultMap result map for the group by query being served
    * @param key grouping key
+   * @param keyBufferPosition buffer position for the grouping key, added to support chaining multiple {@link ColumnSelectorStrategy}
    */
   void processValueFromGroupingKey(
       GroupByColumnSelectorPlus selectorPlus,
       ByteBuffer key,
-      Map<String, Object> resultMap
+      Map<String, Object> resultMap,
+      int keyBufferPosition
   );
 
   /**
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/LongGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/LongGroupByColumnSelectorStrategy.java
index dac7ef639f0..515b7ccb93f 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/LongGroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/LongGroupByColumnSelectorStrategy.java
@@ -20,7 +20,9 @@
 package io.druid.query.groupby.epinephelinae.column;
 
 import io.druid.segment.ColumnValueSelector;
+import io.druid.segment.DimensionHandlerUtils;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Map;
 
@@ -35,10 +37,13 @@ public int getGroupingKeySize()
 
   @Override
   public void processValueFromGroupingKey(
-      GroupByColumnSelectorPlus selectorPlus, ByteBuffer key, Map<String, Object> resultMap
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
   )
   {
-    final long val = key.getLong(selectorPlus.getKeyBufferPosition());
+    final long val = key.getLong(keyBufferPosition);
     resultMap.put(selectorPlus.getOutputName(), val);
   }
 
@@ -55,17 +60,21 @@ public Object getOnlyValue(ColumnValueSelector selector)
   }
 
   @Override
-  public void writeToKeyBuffer(int keyBufferPosition, Object obj, ByteBuffer keyBuffer)
+  public void writeToKeyBuffer(int keyBufferPosition, @Nullable Object obj, ByteBuffer keyBuffer)
   {
-    keyBuffer.putLong(keyBufferPosition, (Long) obj);
+    keyBuffer.putLong(keyBufferPosition, DimensionHandlerUtils.nullToZero((Long) obj));
   }
 
   @Override
   public void initGroupingKeyColumnValue(
-      int keyBufferPosition, int columnIndex, Object rowObj, ByteBuffer keyBuffer, int[] stack
+      int keyBufferPosition,
+      int columnIndex,
+      Object rowObj,
+      ByteBuffer keyBuffer,
+      int[] stack
   )
   {
-    keyBuffer.putLong(keyBufferPosition, (Long) rowObj);
+    writeToKeyBuffer(keyBufferPosition, rowObj, keyBuffer);
     stack[columnIndex] = 1;
   }
 
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/NullableValueGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/NullableValueGroupByColumnSelectorStrategy.java
new file mode 100644
index 00000000000..576ff3010aa
--- /dev/null
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/NullableValueGroupByColumnSelectorStrategy.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.groupby.epinephelinae.column;
+
+
+import io.druid.common.config.NullHandling;
+import io.druid.segment.ColumnValueSelector;
+
+import javax.annotation.Nullable;
+import java.nio.ByteBuffer;
+import java.util.Map;
+
+public class NullableValueGroupByColumnSelectorStrategy implements GroupByColumnSelectorStrategy
+{
+  private final GroupByColumnSelectorStrategy delegate;
+
+  public NullableValueGroupByColumnSelectorStrategy(GroupByColumnSelectorStrategy delegate)
+  {
+    this.delegate = delegate;
+  }
+
+  @Override
+  public int getGroupingKeySize()
+  {
+    return delegate.getGroupingKeySize() + Byte.BYTES;
+  }
+
+  @Override
+  public void processValueFromGroupingKey(
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
+  )
+  {
+    if (key.get(keyBufferPosition) == NullHandling.IS_NULL_BYTE) {
+      resultMap.put(selectorPlus.getOutputName(), null);
+    } else {
+      delegate.processValueFromGroupingKey(selectorPlus, key, resultMap, keyBufferPosition + Byte.BYTES);
+    }
+  }
+
+  @Override
+  public void initColumnValues(ColumnValueSelector selector, int columnIndex, Object[] values)
+  {
+    if (selector.isNull()) {
+      values[columnIndex] = null;
+    } else {
+      delegate.initColumnValues(selector, columnIndex, values);
+    }
+  }
+
+  @Override
+  @Nullable
+  public Object getOnlyValue(ColumnValueSelector selector)
+  {
+    if (selector.isNull()) {
+      return null;
+    }
+    return delegate.getOnlyValue(selector);
+  }
+
+  @Override
+  public void writeToKeyBuffer(int keyBufferPosition, @Nullable Object obj, ByteBuffer keyBuffer)
+  {
+    if (obj == null) {
+      keyBuffer.put(keyBufferPosition, NullHandling.IS_NULL_BYTE);
+    } else {
+      keyBuffer.put(keyBufferPosition, NullHandling.IS_NOT_NULL_BYTE);
+    }
+    delegate.writeToKeyBuffer(keyBufferPosition + Byte.BYTES, obj, keyBuffer);
+  }
+
+  @Override
+  public void initGroupingKeyColumnValue(
+      int keyBufferPosition,
+      int columnIndex,
+      Object rowObj,
+      ByteBuffer keyBuffer,
+      int[] stack
+  )
+  {
+    writeToKeyBuffer(keyBufferPosition, rowObj, keyBuffer);
+    stack[columnIndex] = 1;
+  }
+
+  @Override
+  public boolean checkRowIndexAndAddValueToGroupingKey(
+      int keyBufferPosition, Object rowObj, int rowValIdx, ByteBuffer keyBuffer
+  )
+  {
+    // rows from a nullable column always have a single value, multi-value is not currently supported
+    // this method handles row values after the first in a multivalued row, so just return false
+    return false;
+  }
+}
diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/StringGroupByColumnSelectorStrategy.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/StringGroupByColumnSelectorStrategy.java
index 38de1f7b9b9..8cba28a7533 100644
--- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/StringGroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/column/StringGroupByColumnSelectorStrategy.java
@@ -20,6 +20,7 @@
 package io.druid.query.groupby.epinephelinae.column;
 
 import com.google.common.base.Preconditions;
+import io.druid.common.config.NullHandling;
 import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.DimensionSelector;
 import io.druid.segment.data.IndexedInts;
@@ -36,9 +37,14 @@ public int getGroupingKeySize()
   }
 
   @Override
-  public void processValueFromGroupingKey(GroupByColumnSelectorPlus selectorPlus, ByteBuffer key, Map<String, Object> resultMap)
+  public void processValueFromGroupingKey(
+      GroupByColumnSelectorPlus selectorPlus,
+      ByteBuffer key,
+      Map<String, Object> resultMap,
+      int keyBufferPosition
+  )
   {
-    final int id = key.getInt(selectorPlus.getKeyBufferPosition());
+    final int id = key.getInt(keyBufferPosition);
 
     // GROUP_BY_MISSING_VALUE is used to indicate empty rows, which are omitted from the result map.
     if (id != GROUP_BY_MISSING_VALUE) {
@@ -47,7 +53,7 @@ public void processValueFromGroupingKey(GroupByColumnSelectorPlus selectorPlus,
           ((DimensionSelector) selectorPlus.getSelector()).lookupName(id)
       );
     } else {
-      resultMap.put(selectorPlus.getOutputName(), "");
+      resultMap.put(selectorPlus.getOutputName(), NullHandling.defaultStringValue());
     }
   }
 
diff --git a/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java
index 2e382c4dd9c..d1674b18dd8 100644
--- a/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java
+++ b/processing/src/main/java/io/druid/query/groupby/having/EqualToHavingSpec.java
@@ -68,7 +68,11 @@ public void setAggregators(Map<String, AggregatorFactory> aggregators)
   @Override
   public boolean eval(Row row)
   {
-    return HavingSpecMetricComparator.compare(row, aggregationName, value, aggregators) == 0;
+    Object metricVal = row.getRaw(aggregationName);
+    if (metricVal == null || value == null) {
+      return metricVal == null && value == null;
+    }
+    return HavingSpecMetricComparator.compare(aggregationName, value, aggregators, metricVal) == 0;
   }
 
   /**
diff --git a/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java
index 1c7ec437eec..9b975515163 100644
--- a/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java
+++ b/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java
@@ -68,7 +68,11 @@ public void setAggregators(Map<String, AggregatorFactory> aggregators)
   @Override
   public boolean eval(Row row)
   {
-    return HavingSpecMetricComparator.compare(row, aggregationName, value, aggregators) > 0;
+    Object metricVal = row.getRaw(aggregationName);
+    if (metricVal == null || value == null) {
+      return false;
+    }
+    return HavingSpecMetricComparator.compare(aggregationName, value, aggregators, metricVal) > 0;
   }
 
   /**
diff --git a/processing/src/main/java/io/druid/query/groupby/having/HavingSpecMetricComparator.java b/processing/src/main/java/io/druid/query/groupby/having/HavingSpecMetricComparator.java
index 1b041dc1be5..d6e18a7f55b 100644
--- a/processing/src/main/java/io/druid/query/groupby/having/HavingSpecMetricComparator.java
+++ b/processing/src/main/java/io/druid/query/groupby/having/HavingSpecMetricComparator.java
@@ -21,7 +21,6 @@
 
 import com.google.common.primitives.Doubles;
 import com.google.common.primitives.Longs;
-import io.druid.data.input.Row;
 import io.druid.java.util.common.ISE;
 import io.druid.query.aggregation.AggregatorFactory;
 
@@ -35,11 +34,8 @@
 {
   static final Pattern LONG_PAT = Pattern.compile("[-|+]?\\d+");
 
-  static int compare(Row row, String aggregationName, Number value, Map<String, AggregatorFactory> aggregators)
+  static int compare(String aggregationName, Number value, Map<String, AggregatorFactory> aggregators, Object metricValueObj)
   {
-
-    Object metricValueObj = row.getRaw(aggregationName);
-
     if (metricValueObj != null) {
       if (aggregators != null && aggregators.containsKey(aggregationName)) {
         metricValueObj = aggregators.get(aggregationName).finalizeComputation(metricValueObj);
diff --git a/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java
index 37d6268b866..6729bf19924 100644
--- a/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java
+++ b/processing/src/main/java/io/druid/query/groupby/having/LessThanHavingSpec.java
@@ -66,7 +66,11 @@ public void setAggregators(Map<String, AggregatorFactory> aggregators)
   @Override
   public boolean eval(Row row)
   {
-    return HavingSpecMetricComparator.compare(row, aggregationName, value, aggregators) < 0;
+    Object metricVal = row.getRaw(aggregationName);
+    if (metricVal == null || value == null) {
+      return false;
+    }
+    return HavingSpecMetricComparator.compare(aggregationName, value, aggregators, metricVal) < 0;
   }
 
   /**
diff --git a/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java b/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java
index 2cbaf81bfe9..b79a6378193 100644
--- a/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java
+++ b/processing/src/main/java/io/druid/query/groupby/orderby/DefaultLimitSpec.java
@@ -30,6 +30,7 @@
 import com.google.common.collect.Sets;
 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.Row;
 import io.druid.java.util.common.ISE;
 import io.druid.java.util.common.granularity.Granularities;
@@ -269,12 +270,22 @@ public int compare(Row left, Row right)
 
   private Ordering<Row> metricOrdering(final String column, final Comparator comparator)
   {
-    return Ordering.from(Comparator.comparing((Row row) -> row.getRaw(column), Comparator.nullsLast(comparator)));
+    if (NullHandling.sqlCompatible()) {
+      return Ordering.from(Comparator.comparing((Row row) -> row.getRaw(column), Comparator.nullsFirst(comparator)));
+    } else {
+      return Ordering.from(Comparator.comparing((Row row) -> row.getRaw(column), Comparator.nullsLast(comparator)));
+    }
   }
 
   private Ordering<Row> dimensionOrdering(final String dimension, final StringComparator comparator)
   {
-    return Ordering.from(Comparator.comparing((Row row) -> row.getDimension(dimension).isEmpty() ? null : row.getDimension(dimension).get(0), Comparator.nullsFirst(comparator)));
+    return Ordering.from(
+        Comparator.comparing(
+            (Row row) -> row.getDimension(dimension).isEmpty()
+                         ? null
+                         : row.getDimension(dimension).get(0),
+            Comparator.nullsFirst(comparator)
+        ));
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java
index 4cfae560655..c65d4278585 100644
--- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java
+++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java
@@ -228,6 +228,15 @@ public boolean doMergeResults(final GroupByQuery query)
 
     // Fudge timestamp, maybe.
     final DateTime fudgeTimestamp = getUniversalTimestamp(query);
+    ImmutableMap.Builder<String, Object> context = ImmutableMap.builder();
+    context.put("finalize", false);
+    context.put(GroupByQueryConfig.CTX_KEY_STRATEGY, GroupByStrategySelector.STRATEGY_V2);
+    if (fudgeTimestamp != null) {
+      context.put(CTX_KEY_FUDGE_TIMESTAMP, String.valueOf(fudgeTimestamp.getMillis()));
+    }
+    context.put(CTX_KEY_OUTERMOST, false);
+    // the having spec shouldn't be passed down, so we need to convey the existing limit push down status
+    context.put(GroupByQueryConfig.CTX_KEY_APPLY_LIMIT_PUSH_DOWN, query.isApplyLimitPushDown());
 
     final GroupByQuery newQuery = new GroupByQuery(
         query.getDataSource(),
@@ -243,14 +252,7 @@ public boolean doMergeResults(final GroupByQuery query)
         query.getLimitSpec(),
         query.getContext()
     ).withOverriddenContext(
-        ImmutableMap.<String, Object>of(
-            "finalize", false,
-            GroupByQueryConfig.CTX_KEY_STRATEGY, GroupByStrategySelector.STRATEGY_V2,
-            CTX_KEY_FUDGE_TIMESTAMP, fudgeTimestamp == null ? "" : String.valueOf(fudgeTimestamp.getMillis()),
-            CTX_KEY_OUTERMOST, false,
-            // the having spec shouldn't be passed down, so we need to convey the existing limit push down status
-            GroupByQueryConfig.CTX_KEY_APPLY_LIMIT_PUSH_DOWN, query.isApplyLimitPushDown()
-        )
+        context.build()
     );
 
     Sequence<Row> rowSequence = Sequences.map(
diff --git a/processing/src/main/java/io/druid/query/lookup/LookupConfig.java b/processing/src/main/java/io/druid/query/lookup/LookupConfig.java
index 810afa9d3d2..1afce6a26aa 100644
--- a/processing/src/main/java/io/druid/query/lookup/LookupConfig.java
+++ b/processing/src/main/java/io/druid/query/lookup/LookupConfig.java
@@ -21,7 +21,7 @@
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Strings;
+import io.druid.java.util.common.StringUtils;
 
 import javax.validation.constraints.Min;
 import java.util.Objects;
@@ -56,7 +56,7 @@ public LookupConfig(
       @JsonProperty("snapshotWorkingDir") String snapshotWorkingDir
   )
   {
-    this.snapshotWorkingDir = Strings.nullToEmpty(snapshotWorkingDir);
+    this.snapshotWorkingDir = StringUtils.nullToEmptyNonDruidDataString(snapshotWorkingDir);
   }
 
   public String getSnapshotWorkingDir()
diff --git a/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java b/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java
index 1c2605fe985..c62ad7fe196 100644
--- a/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java
@@ -23,7 +23,6 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Function;
-import com.google.common.base.Strings;
 import com.google.common.base.Throwables;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.extraction.ExtractionCacheHelper;
@@ -52,9 +51,9 @@ public LookupExtractionFn(
         {
           @Nullable
           @Override
-          public String apply(String input)
+          public String apply(@Nullable String input)
           {
-            return lookup.apply(Strings.nullToEmpty(input));
+            return lookup.apply(input);
           }
         },
         retainMissingValue,
diff --git a/processing/src/main/java/io/druid/query/lookup/LookupExtractor.java b/processing/src/main/java/io/druid/query/lookup/LookupExtractor.java
index 0400c21ae1e..21ad0a054e9 100644
--- a/processing/src/main/java/io/druid/query/lookup/LookupExtractor.java
+++ b/processing/src/main/java/io/druid/query/lookup/LookupExtractor.java
@@ -25,7 +25,6 @@
 import io.druid.query.extraction.MapLookupExtractor;
 
 import javax.annotation.Nullable;
-import javax.validation.constraints.NotNull;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -40,12 +39,12 @@
   /**
    * Apply a particular lookup methodology to the input string
    *
-   * @param key The value to apply the lookup to. May not be null
+   * @param key The value to apply the lookup to.
    *
-   * @return The lookup, or null key cannot have the lookup applied to it and should be treated as missing.
+   * @return The lookup, or null when key is `null` or cannot have the lookup applied to it and should be treated as missing.
    */
   @Nullable
-  public abstract String apply(@NotNull String key);
+  public abstract String apply(@Nullable String key);
 
   /**
    * @param keys set of keys to apply lookup for each element
@@ -71,15 +70,15 @@
    * Provide the reverse mapping from a given value to a list of keys
    *
    * @param value the value to apply the reverse lookup
-   *              Null and empty are considered to be the same value = nullToEmpty(value)
    *
    * @return the list of keys that maps to value or empty list.
   * Note that for the case of a non-existent value in the lookup there are two cases: either return an empty list OR a list with a null element.
   * Returning an empty list implies that the user wants to ignore such a lookup value.
   * On the other hand, returning a list with the null element implies the user wants to map the non-existent value to the key null.
+   * Null value maps to empty list.
    */
 
-  public abstract List<String> unapply(String value);
+  public abstract List<String> unapply(@Nullable String value);
 
   /**
    * @param values Iterable of values for which will perform reverse lookup
diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java
index 196bb42cd3b..d45b8ed0247 100644
--- a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java
+++ b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java
@@ -21,10 +21,10 @@
 
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.granularity.Granularities;
 import io.druid.java.util.common.guava.Accumulator;
@@ -216,8 +216,8 @@ private ColumnAnalysis analyzeStringColumn(
     }
 
     if (analyzingMinMax() && cardinality > 0) {
-      min = Strings.nullToEmpty(bitmapIndex.getValue(0));
-      max = Strings.nullToEmpty(bitmapIndex.getValue(cardinality - 1));
+      min = NullHandling.nullToEmptyIfNeeded(bitmapIndex.getValue(0));
+      max = NullHandling.nullToEmptyIfNeeded(bitmapIndex.getValue(cardinality - 1));
     }
 
     return new ColumnAnalysis(
diff --git a/processing/src/main/java/io/druid/query/search/SearchHit.java b/processing/src/main/java/io/druid/query/search/SearchHit.java
index 52e6692f989..4031d0df3d8 100644
--- a/processing/src/main/java/io/druid/query/search/SearchHit.java
+++ b/processing/src/main/java/io/druid/query/search/SearchHit.java
@@ -22,6 +22,7 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
+import io.druid.common.config.NullHandling;
 
 /**
  */
@@ -39,7 +40,7 @@ public SearchHit(
   )
   {
     this.dimension = Preconditions.checkNotNull(dimension);
-    this.value = Preconditions.checkNotNull(value);
+    this.value = NullHandling.nullToEmptyIfNeeded(value);
     this.count = count;
   }
 
diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
index 27a838f0990..a26f3f77342 100644
--- a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
@@ -20,7 +20,6 @@
 package io.druid.query.search;
 
 import com.google.common.base.Function;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
@@ -132,7 +131,7 @@ public void updateSearchResultSet(
         for (int i = 0, rowSize = row.size(); i < rowSize; ++i) {
           final String dimVal = selector.lookupName(row.get(i));
           if (searchQuerySpec.accept(dimVal)) {
-            set.addTo(new SearchHit(outputName, Strings.nullToEmpty(dimVal)), 1);
+            set.addTo(new SearchHit(outputName, dimVal), 1);
             if (set.size() >= limit) {
               return;
             }
diff --git a/processing/src/main/java/io/druid/query/search/UseIndexesStrategy.java b/processing/src/main/java/io/druid/query/search/UseIndexesStrategy.java
index ec4ce207854..987a86e0d68 100644
--- a/processing/src/main/java/io/druid/query/search/UseIndexesStrategy.java
+++ b/processing/src/main/java/io/druid/query/search/UseIndexesStrategy.java
@@ -20,7 +20,6 @@
 package io.druid.query.search;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import io.druid.collections.bitmap.BitmapFactory;
@@ -261,7 +260,7 @@ public IndexOnlyExecutor(
           extractionFn = IdentityExtractionFn.getInstance();
         }
         for (int i = 0; i < bitmapIndex.getCardinality(); ++i) {
-          String dimVal = Strings.nullToEmpty(extractionFn.apply(bitmapIndex.getValue(i)));
+          String dimVal = extractionFn.apply(bitmapIndex.getValue(i));
           if (!searchQuerySpec.accept(dimVal)) {
             continue;
           }
diff --git a/processing/src/main/java/io/druid/query/topn/TopNMapFn.java b/processing/src/main/java/io/druid/query/topn/TopNMapFn.java
index 24d05fa3777..3d4807628ec 100644
--- a/processing/src/main/java/io/druid/query/topn/TopNMapFn.java
+++ b/processing/src/main/java/io/druid/query/topn/TopNMapFn.java
@@ -56,14 +56,10 @@
     return longVal == null ? DimensionHandlerUtils.ZERO_LONG : longVal;
   };
 
-  private static Function<Object, Object> FLOAT_TRANSFORMER = input -> {
-    final Float floatVal = DimensionHandlerUtils.convertObjectToFloat(input);
-    return floatVal == null ? DimensionHandlerUtils.ZERO_FLOAT : floatVal;
-  };
-  private static Function<Object, Object> DOUBLE_TRANSFORMER = input -> {
-    final Double doubleValue = DimensionHandlerUtils.convertObjectToDouble(input);
-    return doubleValue == null ? DimensionHandlerUtils.ZERO_DOUBLE : doubleValue;
-  };
+  private static Function<Object, Object> FLOAT_TRANSFORMER = input -> DimensionHandlerUtils.convertObjectToFloat(input);
+
+  private static Function<Object, Object> DOUBLE_TRANSFORMER = input -> DimensionHandlerUtils.convertObjectToDouble(
+      input);
 
   private static final TopNColumnSelectorStrategyFactory STRATEGY_FACTORY = new TopNColumnSelectorStrategyFactory();
 
diff --git a/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java b/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java
index bee78e10304..45f050f9361 100644
--- a/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java
+++ b/processing/src/main/java/io/druid/segment/ColumnSelectorBitmapIndexSelector.java
@@ -225,7 +225,7 @@ public ImmutableBitmap getBitmapIndex(String dimension, String value)
     }
 
     final BitmapIndex bitmapIndex = column.getBitmapIndex();
-    return bitmapIndex.getBitmap(bitmapIndex.getIndex(NullHandling.emptyToNullIfNeeded(value)));
+    return bitmapIndex.getBitmap(bitmapIndex.getIndex(value));
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java b/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java
index 278ea9c1bff..afd866c441b 100644
--- a/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java
+++ b/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java
@@ -237,6 +237,15 @@ private static ColumnCapabilities getEffectiveCapabilities(
     return strategyFactory.makeColumnSelectorStrategy(capabilities, selector);
   }
 
+  @Nullable
+  public static String convertObjectToString(@Nullable Object valObj)
+  {
+    if (valObj == null) {
+      return null;
+    }
+    return valObj.toString();
+  }
+
   @Nullable
   public static Long convertObjectToLong(@Nullable Object valObj)
   {
diff --git a/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java b/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java
index 423f3a6424c..92eca9b4055 100644
--- a/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java
+++ b/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java
@@ -44,9 +44,7 @@ public Double processRowValsToUnsortedEncodedKeyComponent(Object dimValues, bool
     if (dimValues instanceof List) {
       throw new UnsupportedOperationException("Numeric columns do not support multivalue rows.");
     }
-    Double ret = DimensionHandlerUtils.convertObjectToDouble(dimValues, reportParseExceptions);
-    // remove null -> zero conversion when https://github.com/druid-io/druid/pull/5278 series of patches is merged
-    return ret == null ? DimensionHandlerUtils.ZERO_DOUBLE : ret;
+    return DimensionHandlerUtils.convertObjectToDouble(dimValues, reportParseExceptions);
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java b/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java
index adca2802085..3f1a001073f 100644
--- a/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java
+++ b/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java
@@ -45,9 +45,7 @@ public Float processRowValsToUnsortedEncodedKeyComponent(Object dimValues, boole
       throw new UnsupportedOperationException("Numeric columns do not support multivalue rows.");
     }
 
-    Float ret = DimensionHandlerUtils.convertObjectToFloat(dimValues, reportParseExceptions);
-    // remove null -> zero conversion when https://github.com/druid-io/druid/pull/5278 series of patches is merged
-    return ret == null ? DimensionHandlerUtils.ZERO_FLOAT : ret;
+    return DimensionHandlerUtils.convertObjectToFloat(dimValues, reportParseExceptions);
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/segment/LongDimensionIndexer.java b/processing/src/main/java/io/druid/segment/LongDimensionIndexer.java
index 4b7df0a20b4..3522ea30725 100644
--- a/processing/src/main/java/io/druid/segment/LongDimensionIndexer.java
+++ b/processing/src/main/java/io/druid/segment/LongDimensionIndexer.java
@@ -45,9 +45,7 @@ public Long processRowValsToUnsortedEncodedKeyComponent(Object dimValues, boolea
       throw new UnsupportedOperationException("Numeric columns do not support multivalue rows.");
     }
 
-    Long ret = DimensionHandlerUtils.convertObjectToLong(dimValues, reportParseExceptions);
-    // remove null -> zero conversion when https://github.com/druid-io/druid/pull/5278 series of patches is merged
-    return ret == null ? DimensionHandlerUtils.ZERO_LONG : ret;
+    return DimensionHandlerUtils.convertObjectToLong(dimValues, reportParseExceptions);
   }
 
   @Override
diff --git a/processing/src/main/java/io/druid/segment/filter/BoundFilter.java b/processing/src/main/java/io/druid/segment/filter/BoundFilter.java
index c9fc4262a1e..c8115b804a3 100644
--- a/processing/src/main/java/io/druid/segment/filter/BoundFilter.java
+++ b/processing/src/main/java/io/druid/segment/filter/BoundFilter.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Predicate;
 import com.google.common.base.Supplier;
 import io.druid.collections.bitmap.ImmutableBitmap;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.Pair;
 import io.druid.query.BitmapResultFactory;
 import io.druid.query.extraction.ExtractionFn;
@@ -148,7 +149,7 @@ public boolean supportsSelectivityEstimation(
     if (!boundDimFilter.hasLowerBound()) {
       startIndex = 0;
     } else {
-      final int found = bitmapIndex.getIndex(boundDimFilter.getLower());
+      final int found = bitmapIndex.getIndex(NullHandling.emptyToNullIfNeeded(boundDimFilter.getLower()));
       if (found >= 0) {
         startIndex = boundDimFilter.isLowerStrict() ? found + 1 : found;
       } else {
@@ -159,7 +160,7 @@ public boolean supportsSelectivityEstimation(
     if (!boundDimFilter.hasUpperBound()) {
       endIndex = bitmapIndex.getCardinality();
     } else {
-      final int found = bitmapIndex.getIndex(boundDimFilter.getUpper());
+      final int found = bitmapIndex.getIndex(NullHandling.emptyToNullIfNeeded(boundDimFilter.getUpper()));
       if (found >= 0) {
         endIndex = boundDimFilter.isUpperStrict() ? found : found + 1;
       } else {
@@ -249,9 +250,10 @@ private boolean doesMatch(String input)
   {
     if (input == null) {
       return (!boundDimFilter.hasLowerBound()
-              || (boundDimFilter.getLower().isEmpty() && !boundDimFilter.isLowerStrict())) // lower bound allows null
+              || (NullHandling.isNullOrEquivalent(boundDimFilter.getLower()) && !boundDimFilter.isLowerStrict()))
+             // lower bound allows null
              && (!boundDimFilter.hasUpperBound()
-                 || !boundDimFilter.getUpper().isEmpty()
+                 || !NullHandling.isNullOrEquivalent(boundDimFilter.getUpper())
                  || !boundDimFilter.isUpperStrict()); // upper bound allows null
     }
     int lowerComparing = 1;
diff --git a/processing/src/main/java/io/druid/segment/filter/ExpressionFilter.java b/processing/src/main/java/io/druid/segment/filter/ExpressionFilter.java
index 4acea0f2937..42c2b32702a 100644
--- a/processing/src/main/java/io/druid/segment/filter/ExpressionFilter.java
+++ b/processing/src/main/java/io/druid/segment/filter/ExpressionFilter.java
@@ -21,6 +21,7 @@
 
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
+import io.druid.common.config.NullHandling;
 import io.druid.math.expr.Evals;
 import io.druid.math.expr.Expr;
 import io.druid.math.expr.ExprEval;
@@ -58,6 +59,9 @@ public ValueMatcher makeMatcher(final ColumnSelectorFactory factory)
       @Override
       public boolean matches()
       {
+        if (NullHandling.sqlCompatible() && selector.isNull()) {
+          return false;
+        }
         return Evals.asBoolean(selector.getLong());
       }
 
@@ -108,7 +112,8 @@ public boolean supportsBitmapIndex(final BitmapIndexSelector selector)
           value -> expr.eval(identifierName -> {
             // There's only one binding, and it must be the single column, so it can safely be ignored in production.
             assert column.equals(identifierName);
-            return value;
+            // convert null to Empty before passing to expressions if needed.
+            return NullHandling.nullToEmptyIfNeeded(value);
           }).asBoolean()
       );
     }
diff --git a/processing/src/main/java/io/druid/segment/filter/Filters.java b/processing/src/main/java/io/druid/segment/filter/Filters.java
index 415ab5bdc7d..5476191ca3b 100644
--- a/processing/src/main/java/io/druid/segment/filter/Filters.java
+++ b/processing/src/main/java/io/druid/segment/filter/Filters.java
@@ -25,7 +25,6 @@
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import io.druid.collections.bitmap.ImmutableBitmap;
-import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.guava.FunctionalIterable;
 import io.druid.query.BitmapResultFactory;
 import io.druid.query.ColumnSelectorPlus;
@@ -135,8 +134,7 @@ public static ValueMatcher makeValueMatcher(
             columnSelectorFactory
         );
 
-    return selector.getColumnSelectorStrategy()
-                   .makeValueMatcher(selector.getSelector(), NullHandling.emptyToNullIfNeeded(value));
+    return selector.getColumnSelectorStrategy().makeValueMatcher(selector.getSelector(), value);
   }
 
   /**
diff --git a/processing/src/main/java/io/druid/segment/filter/InFilter.java b/processing/src/main/java/io/druid/segment/filter/InFilter.java
index 7e737616dad..869e94e37ef 100644
--- a/processing/src/main/java/io/druid/segment/filter/InFilter.java
+++ b/processing/src/main/java/io/druid/segment/filter/InFilter.java
@@ -20,7 +20,6 @@
 package io.druid.segment.filter;
 
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
 import com.google.common.base.Supplier;
 import io.druid.collections.bitmap.ImmutableBitmap;
 import io.druid.query.BitmapResultFactory;
@@ -165,9 +164,9 @@ private DruidPredicateFactory getPredicateFactory()
       public Predicate<String> makeStringPredicate()
       {
         if (extractionFn != null) {
-          return input -> values.contains(Strings.nullToEmpty(extractionFn.apply(input)));
+          return input -> values.contains(extractionFn.apply(input));
         } else {
-          return input -> values.contains(Strings.nullToEmpty(input));
+          return input -> values.contains(input);
         }
       }
 
diff --git a/processing/src/main/java/io/druid/segment/filter/LikeFilter.java b/processing/src/main/java/io/druid/segment/filter/LikeFilter.java
index a5984f31b5c..99b953c6270 100644
--- a/processing/src/main/java/io/druid/segment/filter/LikeFilter.java
+++ b/processing/src/main/java/io/druid/segment/filter/LikeFilter.java
@@ -19,9 +19,9 @@
 
 package io.druid.segment.filter;
 
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import io.druid.collections.bitmap.ImmutableBitmap;
+import io.druid.common.config.NullHandling;
 import io.druid.query.BitmapResultFactory;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.query.filter.BitmapIndexSelector;
@@ -90,7 +90,10 @@ public boolean supportsSelectivityEstimation(
   {
     if (isSimpleEquals()) {
       // Verify that dimension equals prefix.
-      return ImmutableList.of(selector.getBitmapIndex(dimension, likeMatcher.getPrefix()));
+      return ImmutableList.of(selector.getBitmapIndex(
+          dimension,
+          NullHandling.emptyToNullIfNeeded(likeMatcher.getPrefix())
+      ));
     } else if (isSimplePrefix()) {
       // Verify that dimension startsWith prefix, and is accepted by likeMatcher.matchesSuffixOnly.
       final BitmapIndex bitmapIndex = selector.getBitmapIndex(dimension);
@@ -140,16 +143,24 @@ private IntIterable getDimValueIndexIterableForPrefixMatch(
       final Indexed<String> dimValues
   )
   {
-    final String lower = Strings.nullToEmpty(likeMatcher.getPrefix());
-    final String upper = Strings.nullToEmpty(likeMatcher.getPrefix()) + Character.MAX_VALUE;
+
+    final String lower = NullHandling.nullToEmptyIfNeeded(likeMatcher.getPrefix());
+    final String upper = NullHandling.nullToEmptyIfNeeded(likeMatcher.getPrefix()) + Character.MAX_VALUE;
+
     final int startIndex; // inclusive
     final int endIndex; // exclusive
 
-    final int lowerFound = bitmapIndex.getIndex(lower);
-    startIndex = lowerFound >= 0 ? lowerFound : -(lowerFound + 1);
+    if (lower == null) {
+      // For Null values
+      startIndex = bitmapIndex.getIndex(null);
+      endIndex = startIndex + 1;
+    } else {
+      final int lowerFound = bitmapIndex.getIndex(lower);
+      startIndex = lowerFound >= 0 ? lowerFound : -(lowerFound + 1);
 
-    final int upperFound = bitmapIndex.getIndex(upper);
-    endIndex = upperFound >= 0 ? upperFound + 1 : -(upperFound + 1);
+      final int upperFound = bitmapIndex.getIndex(upper);
+      endIndex = upperFound >= 0 ? upperFound + 1 : -(upperFound + 1);
+    }
 
     return new IntIterable()
     {
diff --git a/processing/src/main/java/io/druid/segment/serde/BitmapIndexColumnPartSupplier.java b/processing/src/main/java/io/druid/segment/serde/BitmapIndexColumnPartSupplier.java
index 72d20f9c235..ea204ced0d1 100644
--- a/processing/src/main/java/io/druid/segment/serde/BitmapIndexColumnPartSupplier.java
+++ b/processing/src/main/java/io/druid/segment/serde/BitmapIndexColumnPartSupplier.java
@@ -22,7 +22,6 @@
 import com.google.common.base.Supplier;
 import io.druid.collections.bitmap.BitmapFactory;
 import io.druid.collections.bitmap.ImmutableBitmap;
-import io.druid.common.config.NullHandling;
 import io.druid.segment.column.BitmapIndex;
 import io.druid.segment.data.GenericIndexed;
 
@@ -78,7 +77,7 @@ public BitmapFactory getBitmapFactory()
       public int getIndex(String value)
       {
         // GenericIndexed.indexOf satisfies contract needed by BitmapIndex.indexOf
-        return dictionary.indexOf(NullHandling.emptyToNullIfNeeded(value));
+        return dictionary.indexOf(value);
       }
 
       @Override
diff --git a/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java b/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java
index 8698b36e1f5..a17584bf16f 100644
--- a/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java
+++ b/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java
@@ -21,13 +21,11 @@
 
 import com.google.common.base.Function;
 import io.druid.guice.annotations.ExtensionPoint;
-import io.druid.segment.writeout.SegmentWriteOutMedium;
 import io.druid.segment.GenericColumnSerializer;
 import io.druid.segment.column.ColumnBuilder;
 import io.druid.segment.data.ObjectStrategy;
-import it.unimi.dsi.fastutil.bytes.ByteArrays;
+import io.druid.segment.writeout.SegmentWriteOutMedium;
 
-import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 
 /**
@@ -82,9 +80,9 @@
    *
    * @return serialized intermediate representation of aggregate in byte[]
    */
-  public byte[] toBytes(@Nullable Object val)
+  public byte[] toBytes(Object val)
   {
-    return val != null ? getObjectStrategy().toBytes(val) : ByteArrays.EMPTY_ARRAY;
+    return getObjectStrategy().toBytes(val);
   }
 
   /**
diff --git a/processing/src/main/java/io/druid/segment/virtual/ExpressionColumnValueSelector.java b/processing/src/main/java/io/druid/segment/virtual/ExpressionColumnValueSelector.java
index 18c64d1bfcd..297ecce3714 100644
--- a/processing/src/main/java/io/druid/segment/virtual/ExpressionColumnValueSelector.java
+++ b/processing/src/main/java/io/druid/segment/virtual/ExpressionColumnValueSelector.java
@@ -82,6 +82,6 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
   @Override
   public boolean isNull()
   {
-    return getObject().isNull();
+    return getObject().isNumericNull();
   }
 }
diff --git a/processing/src/main/java/io/druid/segment/virtual/ExpressionSelectors.java b/processing/src/main/java/io/druid/segment/virtual/ExpressionSelectors.java
index 3aae3b797d7..c3f16572c59 100644
--- a/processing/src/main/java/io/druid/segment/virtual/ExpressionSelectors.java
+++ b/processing/src/main/java/io/druid/segment/virtual/ExpressionSelectors.java
@@ -161,7 +161,7 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
     if (bindings.equals(ExprUtils.nilBindings())) {
       // Optimization for constant expressions.
       final ExprEval eval = expression.eval(bindings);
-      if (NullHandling.sqlCompatible() && eval.isNull()) {
+      if (NullHandling.sqlCompatible() && eval.isNumericNull()) {
         return NilColumnValueSelector.instance();
       }
       return new ConstantColumnValueSelector<>(
@@ -248,23 +248,32 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
   {
     final Map<String, Supplier<Object>> suppliers = Maps.newHashMap();
     for (String columnName : Parser.findRequiredBindings(expression)) {
-      final ColumnCapabilities columnCapabilities = columnSelectorFactory.getColumnCapabilities(columnName);
+      final ColumnCapabilities columnCapabilities = columnSelectorFactory
+              .getColumnCapabilities(columnName);
       final ValueType nativeType = columnCapabilities != null ? columnCapabilities.getType() : null;
       final Supplier<Object> supplier;
 
       if (nativeType == ValueType.FLOAT) {
-        supplier = columnSelectorFactory.makeColumnValueSelector(columnName)::getFloat;
+        ColumnValueSelector selector = columnSelectorFactory
+                .makeColumnValueSelector(columnName);
+        supplier = makeNullableSupplier(selector, selector::getFloat);
       } else if (nativeType == ValueType.LONG) {
-        supplier = columnSelectorFactory.makeColumnValueSelector(columnName)::getLong;
+        ColumnValueSelector selector = columnSelectorFactory
+                .makeColumnValueSelector(columnName);
+        supplier = makeNullableSupplier(selector, selector::getLong);
       } else if (nativeType == ValueType.DOUBLE) {
-        supplier = columnSelectorFactory.makeColumnValueSelector(columnName)::getDouble;
+        ColumnValueSelector selector = columnSelectorFactory
+                .makeColumnValueSelector(columnName);
+        supplier = makeNullableSupplier(selector, selector::getDouble);
       } else if (nativeType == ValueType.STRING) {
         supplier = supplierFromDimensionSelector(
-            columnSelectorFactory.makeDimensionSelector(new DefaultDimensionSpec(columnName, columnName))
+                columnSelectorFactory
+                        .makeDimensionSelector(new DefaultDimensionSpec(columnName, columnName))
         );
       } else if (nativeType == null) {
         // Unknown ValueType. Try making an Object selector and see if that gives us anything useful.
-        supplier = supplierFromObjectSelector(columnSelectorFactory.makeColumnValueSelector(columnName));
+        supplier = supplierFromObjectSelector(columnSelectorFactory
+                .makeColumnValueSelector(columnName));
       } else {
         // Unhandleable ValueType (COMPLEX).
         supplier = null;
@@ -292,6 +301,23 @@ public void inspectRuntimeShape(RuntimeShapeInspector inspector)
     }
   }
 
+  private static <T> Supplier<T> makeNullableSupplier(
+      ColumnValueSelector selector,
+      Supplier<T> supplier
+  )
+  {
+    if (NullHandling.replaceWithDefault()) {
+      return supplier;
+    } else {
+      return () -> {
+        if (selector.isNull()) {
+          return null;
+        }
+        return supplier.get();
+      };
+    }
+  }
+
   @VisibleForTesting
   @Nonnull
   static Supplier<Object> supplierFromDimensionSelector(final DimensionSelector selector)
diff --git a/processing/src/main/java/io/druid/segment/virtual/SingleLongInputCachingExpressionColumnValueSelector.java b/processing/src/main/java/io/druid/segment/virtual/SingleLongInputCachingExpressionColumnValueSelector.java
index 2a0be1ccf76..e2d504bd70b 100644
--- a/processing/src/main/java/io/druid/segment/virtual/SingleLongInputCachingExpressionColumnValueSelector.java
+++ b/processing/src/main/java/io/druid/segment/virtual/SingleLongInputCachingExpressionColumnValueSelector.java
@@ -150,6 +150,6 @@ private ExprEval eval(final long value)
   @Override
   public boolean isNull()
   {
-    return getObject().isNull();
+    return getObject().isNumericNull();
   }
 }
diff --git a/processing/src/main/java/io/druid/segment/virtual/SingleStringInputCachingExpressionColumnValueSelector.java b/processing/src/main/java/io/druid/segment/virtual/SingleStringInputCachingExpressionColumnValueSelector.java
index 2b2db902d9e..ae5fb3467d7 100644
--- a/processing/src/main/java/io/druid/segment/virtual/SingleStringInputCachingExpressionColumnValueSelector.java
+++ b/processing/src/main/java/io/druid/segment/virtual/SingleStringInputCachingExpressionColumnValueSelector.java
@@ -139,7 +139,7 @@ private ExprEval eval()
   @Override
   public boolean isNull()
   {
-    return eval().isNull();
+    return eval().isNumericNull();
   }
 
   public static class LruEvalCache
diff --git a/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java b/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java
index 9a96d28db49..f3d4ef565fb 100644
--- a/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java
+++ b/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java
@@ -25,6 +25,7 @@
 import com.google.common.collect.Maps;
 import com.google.common.io.Closeables;
 import com.google.common.util.concurrent.MoreExecutors;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.MapInputRowParser;
@@ -48,6 +49,7 @@
 import io.druid.segment.IndexBuilder;
 import io.druid.segment.QueryableIndex;
 import io.druid.segment.QueryableIndexSegment;
+import io.druid.segment.TestHelper;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import org.junit.After;
 import org.junit.Assert;
@@ -284,8 +286,13 @@ public void testNumericEvolutionTimeseriesAggregation()
     );
 
     // Only nonexistent(4)
+    Map<String, Object> result = Maps.newHashMap();
+    result.put("a", NullHandling.defaultLongValue());
+    result.put("b", NullHandling.defaultDoubleValue());
+    result.put("c", NullHandling.defaultLongValue());
+    result.put("d", NullHandling.defaultDoubleValue());
     Assert.assertEquals(
-        timeseriesResult(ImmutableMap.of("a", 0L, "b", 0.0, "c", 0L, "d", 0.0)),
+        timeseriesResult(result),
         runQuery(query, factory, ImmutableList.of(index4))
     );
 
@@ -354,7 +361,14 @@ public void testNumericEvolutionFiltering()
 
     // Only nonexistent(4)
     Assert.assertEquals(
-        timeseriesResult(ImmutableMap.of("a", 0L, "b", 0.0, "c", 0L)),
+        timeseriesResult(TestHelper.createExpectedMap(
+            "a",
+            NullHandling.defaultLongValue(),
+            "b",
+            NullHandling.defaultDoubleValue(),
+            "c",
+            0L
+        )),
         runQuery(query, factory, ImmutableList.of(index4))
     );
 
diff --git a/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java
index 63066772d63..9c839f59961 100644
--- a/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/DoubleMaxAggregationTest.java
@@ -56,7 +56,7 @@ public void setup()
   @Test
   public void testDoubleMaxAggregator()
   {
-    DoubleMaxAggregator agg = (DoubleMaxAggregator) doubleMaxAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = doubleMaxAggFactory.factorize(colSelectorFactory);
 
     aggregate(selector, agg);
     aggregate(selector, agg);
@@ -71,9 +71,9 @@ public void testDoubleMaxAggregator()
   @Test
   public void testDoubleMaxBufferAggregator()
   {
-    DoubleMaxBufferAggregator agg = (DoubleMaxBufferAggregator) doubleMaxAggFactory.factorizeBuffered(colSelectorFactory);
+    BufferAggregator agg = doubleMaxAggFactory.factorizeBuffered(colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[Double.BYTES]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[Double.BYTES + Byte.BYTES]);
     agg.init(buffer, 0);
 
     aggregate(selector, agg, buffer, 0);
@@ -105,13 +105,13 @@ public void testEqualsAndHashCode()
     Assert.assertFalse(one.equals(two));
   }
 
-  private void aggregate(TestDoubleColumnSelectorImpl selector, DoubleMaxAggregator agg)
+  private void aggregate(TestDoubleColumnSelectorImpl selector, Aggregator agg)
   {
     agg.aggregate();
     selector.increment();
   }
 
-  private void aggregate(TestDoubleColumnSelectorImpl selector, DoubleMaxBufferAggregator agg, ByteBuffer buff, int position)
+  private void aggregate(TestDoubleColumnSelectorImpl selector, BufferAggregator agg, ByteBuffer buff, int position)
   {
     agg.aggregate(buff, position);
     selector.increment();
diff --git a/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java
index 2becdc133dd..08969d36a7f 100644
--- a/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/DoubleMinAggregationTest.java
@@ -56,7 +56,7 @@ public void setup()
   @Test
   public void testDoubleMinAggregator()
   {
-    DoubleMinAggregator agg = (DoubleMinAggregator) doubleMinAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = doubleMinAggFactory.factorize(colSelectorFactory);
 
     aggregate(selector, agg);
     aggregate(selector, agg);
@@ -71,9 +71,9 @@ public void testDoubleMinAggregator()
   @Test
   public void testDoubleMinBufferAggregator()
   {
-    DoubleMinBufferAggregator agg = (DoubleMinBufferAggregator) doubleMinAggFactory.factorizeBuffered(colSelectorFactory);
+    BufferAggregator agg = doubleMinAggFactory.factorizeBuffered(colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[Double.BYTES]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[Double.BYTES + Byte.BYTES]);
     agg.init(buffer, 0);
 
     aggregate(selector, agg, buffer, 0);
@@ -105,13 +105,13 @@ public void testEqualsAndHashCode()
     Assert.assertFalse(one.equals(two));
   }
 
-  private void aggregate(TestDoubleColumnSelectorImpl selector, DoubleMinAggregator agg)
+  private void aggregate(TestDoubleColumnSelectorImpl selector, Aggregator agg)
   {
     agg.aggregate();
     selector.increment();
   }
 
-  private void aggregate(TestDoubleColumnSelectorImpl selector, DoubleMinBufferAggregator agg, ByteBuffer buff, int position)
+  private void aggregate(TestDoubleColumnSelectorImpl selector, BufferAggregator agg, ByteBuffer buff, int position)
   {
     agg.aggregate(buff, position);
     selector.increment();
diff --git a/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java
index 584f08a2b81..ed5ad443f5c 100644
--- a/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java
@@ -21,6 +21,7 @@
 
 import com.google.common.base.Predicate;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.js.JavaScriptConfig;
 import io.druid.query.dimension.DimensionSpec;
 import io.druid.query.extraction.ExtractionFn;
@@ -224,9 +225,9 @@ public ColumnCapabilities getColumnCapabilities(String columnName)
 
   private void assertValues(FilteredAggregator agg, TestFloatColumnSelector selector, double... expectedVals)
   {
-    Assert.assertEquals(0.0d, agg.get());
-    Assert.assertEquals(0.0d, agg.get());
-    Assert.assertEquals(0.0d, agg.get());
+    Assert.assertEquals(NullHandling.defaultDoubleValue(), agg.get());
+    Assert.assertEquals(NullHandling.defaultDoubleValue(), agg.get());
+    Assert.assertEquals(NullHandling.defaultDoubleValue(), agg.get());
     for (double expectedVal : expectedVals) {
       aggregate(selector, agg);
       Assert.assertEquals(expectedVal, agg.get());
diff --git a/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java
index b392a24f9a5..27442acfb8c 100644
--- a/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/LongMaxAggregationTest.java
@@ -56,7 +56,7 @@ public void setup()
   @Test
   public void testLongMaxAggregator()
   {
-    LongMaxAggregator agg = (LongMaxAggregator) longMaxAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = longMaxAggFactory.factorize(colSelectorFactory);
 
     aggregate(selector, agg);
     aggregate(selector, agg);
@@ -71,9 +71,9 @@ public void testLongMaxAggregator()
   @Test
   public void testLongMaxBufferAggregator()
   {
-    LongMaxBufferAggregator agg = (LongMaxBufferAggregator) longMaxAggFactory.factorizeBuffered(colSelectorFactory);
+    BufferAggregator agg = longMaxAggFactory.factorizeBuffered(colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[Long.BYTES]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[Long.BYTES + Byte.BYTES]);
     agg.init(buffer, 0);
 
     aggregate(selector, agg, buffer, 0);
@@ -105,13 +105,13 @@ public void testEqualsAndHashCode()
     Assert.assertFalse(one.equals(two));
   }
 
-  private void aggregate(TestLongColumnSelector selector, LongMaxAggregator agg)
+  private void aggregate(TestLongColumnSelector selector, Aggregator agg)
   {
     agg.aggregate();
     selector.increment();
   }
 
-  private void aggregate(TestLongColumnSelector selector, LongMaxBufferAggregator agg, ByteBuffer buff, int position)
+  private void aggregate(TestLongColumnSelector selector, BufferAggregator agg, ByteBuffer buff, int position)
   {
     agg.aggregate(buff, position);
     selector.increment();
diff --git a/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java
index d5b7182057c..b2694541fa5 100644
--- a/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/LongMinAggregationTest.java
@@ -56,7 +56,7 @@ public void setup()
   @Test
   public void testLongMinAggregator()
   {
-    LongMinAggregator agg = (LongMinAggregator) longMinAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = longMinAggFactory.factorize(colSelectorFactory);
 
     aggregate(selector, agg);
     aggregate(selector, agg);
@@ -71,9 +71,9 @@ public void testLongMinAggregator()
   @Test
   public void testLongMinBufferAggregator()
   {
-    LongMinBufferAggregator agg = (LongMinBufferAggregator) longMinAggFactory.factorizeBuffered(colSelectorFactory);
+    BufferAggregator agg = longMinAggFactory.factorizeBuffered(colSelectorFactory);
 
-    ByteBuffer buffer = ByteBuffer.wrap(new byte[Long.BYTES]);
+    ByteBuffer buffer = ByteBuffer.wrap(new byte[Long.BYTES + Byte.BYTES]);
     agg.init(buffer, 0);
 
     aggregate(selector, agg, buffer, 0);
@@ -105,13 +105,13 @@ public void testEqualsAndHashCode()
     Assert.assertFalse(one.equals(two));
   }
 
-  private void aggregate(TestLongColumnSelector selector, LongMinAggregator agg)
+  private void aggregate(TestLongColumnSelector selector, Aggregator agg)
   {
     agg.aggregate();
     selector.increment();
   }
 
-  private void aggregate(TestLongColumnSelector selector, LongMinBufferAggregator agg, ByteBuffer buff, int position)
+  private void aggregate(TestLongColumnSelector selector, BufferAggregator agg, ByteBuffer buff, int position)
   {
     agg.aggregate(buff, position);
     selector.increment();
diff --git a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java
index 1249049e1df..5144f798bde 100644
--- a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java
@@ -27,6 +27,7 @@
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.js.JavaScriptConfig;
 import io.druid.query.ColumnSelectorPlus;
@@ -429,8 +430,8 @@ public void testAggregateValues()
     for (int i = 0; i < values1.size(); ++i) {
       aggregate(selectorList, agg);
     }
-    Assert.assertEquals(7.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get()), 0.05);
-    Assert.assertEquals(7L, rowAggregatorFactoryRounded.finalizeComputation(agg.get()));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 7.0 : 6.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get()), 0.05);
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 7L : 6L, rowAggregatorFactoryRounded.finalizeComputation(agg.get()));
   }
 
   @Test
@@ -473,8 +474,8 @@ public void testBufferAggregateValues()
     for (int i = 0; i < values1.size(); ++i) {
       bufferAggregate(selectorList, agg, buf, pos);
     }
-    Assert.assertEquals(7.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos)), 0.05);
-    Assert.assertEquals(7L, rowAggregatorFactoryRounded.finalizeComputation(agg.get(buf, pos)));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 7.0 : 6.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos)), 0.05);
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 7L : 6L, rowAggregatorFactoryRounded.finalizeComputation(agg.get(buf, pos)));
   }
 
   @Test
@@ -553,11 +554,11 @@ public void testCombineValues()
       aggregate(selector2, agg2);
     }
 
-    Assert.assertEquals(4.0, (Double) valueAggregatorFactory.finalizeComputation(agg1.get()), 0.05);
-    Assert.assertEquals(7.0, (Double) valueAggregatorFactory.finalizeComputation(agg2.get()), 0.05);
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 4.0 : 3.0, (Double) valueAggregatorFactory.finalizeComputation(agg1.get()), 0.05);
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 7.0 : 6.0, (Double) valueAggregatorFactory.finalizeComputation(agg2.get()), 0.05);
 
     Assert.assertEquals(
-        7.0,
+        NullHandling.replaceWithDefault() ? 7.0 : 6.0,
         (Double) rowAggregatorFactory.finalizeComputation(
             rowAggregatorFactory.combine(
                 agg1.get(),
diff --git a/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java
index 8aba7090aba..15d419e06d1 100644
--- a/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/first/DoubleFirstAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestDoubleColumnSelectorImpl;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
@@ -71,7 +73,7 @@ public void setup()
   @Test
   public void testDoubleFirstAggregator()
   {
-    DoubleFirstAggregator agg = (DoubleFirstAggregator) doubleFirstAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = doubleFirstAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -89,7 +91,7 @@ public void testDoubleFirstAggregator()
   @Test
   public void testDoubleFirstBufferAggregator()
   {
-    DoubleFirstBufferAggregator agg = (DoubleFirstBufferAggregator) doubleFirstAggFactory.factorizeBuffered(
+    BufferAggregator agg = doubleFirstAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleFirstAggFactory.getMaxIntermediateSize()]);
@@ -119,7 +121,7 @@ public void testCombine()
   @Test
   public void testDoubleFirstCombiningAggregator()
   {
-    DoubleFirstAggregator agg = (DoubleFirstAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -138,7 +140,7 @@ public void testDoubleFirstCombiningAggregator()
   @Test
   public void testDoubleFirstCombiningBufferAggregator()
   {
-    DoubleFirstBufferAggregator agg = (DoubleFirstBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleFirstAggFactory.getMaxIntermediateSize()]);
@@ -168,7 +170,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      DoubleFirstAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -178,7 +180,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      DoubleFirstBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/first/FloatFirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/first/FloatFirstAggregationTest.java
index ff0f00f5eb0..719c16980dd 100644
--- a/processing/src/test/java/io/druid/query/aggregation/first/FloatFirstAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/first/FloatFirstAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestFloatColumnSelector;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
@@ -71,7 +73,7 @@ public void setup()
   @Test
   public void testDoubleFirstAggregator()
   {
-    FloatFirstAggregator agg = (FloatFirstAggregator) floatFirstAggregatorFactory.factorize(colSelectorFactory);
+    Aggregator agg = floatFirstAggregatorFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -89,7 +91,7 @@ public void testDoubleFirstAggregator()
   @Test
   public void testDoubleFirstBufferAggregator()
   {
-    FloatFirstBufferAggregator agg = (FloatFirstBufferAggregator) floatFirstAggregatorFactory.factorizeBuffered(
+    BufferAggregator agg = floatFirstAggregatorFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[floatFirstAggregatorFactory.getMaxIntermediateSize()]);
@@ -119,7 +121,7 @@ public void testCombine()
   @Test
   public void testDoubleFirstCombiningAggregator()
   {
-    FloatFirstAggregator agg = (FloatFirstAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -138,7 +140,7 @@ public void testDoubleFirstCombiningAggregator()
   @Test
   public void testDoubleFirstCombiningBufferAggregator()
   {
-    FloatFirstBufferAggregator agg = (FloatFirstBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[floatFirstAggregatorFactory.getMaxIntermediateSize()]);
@@ -168,7 +170,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      FloatFirstAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -178,7 +180,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      FloatFirstBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java
index 36a8b5cb3f6..6c48dbfd5ac 100644
--- a/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/first/LongFirstAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
 import io.druid.segment.ColumnSelectorFactory;
@@ -70,7 +72,7 @@ public void setup()
   @Test
   public void testLongFirstAggregator()
   {
-    LongFirstAggregator agg = (LongFirstAggregator) longFirstAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = longFirstAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -88,7 +90,7 @@ public void testLongFirstAggregator()
   @Test
   public void testLongFirstBufferAggregator()
   {
-    LongFirstBufferAggregator agg = (LongFirstBufferAggregator) longFirstAggFactory.factorizeBuffered(
+    BufferAggregator agg = longFirstAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[longFirstAggFactory.getMaxIntermediateSize()]);
@@ -118,7 +120,7 @@ public void testCombine()
   @Test
   public void testLongFirstCombiningAggregator()
   {
-    LongFirstAggregator agg = (LongFirstAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -137,7 +139,7 @@ public void testLongFirstCombiningAggregator()
   @Test
   public void testLongFirstCombiningBufferAggregator()
   {
-    LongFirstBufferAggregator agg = (LongFirstBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[longFirstAggFactory.getMaxIntermediateSize()]);
@@ -167,7 +169,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      LongFirstAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -177,7 +179,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      LongFirstBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java
index c5453e18171..423472df77b 100644
--- a/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/last/DoubleLastAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestDoubleColumnSelectorImpl;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
@@ -71,7 +73,7 @@ public void setup()
   @Test
   public void testDoubleLastAggregator()
   {
-    DoubleLastAggregator agg = (DoubleLastAggregator) doubleLastAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = doubleLastAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -89,7 +91,7 @@ public void testDoubleLastAggregator()
   @Test
   public void testDoubleLastBufferAggregator()
   {
-    DoubleLastBufferAggregator agg = (DoubleLastBufferAggregator) doubleLastAggFactory.factorizeBuffered(
+    BufferAggregator agg = doubleLastAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSize()]);
@@ -119,7 +121,7 @@ public void testCombine()
   @Test
   public void testDoubleLastCombiningAggregator()
   {
-    DoubleLastAggregator agg = (DoubleLastAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -138,7 +140,7 @@ public void testDoubleLastCombiningAggregator()
   @Test
   public void testDoubleLastCombiningBufferAggregator()
   {
-    DoubleLastBufferAggregator agg = (DoubleLastBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[doubleLastAggFactory.getMaxIntermediateSize()]);
@@ -168,7 +170,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      DoubleLastAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -178,7 +180,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      DoubleLastBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/last/FloatLastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/last/FloatLastAggregationTest.java
index 1938f838733..2179f8feb80 100644
--- a/processing/src/test/java/io/druid/query/aggregation/last/FloatLastAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/last/FloatLastAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestFloatColumnSelector;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
@@ -71,7 +73,7 @@ public void setup()
   @Test
   public void testDoubleLastAggregator()
   {
-    FloatLastAggregator agg = (FloatLastAggregator) floatLastAggregatorFactory.factorize(colSelectorFactory);
+    Aggregator agg = floatLastAggregatorFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -89,7 +91,7 @@ public void testDoubleLastAggregator()
   @Test
   public void testDoubleLastBufferAggregator()
   {
-    FloatLastBufferAggregator agg = (FloatLastBufferAggregator) floatLastAggregatorFactory.factorizeBuffered(
+    BufferAggregator agg = floatLastAggregatorFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[floatLastAggregatorFactory.getMaxIntermediateSize()]);
@@ -119,7 +121,7 @@ public void testCombine()
   @Test
   public void testDoubleLastCombiningAggregator()
   {
-    FloatLastAggregator agg = (FloatLastAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -138,7 +140,7 @@ public void testDoubleLastCombiningAggregator()
   @Test
   public void testDoubleLastCombiningBufferAggregator()
   {
-    FloatLastBufferAggregator agg = (FloatLastBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[floatLastAggregatorFactory.getMaxIntermediateSize()]);
@@ -168,7 +170,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      FloatLastAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -178,7 +180,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      FloatLastBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java
index 25d9160e15f..5106749298c 100644
--- a/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java
+++ b/processing/src/test/java/io/druid/query/aggregation/last/LongLastAggregationTest.java
@@ -22,7 +22,9 @@
 import io.druid.collections.SerializablePair;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.Pair;
+import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.BufferAggregator;
 import io.druid.query.aggregation.TestLongColumnSelector;
 import io.druid.query.aggregation.TestObjectColumnSelector;
 import io.druid.segment.ColumnSelectorFactory;
@@ -70,7 +72,7 @@ public void setup()
   @Test
   public void testLongLastAggregator()
   {
-    LongLastAggregator agg = (LongLastAggregator) longLastAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = longLastAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -88,7 +90,7 @@ public void testLongLastAggregator()
   @Test
   public void testLongLastBufferAggregator()
   {
-    LongLastBufferAggregator agg = (LongLastBufferAggregator) longLastAggFactory.factorizeBuffered(
+    BufferAggregator agg = longLastAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[longLastAggFactory.getMaxIntermediateSize()]);
@@ -118,7 +120,7 @@ public void testCombine()
   @Test
   public void testLongLastCombiningAggregator()
   {
-    LongLastAggregator agg = (LongLastAggregator) combiningAggFactory.factorize(colSelectorFactory);
+    Aggregator agg = combiningAggFactory.factorize(colSelectorFactory);
 
     aggregate(agg);
     aggregate(agg);
@@ -137,7 +139,7 @@ public void testLongLastCombiningAggregator()
   @Test
   public void testLongLastCombiningBufferAggregator()
   {
-    LongLastBufferAggregator agg = (LongLastBufferAggregator) combiningAggFactory.factorizeBuffered(
+    BufferAggregator agg = combiningAggFactory.factorizeBuffered(
         colSelectorFactory);
 
     ByteBuffer buffer = ByteBuffer.wrap(new byte[longLastAggFactory.getMaxIntermediateSize()]);
@@ -167,7 +169,7 @@ public void testSerde() throws Exception
   }
 
   private void aggregate(
-      LongLastAggregator agg
+      Aggregator agg
   )
   {
     agg.aggregate();
@@ -177,7 +179,7 @@ private void aggregate(
   }
 
   private void aggregate(
-      LongLastBufferAggregator agg,
+      BufferAggregator agg,
       ByteBuffer buff,
       int position
   )
diff --git a/processing/src/test/java/io/druid/query/extraction/FunctionalExtractionTest.java b/processing/src/test/java/io/druid/query/extraction/FunctionalExtractionTest.java
index 609e74f375d..42560206a52 100644
--- a/processing/src/test/java/io/druid/query/extraction/FunctionalExtractionTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/FunctionalExtractionTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
+import io.druid.common.config.NullHandling;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -135,7 +136,7 @@ public void testRetainMissing()
         false
     );
     final String out = fn.apply(in);
-    Assert.assertEquals(Strings.isNullOrEmpty(out) ? in : out, exFn.apply(in));
+    Assert.assertEquals(NullHandling.isNullOrEquivalent(out) ? in : out, exFn.apply(in));
   }
 
   @Test
@@ -149,7 +150,7 @@ public void testRetainMissingButFound()
         false
     );
     final String out = fn.apply(in);
-    Assert.assertEquals(Strings.isNullOrEmpty(out) ? in : out, exFn.apply(in));
+    Assert.assertEquals(NullHandling.isNullOrEquivalent(out) ? in : out, exFn.apply(in));
   }
 
   @Test
@@ -163,7 +164,11 @@ public void testReplaceMissing()
         false
     );
     final String out = fn.apply(in);
-    Assert.assertEquals(Strings.isNullOrEmpty(out) ? MISSING : out, exFn.apply(in));
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(NullHandling.isNullOrEquivalent(out) ? MISSING : out, exFn.apply(in));
+    } else {
+      Assert.assertEquals(out == null ? MISSING : out, exFn.apply(in));
+    }
   }
 
 
@@ -178,7 +183,11 @@ public void testReplaceMissingBlank()
         false
     );
     final String out = fn.apply(in);
-    Assert.assertEquals(Strings.isNullOrEmpty(out) ? null : out, exFn.apply(in));
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(Strings.isNullOrEmpty(out) ? null : out, exFn.apply(in));
+    } else {
+      Assert.assertEquals(out == null ? "" : out, exFn.apply(in));
+    }
   }
 
   @Test
@@ -192,7 +201,11 @@ public void testOnlyOneValuePresent()
         false
     );
     final String out = fn.apply(in);
-    Assert.assertEquals(Strings.isNullOrEmpty(out) ? null : out, exFn.apply(in));
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(Strings.isNullOrEmpty(out) ? null : out, exFn.apply(in));
+    } else {
+      Assert.assertEquals(Strings.isNullOrEmpty(out) ? "" : out, exFn.apply(in));
+    }
   }
 
   @Test
@@ -204,7 +217,7 @@ public void testNullInputs()
         null,
         false
     );
-    if (Strings.isNullOrEmpty(fn.apply(null))) {
+    if (NullHandling.isNullOrEquivalent(fn.apply(null))) {
       Assert.assertEquals(null, exFn.apply(null));
     }
   }
diff --git a/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java
index 6c2dc0e6d96..d8b1bd55925 100644
--- a/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java
@@ -23,6 +23,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.DateTimes;
 import io.druid.js.JavaScriptConfig;
@@ -131,7 +132,11 @@ public void testJavascriptIsNull()
 
     Assert.assertEquals("yes", extractionFn.apply((String) null));
     Assert.assertEquals("yes", extractionFn.apply((Object) null));
-    Assert.assertEquals("yes", extractionFn.apply(""));
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals("yes", extractionFn.apply(""));
+    } else {
+      Assert.assertEquals("no", extractionFn.apply(""));
+    }
     Assert.assertEquals("no", extractionFn.apply("abc"));
     Assert.assertEquals("no", extractionFn.apply(new Object()));
     Assert.assertEquals("no", extractionFn.apply(1));
diff --git a/processing/src/test/java/io/druid/query/extraction/LowerExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/LowerExtractionFnTest.java
index 1a7727adbaf..fc014fa918a 100644
--- a/processing/src/test/java/io/druid/query/extraction/LowerExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/LowerExtractionFnTest.java
@@ -19,6 +19,7 @@
 
 package io.druid.query.extraction;
 
+import io.druid.common.config.NullHandling;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -31,7 +32,7 @@
   public void testApply()
   {
     Assert.assertEquals("lower 1 string", extractionFn.apply("lOwER 1 String"));
-    Assert.assertEquals(null, extractionFn.apply(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(""));
     Assert.assertEquals(null, extractionFn.apply(null));
     Assert.assertEquals(null, extractionFn.apply((Object) null));
     Assert.assertEquals("1", extractionFn.apply(1));
diff --git a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java
index 870812dcac4..dfb0450809c 100644
--- a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java
@@ -21,6 +21,7 @@
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -39,9 +40,19 @@ public void testUnApply()
   {
     Assert.assertEquals(Arrays.asList("foo"), fn.unapply("bar"));
     Assert.assertEquals(Sets.newHashSet("null", "empty String"), Sets.newHashSet(fn.unapply("")));
-    Assert.assertEquals("Null value should be equal to empty string",
-                        Sets.newHashSet("null", "empty String"),
-                        Sets.newHashSet(fn.unapply((String) null)));
+    if (NullHandling.sqlCompatible()) {
+      Assert.assertEquals(
+          "Null value should be equal to empty list",
+          Sets.newHashSet(),
+          Sets.newHashSet(fn.unapply((String) null))
+      );
+    } else {
+      Assert.assertEquals(
+          "Null value should be equal to empty string",
+          Sets.newHashSet("null", "empty String"),
+          Sets.newHashSet(fn.unapply((String) null))
+      );
+    }
     Assert.assertEquals(Sets.newHashSet(""), Sets.newHashSet(fn.unapply("empty_string")));
     Assert.assertEquals("not existing value returns empty list", Collections.EMPTY_LIST, fn.unapply("not There"));
   }
@@ -55,7 +66,6 @@ public void testGetMap()
   @Test
   public void testApply()
   {
-
     Assert.assertEquals("bar", fn.apply("foo"));
   }
 
diff --git a/processing/src/test/java/io/druid/query/extraction/MatchingDimExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/MatchingDimExtractionFnTest.java
index 4415056f542..14c0d293660 100644
--- a/processing/src/test/java/io/druid/query/extraction/MatchingDimExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/MatchingDimExtractionFnTest.java
@@ -21,6 +21,7 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
@@ -75,7 +76,7 @@ public void testNullExtraction()
 
     Assert.assertNull(extractionFn.apply((Object) null));
     Assert.assertNull(extractionFn.apply((String) null));
-    Assert.assertNull(extractionFn.apply((String) ""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply((String) ""));
   }
 
   @Test
diff --git a/processing/src/test/java/io/druid/query/extraction/RegexDimExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/RegexDimExtractionFnTest.java
index ce6e468ccd9..a08f6686b6d 100644
--- a/processing/src/test/java/io/druid/query/extraction/RegexDimExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/RegexDimExtractionFnTest.java
@@ -22,6 +22,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
@@ -145,11 +146,11 @@ public void testNullAndEmpty()
     String regex = "(.*)/.*/.*";
     ExtractionFn extractionFn = new RegexDimExtractionFn(regex, false, null);
     // no match, map empty input value to null
-    Assert.assertEquals(null, extractionFn.apply(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(""));
     // null value, returns null
     Assert.assertEquals(null, extractionFn.apply(null));
     // empty match, map empty result to null
-    Assert.assertEquals(null, extractionFn.apply("/a/b"));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply("/a/b"));
   }
 
   @Test
@@ -168,8 +169,8 @@ public void testMissingValueReplacementWhenPatternMatchesNull()
   {
     String regex = "^()$";
     ExtractionFn extractionFn = new RegexDimExtractionFn(regex, true, "NO MATCH");
-    Assert.assertEquals(null, extractionFn.apply(""));
-    Assert.assertEquals(null, extractionFn.apply(null));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "NO MATCH", extractionFn.apply(null));
     Assert.assertEquals("NO MATCH", extractionFn.apply("abc"));
   }
 
@@ -178,10 +179,10 @@ public void testMissingValueReplacementToEmpty()
   {
     String regex = "(bob)";
     ExtractionFn extractionFn = new RegexDimExtractionFn(regex, true, "");
-    Assert.assertEquals(null, extractionFn.apply(null));
-    Assert.assertEquals(null, extractionFn.apply(""));
-    Assert.assertEquals(null, extractionFn.apply("abc"));
-    Assert.assertEquals(null, extractionFn.apply("123"));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(null));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply("abc"));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply("123"));
     Assert.assertEquals("bob", extractionFn.apply("bobby"));
   }
 
diff --git a/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java
index 43289c3ddc3..b2248f72d56 100644
--- a/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/StringFormatExtractionFnTest.java
@@ -21,6 +21,7 @@
 
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
@@ -55,7 +56,10 @@ public void testApplyNull2()
   {
     String test = null;
     Assert.assertEquals("null", format("%s", "nullString").apply(test));
-    Assert.assertNull(format("%s", "emptyString").apply(test));
+    Assert.assertEquals(
+        NullHandling.emptyToNullIfNeeded(""),
+        format("%s", "emptyString").apply(test)
+    );
     Assert.assertNull(format("%s", "returnNull").apply(test));
   }
 
diff --git a/processing/src/test/java/io/druid/query/extraction/StrlenExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/StrlenExtractionFnTest.java
index 08b102c6cfb..d83388a7570 100644
--- a/processing/src/test/java/io/druid/query/extraction/StrlenExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/StrlenExtractionFnTest.java
@@ -20,6 +20,7 @@
 package io.druid.query.extraction;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
@@ -29,7 +30,7 @@
   @Test
   public void testApply()
   {
-    Assert.assertEquals("0", StrlenExtractionFn.instance().apply(null));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? "0" : null, StrlenExtractionFn.instance().apply(null));
     Assert.assertEquals("0", StrlenExtractionFn.instance().apply(""));
     Assert.assertEquals("1", StrlenExtractionFn.instance().apply("x"));
     Assert.assertEquals("3", StrlenExtractionFn.instance().apply("foo"));
diff --git a/processing/src/test/java/io/druid/query/extraction/TimeDimExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/TimeDimExtractionFnTest.java
index a8ea07a9ce5..e368f047cc6 100644
--- a/processing/src/test/java/io/druid/query/extraction/TimeDimExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/TimeDimExtractionFnTest.java
@@ -21,6 +21,7 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
@@ -48,7 +49,11 @@ public void testEmptyNullAndUnparseableExtraction()
       ExtractionFn extractionFn = new TimeDimExtractionFn("MM/dd/yyyy", "MM/yyyy", joda);
 
       Assert.assertNull(extractionFn.apply(null));
-      Assert.assertNull(extractionFn.apply(""));
+      if (NullHandling.replaceWithDefault()) {
+        Assert.assertNull(extractionFn.apply(""));
+      } else {
+        Assert.assertEquals("", extractionFn.apply(""));
+      }
       Assert.assertEquals("foo", extractionFn.apply("foo"));
     }
   }
diff --git a/processing/src/test/java/io/druid/query/extraction/UpperExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/UpperExtractionFnTest.java
index 1d038c0674a..fd23548d2b6 100644
--- a/processing/src/test/java/io/druid/query/extraction/UpperExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/extraction/UpperExtractionFnTest.java
@@ -19,6 +19,7 @@
 
 package io.druid.query.extraction;
 
+import io.druid.common.config.NullHandling;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -31,7 +32,7 @@
   public void testApply()
   {
     Assert.assertEquals("UPPER", extractionFn.apply("uPpeR"));
-    Assert.assertEquals(null, extractionFn.apply(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? null : "", extractionFn.apply(""));
     Assert.assertEquals(null, extractionFn.apply(null));
     Assert.assertEquals(null, extractionFn.apply((Object) null));
     Assert.assertEquals("1", extractionFn.apply(1));
diff --git a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java
index 1ba026c01b9..a72b2b68313 100644
--- a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java
+++ b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java
@@ -23,6 +23,7 @@
 import com.google.common.collect.ImmutableRangeSet;
 import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.Intervals;
 import io.druid.js.JavaScriptConfig;
 import io.druid.query.extraction.IdentityExtractionFn;
@@ -60,7 +61,8 @@
   );
   private final DimFilter other1 = new RegexDimFilter("someDim", "pattern", null);
   private final DimFilter other2 = new JavaScriptDimFilter("someOtherDim", "function(x) { return x }", null,
-                                                           JavaScriptConfig.getEnabledInstance());
+                                                           JavaScriptConfig.getEnabledInstance()
+  );
   private final DimFilter other3 = new SearchQueryDimFilter("dim", new ContainsSearchQuerySpec("a", true), null);
 
   private final DimFilter interval1 = new IntervalDimFilter(
@@ -91,13 +93,13 @@ public void testSimpleFilter()
     Assert.assertEquals(expected1, selector1.getDimensionRangeSet("dim1"));
     Assert.assertNull(selector1.getDimensionRangeSet("dim2"));
 
-    RangeSet expected2 = rangeSet(point(""));
+    RangeSet expected2 = rangeSet(point(null));
     Assert.assertEquals(expected2, selector5.getDimensionRangeSet("dim1"));
 
     RangeSet expected3 = rangeSet(ImmutableList.of(point("testing"), point("this"), point("filter"), point("tillend")));
     Assert.assertEquals(expected3, in1.getDimensionRangeSet("dim1"));
 
-    RangeSet expected4 = rangeSet(ImmutableList.of(point("null"), point("")));
+    RangeSet expected4 = rangeSet(ImmutableList.of(point("null"), point(null)));
     Assert.assertEquals(expected4, in3.getDimensionRangeSet("dim1"));
 
     RangeSet expected5 = ImmutableRangeSet.of(Range.closed("from", "to"));
@@ -146,12 +148,13 @@ public void testAndFilter()
   public void testOrFilter()
   {
     DimFilter or1 = new OrDimFilter(ImmutableList.of(selector1, selector2, selector5));
-    RangeSet expected1 = rangeSet(ImmutableList.of(point(""), point("a"), point("z")));
+    RangeSet expected1 = rangeSet(ImmutableList.of(point(null), point("a"), point("z")));
     Assert.assertEquals(expected1, or1.getDimensionRangeSet("dim1"));
 
     DimFilter or2 = new OrDimFilter(ImmutableList.of(selector5, in1, in3));
     RangeSet expected2 = rangeSet(ImmutableList.of(point("testing"), point("this"), point("filter"), point("tillend"),
-                                                   point("null"), point("")));
+                                                   point("null"), point(null)
+    ));
     Assert.assertEquals(expected2, or2.getDimensionRangeSet("dim1"));
 
     DimFilter or3 = new OrDimFilter(ImmutableList.of(bound1, bound2, bound3));
@@ -162,11 +165,13 @@ public void testOrFilter()
     Assert.assertNull(or4.getDimensionRangeSet("dim2"));
 
     DimFilter or5 = new OrDimFilter(ImmutableList.of(or1, or2, bound1));
-    RangeSet expected5 = rangeSet(ImmutableList.of(point(""), point("a"), point("filter"), Range.closed("from", "to"),
-                                                   point("z")));
+    RangeSet expected5 = rangeSet(ImmutableList.of(point(null), point("a"), point("filter"), Range.closed("from", "to"),
+                                                   point("z")
+    ));
     Assert.assertEquals(expected5, or5.getDimensionRangeSet("dim1"));
   }
 
+
   @Test
   public void testNotFilter()
   {
@@ -176,15 +181,28 @@ public void testNotFilter()
     Assert.assertNull(not1.getDimensionRangeSet("dim2"));
 
     DimFilter not2 = new NotDimFilter(in3);
-    RangeSet expected2 = rangeSet(ImmutableList.of(Range.lessThan(""), Range.open("", "null"), Range.greaterThan("null")));
-    Assert.assertEquals(expected2, not2.getDimensionRangeSet("dim1"));
+    if (NullHandling.sqlCompatible()) {
+      // Empty string is included when != null for SQL Compatible case
+      RangeSet expected2 = rangeSet(ImmutableList.of(
+          Range.closedOpen("", "null"),
+          Range.greaterThan("null")
+      ));
+      Assert.assertEquals(expected2, not2.getDimensionRangeSet("dim1"));
+    } else {
+      RangeSet expected2 = rangeSet(ImmutableList.of(
+          Range.lessThan(""),
+          Range.open("", "null"),
+          Range.greaterThan("null")
+      ));
+      Assert.assertEquals(expected2, not2.getDimensionRangeSet("dim1"));
+    }
 
     DimFilter not3 = new NotDimFilter(bound1);
     RangeSet expected3 = rangeSet(ImmutableList.of(Range.lessThan("from"), Range.greaterThan("to")));
     Assert.assertEquals(expected3, not3.getDimensionRangeSet("dim1"));
 
     DimFilter not4 = new NotDimFilter(not2);
-    RangeSet expected4 = rangeSet(ImmutableList.of(point(""), point("null")));
+    RangeSet expected4 = rangeSet(ImmutableList.of(point(null), point("null")));
     Assert.assertEquals(expected4, not4.getDimensionRangeSet("dim1"));
 
     DimFilter or1 = new OrDimFilter(ImmutableList.of(selector1, selector2, bound1, bound3));
@@ -203,7 +221,8 @@ public void testNotFilter()
     DimFilter and1 = new AndDimFilter(ImmutableList.of(in1, bound1, bound2));
     DimFilter not7 = new NotDimFilter(and1);
     RangeSet expected7 = rangeSet(ImmutableList.of(Range.lessThan("testing"), Range.open("testing", "this"),
-                                                   Range.open("this", "tillend"), Range.greaterThan("tillend")));
+                                                   Range.open("this", "tillend"), Range.greaterThan("tillend")
+    ));
     Assert.assertEquals(expected7, not7.getDimensionRangeSet("dim1"));
     Assert.assertNull(not7.getDimensionRangeSet("dim2"));
 
@@ -216,6 +235,15 @@ public void testNotFilter()
 
   private static Range<String> point(String s)
   {
+    if (s == null) {
+      if (NullHandling.sqlCompatible()) {
+        // Range.singleton(null) is invalid
+        return Range.lessThan("");
+      } else {
+        // For non-sql compatible case, null and "" are equivalent
+        return Range.singleton("");
+      }
+    }
     return Range.singleton(s);
   }
 
diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java
index ba04778d2d8..5cb158bc531 100644
--- a/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java
@@ -561,7 +561,7 @@ public void testDescendingNumerics()
         "d2", 13L,
         "a0", 2L
     );
-
+    System.out.println(results);
     Assert.assertEquals(4, results.size());
     Assert.assertEquals(expectedRow0, results.get(0));
     Assert.assertEquals(expectedRow1, results.get(1));
diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java
index e35f8b7f2bb..64b7ec7c695 100644
--- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java
@@ -35,6 +35,7 @@
 import io.druid.collections.DefaultBlockingPool;
 import io.druid.collections.NonBlockingPool;
 import io.druid.collections.StupidPool;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.Row;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.IAE;
@@ -7011,12 +7012,23 @@ public void testGroupByWithExtractionDimFilterCaseMappingValueIsNullOrEmpty()
                                      .setGranularity(QueryRunnerTestHelper.dayGran)
                                      .setDimFilter(new ExtractionDimFilter("quality", "", lookupExtractionFn, null))
                                      .build();
-    List<Row> expectedResults = Arrays.asList(
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 3L, "idx", 2870L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 121L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 2447L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 114L)
-    );
+
+    List<Row> expectedResults;
+
+    if (NullHandling.replaceWithDefault()) {
+      expectedResults = Arrays.asList(
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 3L, "idx", 2870L),
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 121L),
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 2447L),
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 114L)
+      );
+    } else {
+      // Only empty string should match, nulls will not match
+      expectedResults = Arrays.asList(
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 121L),
+          GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 114L)
+      );
+    }
 
     Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
     TestHelper.assertExpectedObjects(expectedResults, results, "");
@@ -7065,10 +7077,17 @@ public void testGroupByWithExtractionDimFilterWhenSearchValueNotInTheMap()
   public void testGroupByWithExtractionDimFilterKeyisNull()
   {
     Map<String, String> extractionMap = new HashMap<>();
-    extractionMap.put("", "NULLorEMPTY");
+
 
     MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
-    LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false);
+    LookupExtractionFn lookupExtractionFn;
+    if (NullHandling.replaceWithDefault()) {
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false);
+      extractionMap.put("", "REPLACED_VALUE");
+    } else {
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, "REPLACED_VALUE", true, false);
+      extractionMap.put("", "NOT_USED");
+    }
 
     GroupByQuery query = GroupByQuery.builder().setDataSource(QueryRunnerTestHelper.dataSource)
                                      .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
@@ -7090,7 +7109,7 @@ public void testGroupByWithExtractionDimFilterKeyisNull()
                                      .setDimFilter(
                                          new ExtractionDimFilter(
                                              "null_column",
-                                             "NULLorEMPTY",
+                                             "REPLACED_VALUE",
                                              lookupExtractionFn,
                                              null
                                          )
@@ -7146,25 +7165,137 @@ public void testGroupByWithAggregatorFilterAndExtractionFunction()
                                      .setGranularity(QueryRunnerTestHelper.dayGran)
                                      .build();
     List<Row> expectedResults = Arrays.asList(
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "automotive", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "business", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "health", "rows", 0L, "idx", 0L),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "automotive",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "business",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "entertainment",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "health",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 3L, "idx", 2870L),
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 121L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "technology", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "travel", "rows", 0L, "idx", 0L),
-
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "automotive", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "business", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "entertainment", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "health", "rows", 0L, "idx", 0L),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "premium",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "technology",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-01",
+            "alias",
+            "travel",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "automotive",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "business",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "entertainment",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "health",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 2447L),
         GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "news", "rows", 1L, "idx", 114L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "premium", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "technology", "rows", 0L, "idx", 0L),
-        GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "travel", "rows", 0L, "idx", 0L)
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "premium",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "technology",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        ),
+        GroupByQueryRunnerTestHelper.createExpectedRow(
+            "2011-04-02",
+            "alias",
+            "travel",
+            "rows",
+            0L,
+            "idx",
+            NullHandling.defaultLongValue()
+        )
     );
 
     Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
@@ -7219,7 +7350,14 @@ public void testGroupByWithExtractionDimFilterNullDims()
     extractionMap.put("", "EMPTY");
 
     MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
-    LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
+    LookupExtractionFn lookupExtractionFn;
+    if (NullHandling.replaceWithDefault()) {
+      extractionMap.put("", "EMPTY");
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
+    } else {
+      extractionMap.put("", "SHOULD_NOT_BE_USED");
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, "EMPTY", true, true);
+    }
 
     GroupByQuery query = GroupByQuery.builder().setDataSource(QueryRunnerTestHelper.dataSource)
                                      .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
@@ -7345,7 +7483,7 @@ public void testGroupByWithAllFiltersOnNullDimsWithExtractionFns()
     extractionMap.put(null, "EMPTY");
 
     MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
-    LookupExtractionFn extractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
+    LookupExtractionFn extractionFn = new LookupExtractionFn(mapLookupExtractor, false, "EMPTY", true, true);
     String jsFn = "function(x) { return(x === 'EMPTY') }";
 
     List<DimFilter> superFilterList = new ArrayList<>();
@@ -8205,14 +8343,16 @@ public void testGroupByNumericStringsAsNumericWithDecoration()
             )
         )
         .setGranularity(QueryRunnerTestHelper.allGran)
+        .addOrderByColumn("ql")
         .build();
 
+    List<Row> expectedResults;
     // "entertainment" rows are excluded by the decorated specs, they become empty rows
-    List<Row> expectedResults = Arrays.asList(
+    expectedResults = Arrays.asList(
         GroupByQueryRunnerTestHelper.createExpectedRow(
             "2011-04-01",
-            "ql", 0L,
-            "qf", 0.0,
+            "ql", NullHandling.defaultLongValue(),
+            "qf", NullHandling.defaultDoubleValue(),
             "count", 2L
         ),
         GroupByQueryRunnerTestHelper.createExpectedRow(
@@ -8224,6 +8364,7 @@ public void testGroupByNumericStringsAsNumericWithDecoration()
     );
 
     Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
+    System.out.println(results);
     TestHelper.assertExpectedObjects(expectedResults, results, "");
   }
 
@@ -8264,21 +8405,38 @@ public void testGroupByDecorationOnNumerics()
         )
         .setGranularity(QueryRunnerTestHelper.allGran)
         .build();
-
-    List<Row> expectedResults = Arrays.asList(
-        GroupByQueryRunnerTestHelper.createExpectedRow(
-            "2011-04-01",
-            "ql", 0L,
-            "qf", 0.0,
-            "count", 2L
-        ),
-        GroupByQueryRunnerTestHelper.createExpectedRow(
-            "2011-04-01",
-            "ql", 1700L,
-            "qf", 17000.0,
-            "count", 2L
-        )
-    );
+    List<Row> expectedResults;
+    if (NullHandling.replaceWithDefault()) {
+      expectedResults = Arrays.asList(
+          GroupByQueryRunnerTestHelper.createExpectedRow(
+              "2011-04-01",
+              "ql", 0L,
+              "qf", 0.0,
+              "count", 2L
+          ),
+          GroupByQueryRunnerTestHelper.createExpectedRow(
+              "2011-04-01",
+              "ql", 1700L,
+              "qf", 17000.0,
+              "count", 2L
+          )
+      );
+    } else {
+      expectedResults = Arrays.asList(
+          GroupByQueryRunnerTestHelper.createExpectedRow(
+              "2011-04-01",
+              "ql", null,
+              "qf", null,
+              "count", 2L
+          ),
+          GroupByQueryRunnerTestHelper.createExpectedRow(
+              "2011-04-01",
+              "ql", 1700L,
+              "qf", 17000.0,
+              "count", 2L
+          )
+      );
+    }
 
     Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
     TestHelper.assertExpectedObjects(expectedResults, results, "");
diff --git a/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouperTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouperTest.java
index acdc2c40604..c55fbf2b0af 100644
--- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouperTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferArrayGrouperTest.java
@@ -25,6 +25,7 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Ordering;
 import com.google.common.primitives.Ints;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
@@ -90,17 +91,23 @@ private BufferArrayGrouper newGrouper(
   @Test
   public void testRequiredBufferCapacity()
   {
-    int[] cardinalityArray = new int[] {1, 10, Integer.MAX_VALUE - 1};
-    AggregatorFactory[] aggregatorFactories = new AggregatorFactory[] {
+    int[] cardinalityArray = new int[]{1, 10, Integer.MAX_VALUE - 1};
+    AggregatorFactory[] aggregatorFactories = new AggregatorFactory[]{
         new LongSumAggregatorFactory("sum", "sum")
     };
-
-    long[] requiredSizes = new long[] {17, 90, 16911433721L};
+    long[] requiredSizes;
+    if (NullHandling.sqlCompatible()) {
+      // We need additional size to store nullability information.
+      requiredSizes = new long[]{19, 101, 19058917368L};
+    } else {
+      requiredSizes = new long[]{17, 90, 16911433721L};
+    }
 
     for (int i = 0; i < cardinalityArray.length; i++) {
       Assert.assertEquals(requiredSizes[i], BufferArrayGrouper.requiredBufferCapacity(
           cardinalityArray[i],
-          aggregatorFactories));
+          aggregatorFactories
+      ));
     }
   }
 }
diff --git a/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java
index d3bd541a66b..37a8560dffb 100644
--- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java
@@ -27,6 +27,7 @@
 import com.google.common.collect.Ordering;
 import com.google.common.io.Files;
 import com.google.common.primitives.Ints;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.java.util.common.ByteBufferUtils;
 import io.druid.query.aggregation.AggregatorFactory;
@@ -111,7 +112,7 @@ public void testGrowing()
   {
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
     final Grouper<Integer> grouper = makeGrouper(columnSelectorFactory, 10000, 2);
-    final int expectedMaxSize = 219;
+    final int expectedMaxSize = NullHandling.replaceWithDefault() ? 219 : 210;
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.<String, Object>of("value", 10L)));
     for (int i = 0; i < expectedMaxSize; i++) {
@@ -139,7 +140,7 @@ public void testGrowing2()
   {
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
     final Grouper<Integer> grouper = makeGrouper(columnSelectorFactory, 2_000_000_000, 2);
-    final int expectedMaxSize = 40988516;
+    final int expectedMaxSize = NullHandling.replaceWithDefault() ? 40988516 : 39141224;
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.<String, Object>of("value", 10L)));
     for (int i = 0; i < expectedMaxSize; i++) {
@@ -153,7 +154,7 @@ public void testGrowing3()
   {
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
     final Grouper<Integer> grouper = makeGrouper(columnSelectorFactory, Integer.MAX_VALUE, 2);
-    final int expectedMaxSize = 44938972;
+    final int expectedMaxSize = NullHandling.replaceWithDefault() ? 44938972 : 42955456;
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.<String, Object>of("value", 10L)));
     for (int i = 0; i < expectedMaxSize; i++) {
@@ -167,7 +168,7 @@ public void testNoGrowing()
   {
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
     final Grouper<Integer> grouper = makeGrouper(columnSelectorFactory, 10000, Integer.MAX_VALUE);
-    final int expectedMaxSize = 267;
+    final int expectedMaxSize = NullHandling.replaceWithDefault() ? 267 : 258;
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.<String, Object>of("value", 10L)));
     for (int i = 0; i < expectedMaxSize; i++) {
diff --git a/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java
index 6710b74bd8f..815e862742a 100644
--- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.java.util.common.IAE;
 import io.druid.query.aggregation.AggregatorFactory;
@@ -53,20 +54,37 @@ public void testLimitAndBufferSwapping()
     for (int i = 0; i < numRows; i++) {
       Assert.assertTrue(String.valueOf(i + keyBase), grouper.aggregate(i + keyBase).isOk());
     }
+    if (NullHandling.replaceWithDefault()) {
+      // bucket size is hash(int) + key(int) + aggs(2 longs) + heap offset(int) = 28 bytes
+      // limit is 100 so heap occupies 101 * 4 bytes = 404 bytes
+      // buffer is 20000 bytes, so table arena size is 20000 - 404 = 19596 bytes
+      // table arena is split in halves when doing push down, so each half is 9798 bytes
+      // each table arena half can hold 9798 / 28 = 349 buckets, with load factor of 0.5 max buckets per half is 174
+      // First buffer swap occurs when we hit 174 buckets
+      // Subsequent buffer swaps occur after every 74 buckets, since we keep 100 buckets due to the limit
+      // With 1000 keys inserted, this results in one swap at the first 174 buckets, then 11 swaps afterwards.
+      // After the last swap, we have 100 keys + 12 new keys inserted.
+      Assert.assertEquals(12, grouper.getGrowthCount());
+      Assert.assertEquals(112, grouper.getSize());
+      Assert.assertEquals(349, grouper.getBuckets());
+      Assert.assertEquals(174, grouper.getMaxSize());
+    } else {
+      // With Nullability enabled
+      // bucket size is hash(int) + key(int) + aggs(2 longs + 1 byte for Long Agg nullability) + heap offset(int) = 29 bytes
+      // limit is 100 so heap occupies 101 * 4 bytes = 404 bytes
+      // buffer is 20000 bytes, so table arena size is 20000 - 404 = 19596 bytes
+      // table arena is split in halves when doing push down, so each half is 9798 bytes
+      // each table arena half can hold 9798 / 29 = 337 buckets, with load factor of 0.5 max buckets per half is 168
+      // First buffer swap occurs when we hit 168 buckets
+      // Subsequent buffer swaps occur after every 68 buckets, since we keep 100 buckets due to the limit
+      // With 1000 keys inserted, this results in one swap at the first 168 buckets, then 12 swaps afterwards.
+      // After the last swap, we have 100 keys + 16 new keys inserted.
+      Assert.assertEquals(13, grouper.getGrowthCount());
+      Assert.assertEquals(116, grouper.getSize());
+      Assert.assertEquals(337, grouper.getBuckets());
+      Assert.assertEquals(168, grouper.getMaxSize());
+    }
 
-    // bucket size is hash(int) + key(int) + aggs(2 longs) + heap offset(int) = 28 bytes
-    // limit is 100 so heap occupies 101 * 4 bytes = 404 bytes
-    // buffer is 20000 bytes, so table arena size is 20000 - 404 = 19596 bytes
-    // table arena is split in halves when doing push down, so each half is 9798 bytes
-    // each table arena half can hold 9798 / 28 = 349 buckets, with load factor of 0.5 max buckets per half is 174
-    // First buffer swap occurs when we hit 174 buckets
-    // Subsequent buffer swaps occur after every 74 buckets, since we keep 100 buckets due to the limit
-    // With 1000 keys inserted, this results in one swap at the first 174 buckets, then 11 swaps afterwards.
-    // After the last swap, we have 100 keys + 12 new keys inserted.
-    Assert.assertEquals(12, grouper.getGrowthCount());
-    Assert.assertEquals(112, grouper.getSize());
-    Assert.assertEquals(349, grouper.getBuckets());
-    Assert.assertEquals(174, grouper.getMaxSize());
     Assert.assertEquals(100, grouper.getLimit());
 
     // Aggregate slightly different row
@@ -77,14 +95,27 @@ public void testLimitAndBufferSwapping()
       Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk());
     }
 
-    // we added another 1000 unique keys
-    // previous size is 112, so next swap occurs after 62 rows
-    // after that, there are 1000 - 62 = 938 rows, 938 / 74 = 12 additional swaps after the first,
-    // with 50 keys being added after the final swap.
-    Assert.assertEquals(25, grouper.getGrowthCount());
-    Assert.assertEquals(150, grouper.getSize());
-    Assert.assertEquals(349, grouper.getBuckets());
-    Assert.assertEquals(174, grouper.getMaxSize());
+    if (NullHandling.replaceWithDefault()) {
+      // we added another 1000 unique keys
+      // previous size is 112, so next swap occurs after 62 rows
+      // after that, there are 1000 - 62 = 938 rows, 938 / 74 = 12 additional swaps after the first,
+      // with 50 keys being added after the final swap.
+      Assert.assertEquals(25, grouper.getGrowthCount());
+      Assert.assertEquals(150, grouper.getSize());
+      Assert.assertEquals(349, grouper.getBuckets());
+      Assert.assertEquals(174, grouper.getMaxSize());
+    } else {
+      // With Nullable Aggregator
+      // we added another 1000 unique keys
+      // previous size is 116, so next swap occurs after 52 rows
+      // after that, there are 1000 - 52 = 948 rows, 948 / 68 = 13 additional swaps after the first,
+      // with 64 keys being added after the final swap.
+      Assert.assertEquals(27, grouper.getGrowthCount());
+      Assert.assertEquals(164, grouper.getSize());
+      Assert.assertEquals(337, grouper.getBuckets());
+      Assert.assertEquals(168, grouper.getMaxSize());
+    }
+
     Assert.assertEquals(100, grouper.getLimit());
 
     final List<Grouper.Entry<Integer>> expected = Lists.newArrayList();
@@ -110,7 +141,7 @@ public void testMinBufferSize()
     final int limit = 100;
     final int keyBase = 100000;
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
-    final LimitedBufferHashGrouper<Integer> grouper = makeGrouper(columnSelectorFactory, 11716, 2, limit);
+    final LimitedBufferHashGrouper<Integer> grouper = makeGrouper(columnSelectorFactory, 12120, 2, limit);
     final int numRows = 1000;
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.<String, Object>of("value", 10L)));
@@ -119,10 +150,17 @@ public void testMinBufferSize()
     }
 
     // With minimum buffer size, after the first swap, every new key added will result in a swap
-    Assert.assertEquals(899, grouper.getGrowthCount());
-    Assert.assertEquals(101, grouper.getSize());
-    Assert.assertEquals(202, grouper.getBuckets());
-    Assert.assertEquals(101, grouper.getMaxSize());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(224, grouper.getGrowthCount());
+      Assert.assertEquals(104, grouper.getSize());
+      Assert.assertEquals(209, grouper.getBuckets());
+      Assert.assertEquals(104, grouper.getMaxSize());
+    } else {
+      Assert.assertEquals(899, grouper.getGrowthCount());
+      Assert.assertEquals(101, grouper.getSize());
+      Assert.assertEquals(202, grouper.getBuckets());
+      Assert.assertEquals(101, grouper.getMaxSize());
+    }
     Assert.assertEquals(100, grouper.getLimit());
 
     // Aggregate slightly different row
@@ -132,11 +170,17 @@ public void testMinBufferSize()
     for (int i = 0; i < numRows; i++) {
       Assert.assertTrue(String.valueOf(i), grouper.aggregate(i).isOk());
     }
-
-    Assert.assertEquals(1899, grouper.getGrowthCount());
-    Assert.assertEquals(101, grouper.getSize());
-    Assert.assertEquals(202, grouper.getBuckets());
-    Assert.assertEquals(101, grouper.getMaxSize());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(474, grouper.getGrowthCount());
+      Assert.assertEquals(104, grouper.getSize());
+      Assert.assertEquals(209, grouper.getBuckets());
+      Assert.assertEquals(104, grouper.getMaxSize());
+    } else {
+      Assert.assertEquals(1899, grouper.getGrowthCount());
+      Assert.assertEquals(101, grouper.getSize());
+      Assert.assertEquals(202, grouper.getBuckets());
+      Assert.assertEquals(101, grouper.getMaxSize());
+    }
     Assert.assertEquals(100, grouper.getLimit());
 
     final List<Grouper.Entry<Integer>> expected = Lists.newArrayList();
diff --git a/processing/src/test/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouperTest.java b/processing/src/test/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouperTest.java
index e66cd77b0ac..7458f53f34a 100644
--- a/processing/src/test/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouperTest.java
+++ b/processing/src/test/java/io/druid/query/groupby/epinephelinae/StreamingMergeSortedGrouperTest.java
@@ -25,6 +25,7 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Ordering;
 import com.google.common.primitives.Ints;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedRow;
 import io.druid.java.util.common.concurrent.Execs;
 import io.druid.query.aggregation.AggregatorFactory;
@@ -100,7 +101,7 @@ public void testStreamingAggregateWithLargeBuffer() throws ExecutionException, I
   @Test(timeout = 5000L)
   public void testStreamingAggregateWithMinimumBuffer() throws ExecutionException, InterruptedException
   {
-    testStreamingAggregate(60);
+    testStreamingAggregate(83);
   }
 
   private void testStreamingAggregate(int bufferSize) throws ExecutionException, InterruptedException
@@ -128,7 +129,10 @@ private void testStreamingAggregate(int bufferSize) throws ExecutionException, I
       });
 
       final List<Entry<Integer>> unsortedEntries = Lists.newArrayList(grouper.iterator(true));
-      final List<Entry<Integer>> actual = Ordering.from((Comparator<Entry<Integer>>) (o1, o2) -> Ints.compare(o1.getKey(), o2.getKey()))
+      final List<Entry<Integer>> actual = Ordering.from((Comparator<Entry<Integer>>) (o1, o2) -> Ints.compare(
+          o1.getKey(),
+          o2.getKey()
+      ))
                                                   .sortedCopy(unsortedEntries);
 
       if (!actual.equals(expected)) {
@@ -145,7 +149,11 @@ private void testStreamingAggregate(int bufferSize) throws ExecutionException, I
   public void testNotEnoughBuffer()
   {
     expectedException.expect(IllegalStateException.class);
-    expectedException.expectMessage("Buffer[50] should be large enough to store at least three records[20]");
+    if (NullHandling.replaceWithDefault()) {
+      expectedException.expectMessage("Buffer[50] should be large enough to store at least three records[20]");
+    } else {
+      expectedException.expectMessage("Buffer[50] should be large enough to store at least three records[21]");
+    }
 
     newGrouper(GrouperTestUtil.newColumnSelectorFactory(), 50);
   }
@@ -157,7 +165,7 @@ public void testTimeout()
     expectedException.expectCause(CoreMatchers.instanceOf(TimeoutException.class));
 
     final TestColumnSelectorFactory columnSelectorFactory = GrouperTestUtil.newColumnSelectorFactory();
-    final StreamingMergeSortedGrouper<Integer> grouper = newGrouper(columnSelectorFactory, 60);
+    final StreamingMergeSortedGrouper<Integer> grouper = newGrouper(columnSelectorFactory, 100);
 
     columnSelectorFactory.setRow(new MapBasedRow(0, ImmutableMap.of("value", 10L)));
     grouper.aggregate(6);
diff --git a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnExpectationsTest.java b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnExpectationsTest.java
index a363f760603..004b33258b0 100644
--- a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnExpectationsTest.java
+++ b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnExpectationsTest.java
@@ -20,6 +20,7 @@
 package io.druid.query.lookup;
 
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.query.extraction.MapLookupExtractor;
 import org.junit.Assert;
 import org.junit.Test;
@@ -65,7 +66,11 @@ public void testNullKeyIsMappable()
         false,
         false
     );
-    Assert.assertEquals("bar", lookupExtractionFn.apply(null));
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals("bar", lookupExtractionFn.apply(null));
+    } else {
+      Assert.assertEquals("REPLACE", lookupExtractionFn.apply(null));
+    }
   }
 
   @Test
diff --git a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java
index aea8bf626d6..7fa56973971 100644
--- a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java
+++ b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java
@@ -20,13 +20,13 @@
 package io.druid.query.lookup;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.IAE;
 import io.druid.query.extraction.MapLookupExtractor;
@@ -66,7 +66,7 @@
 
   public LookupExtractionFnTest(boolean retainMissing, String replaceMissing, Optional<Boolean> injective)
   {
-    this.replaceMissing = Strings.emptyToNull(replaceMissing);
+    this.replaceMissing = NullHandling.emptyToNullIfNeeded(replaceMissing);
     this.retainMissing = retainMissing;
     this.injective = injective.orElse(null);
   }
@@ -74,7 +74,7 @@ public LookupExtractionFnTest(boolean retainMissing, String replaceMissing, Opti
   @Test
   public void testEqualsAndHash()
   {
-    if (retainMissing && !Strings.isNullOrEmpty(replaceMissing)) {
+    if (retainMissing && !NullHandling.isNullOrEquivalent(replaceMissing)) {
       // skip
       return;
     }
@@ -111,7 +111,7 @@ public void testEqualsAndHash()
   @Test
   public void testSimpleSerDe() throws IOException
   {
-    if (retainMissing && !Strings.isNullOrEmpty(replaceMissing)) {
+    if (retainMissing && !NullHandling.isNullOrEquivalent(replaceMissing)) {
       // skip
       return;
     }
@@ -146,12 +146,12 @@ public void testSimpleSerDe() throws IOException
   @Test(expected = IllegalArgumentException.class)
   public void testIllegalArgs()
   {
-    if (retainMissing && !Strings.isNullOrEmpty(replaceMissing)) {
+    if (retainMissing && !NullHandling.isNullOrEquivalent(replaceMissing)) {
       @SuppressWarnings("unused") // expected exception
       final LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(
           new MapLookupExtractor(ImmutableMap.of("foo", "bar"), false),
           retainMissing,
-          Strings.emptyToNull(replaceMissing),
+          NullHandling.emptyToNullIfNeeded(replaceMissing),
           injective,
           false
       );
@@ -163,7 +163,7 @@ public void testIllegalArgs()
   @Test
   public void testCacheKey()
   {
-    if (retainMissing && !Strings.isNullOrEmpty(replaceMissing)) {
+    if (retainMissing && !NullHandling.isNullOrEquivalent(replaceMissing)) {
       // skip
       return;
     }
@@ -178,7 +178,7 @@ public void testCacheKey()
         false
     );
 
-    if (Strings.isNullOrEmpty(replaceMissing) || retainMissing) {
+    if (NullHandling.isNullOrEquivalent(replaceMissing) || retainMissing) {
       Assert.assertFalse(
           Arrays.equals(
               lookupExtractionFn.getCacheKey(),
diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java
index f5da46c5fe7..9d893c100e6 100644
--- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.Intervals;
@@ -751,7 +752,7 @@ public void testSearchWithNullValueInDimension() throws Exception
     QueryRunner runner = factory.createRunner(new QueryableIndexSegment("asdf", TestIndex.persistRealtimeAndLoadMMapped(index)));
     List<SearchHit> expectedHits = Lists.newLinkedList();
     expectedHits.add(new SearchHit("table", "table", 1));
-    expectedHits.add(new SearchHit("table", "", 1));
+    expectedHits.add(new SearchHit("table", NullHandling.defaultStringValue(), 1));
     checkSearchQuery(searchQuery, runner, expectedHits);
   }
 
diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
index 5e0ab9a3409..3c2f76a62c7 100644
--- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java
@@ -23,7 +23,9 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import com.google.common.primitives.Doubles;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.Intervals;
 import io.druid.java.util.common.StringUtils;
@@ -144,16 +146,15 @@ public void testEmptyTimeseries()
                                   )
                                   .descending(descending)
                                   .build();
-
+    Map<String, Object> resultMap = Maps.newHashMap();
+    resultMap.put("rows", 0L);
+    resultMap.put("index", NullHandling.defaultDoubleValue());
+    resultMap.put("first", NullHandling.defaultDoubleValue());
     List<Result<TimeseriesResultValue>> expectedResults = ImmutableList.of(
         new Result<>(
             DateTimes.of("2020-04-02"),
             new TimeseriesResultValue(
-                ImmutableMap.<String, Object>of(
-                    "rows", 0L,
-                    "index", 0D,
-                    "first", 0D
-                )
+                resultMap
             )
         )
     );
@@ -205,24 +206,61 @@ public void testFullOnTimeseries()
           QueryRunnerTestHelper.skippedDay.equals(current) ? 0L : 13L,
           value.getLongMetric("rows").longValue()
       );
-      Assert.assertEquals(
-          result.toString(),
-          Doubles.tryParse(expectedIndex[count]).doubleValue(),
-          value.getDoubleMetric("index").doubleValue(),
-          value.getDoubleMetric("index").doubleValue() * 1e-6
-      );
-      Assert.assertEquals(
-          result.toString(),
-          new Double(expectedIndex[count]) +
-          (QueryRunnerTestHelper.skippedDay.equals(current) ? 0L : 13L) + 1L,
-          value.getDoubleMetric("addRowsIndexConstant"),
-          value.getDoubleMetric("addRowsIndexConstant") * 1e-6
-      );
-      Assert.assertEquals(
-          value.getDoubleMetric("uniques"),
-          QueryRunnerTestHelper.skippedDay.equals(current) ? 0.0d : 9.0d,
-          0.02
-      );
+
+      if (!QueryRunnerTestHelper.skippedDay.equals(current)) {
+        Assert.assertEquals(
+            result.toString(),
+            Doubles.tryParse(expectedIndex[count]).doubleValue(),
+            value.getDoubleMetric("index").doubleValue(),
+            value.getDoubleMetric("index").doubleValue() * 1e-6
+        );
+        Assert.assertEquals(
+            result.toString(),
+            new Double(expectedIndex[count]) +
+            13L + 1L,
+            value.getDoubleMetric("addRowsIndexConstant"),
+            value.getDoubleMetric("addRowsIndexConstant") * 1e-6
+        );
+        Assert.assertEquals(
+            value.getDoubleMetric("uniques"),
+            9.0d,
+            0.02
+        );
+      } else {
+        if (NullHandling.replaceWithDefault()) {
+          Assert.assertEquals(
+              result.toString(),
+              0.0D,
+              value.getDoubleMetric("index").doubleValue(),
+              value.getDoubleMetric("index").doubleValue() * 1e-6
+          );
+          Assert.assertEquals(
+              result.toString(),
+              new Double(expectedIndex[count]) + 1L,
+              value.getDoubleMetric("addRowsIndexConstant"),
+              value.getDoubleMetric("addRowsIndexConstant") * 1e-6
+          );
+          Assert.assertEquals(
+              0.0D,
+              value.getDoubleMetric("uniques"),
+              0.02
+          );
+        } else {
+          Assert.assertNull(
+              result.toString(),
+              value.getDoubleMetric("index")
+          );
+          Assert.assertNull(
+              result.toString(),
+              value.getDoubleMetric("addRowsIndexConstant")
+          );
+          Assert.assertEquals(
+              value.getDoubleMetric("uniques"),
+              0.0d,
+              0.02
+          );
+        }
+      }
 
       lastResult = result;
       ++count;
@@ -508,13 +546,13 @@ public void testTimeseriesIntervalOutOfRanges()
         new Result<>(
             QueryRunnerTestHelper.emptyInterval.getIntervals().get(0).getStart(),
             new TimeseriesResultValue(
-                ImmutableMap.of(
+                TestHelper.createExpectedMap(
                     "rows",
                     0L,
                     "index",
-                    0L,
+                    NullHandling.defaultLongValue(),
                     QueryRunnerTestHelper.addRowsIndexConstantMetric,
-                    1.0
+                    NullHandling.sqlCompatible() ? null : 1.0
                 )
             )
         )
@@ -771,14 +809,15 @@ public void testTimeseriesQueryZeroFilling()
     final Iterable<Interval> iterable = Granularities.HOUR.getIterable(
         new Interval(DateTimes.of("2011-04-14T01"), DateTimes.of("2011-04-15"))
     );
+    Map noRowsResult = Maps.newHashMap();
+    noRowsResult.put("rows", 0L);
+    noRowsResult.put("idx", NullHandling.defaultLongValue());
     for (Interval interval : iterable) {
       lotsOfZeroes.add(
-              new Result<>(
-                      interval.getStart(),
-                      new TimeseriesResultValue(
-                              ImmutableMap.<String, Object>of("rows", 0L, "idx", 0L)
-                      )
-              )
+          new Result<>(
+              interval.getStart(),
+              new TimeseriesResultValue(noRowsResult)
+          )
       );
     }
 
@@ -1483,27 +1522,23 @@ public void testTimeseriesWithFilterOnNonExistentDimension()
                                   .descending(descending)
                                   .build();
 
+    Map<String, Object> resultMap = Maps.newHashMap();
+    resultMap.put("rows", 0L);
+    resultMap.put("index", NullHandling.defaultDoubleValue());
+    resultMap.put("addRowsIndexConstant", NullHandling.replaceWithDefault() ? 1.0 : null);
+    resultMap.put("uniques", 0.0);
+
     List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
         new Result<>(
             DateTimes.of("2011-04-01"),
             new TimeseriesResultValue(
-                ImmutableMap.<String, Object>of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         ),
         new Result<>(
             DateTimes.of("2011-04-02"),
             new TimeseriesResultValue(
-                ImmutableMap.<String, Object>of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         )
     );
@@ -1628,28 +1663,23 @@ public void testTimeseriesWithNonExistentFilter()
                                   .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant)
                                   .descending(descending)
                                   .build();
+    Map<String, Object> resultMap = Maps.newHashMap();
+    resultMap.put("rows", 0L);
+    resultMap.put("index", NullHandling.defaultDoubleValue());
+    resultMap.put("addRowsIndexConstant", NullHandling.replaceWithDefault() ? 1.0 : null);
+    resultMap.put("uniques", 0.0);
 
     List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
         new Result<>(
             DateTimes.of("2011-04-01"),
             new TimeseriesResultValue(
-                ImmutableMap.<String, Object>of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         ),
         new Result<>(
             DateTimes.of("2011-04-02"),
             new TimeseriesResultValue(
-                ImmutableMap.<String, Object>of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         )
     );
@@ -1674,28 +1704,23 @@ public void testTimeseriesWithNonExistentFilterAndMultiDim()
                                   .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant)
                                   .descending(descending)
                                   .build();
+    Map<String, Object> resultMap = Maps.newHashMap();
+    resultMap.put("rows", 0L);
+    resultMap.put("index", NullHandling.defaultDoubleValue());
+    resultMap.put("addRowsIndexConstant", NullHandling.replaceWithDefault() ? 1.0 : null);
+    resultMap.put("uniques", 0.0);
 
     List<Result<TimeseriesResultValue>> expectedResults = Arrays.asList(
         new Result<>(
             DateTimes.of("2011-04-01"),
             new TimeseriesResultValue(
-                ImmutableMap.<String, Object>of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         ),
         new Result<>(
             DateTimes.of("2011-04-02"),
             new TimeseriesResultValue(
-                ImmutableMap.<String, Object>of(
-                    "rows", 0L,
-                    "index", 0.0,
-                    "addRowsIndexConstant", 1.0,
-                    "uniques", 0.0
-                )
+                resultMap
             )
         )
     );
diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
index 3bf304eb4f5..74be2e02274 100644
--- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java
@@ -29,6 +29,7 @@
 import com.google.common.primitives.Doubles;
 import com.google.common.primitives.Longs;
 import io.druid.collections.StupidPool;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.IAE;
 import io.druid.java.util.common.ISE;
@@ -4205,10 +4206,15 @@ public void testTopNWithExtractionFilter()
   public void testTopNWithExtractionFilterAndFilteredAggregatorCaseNoExistingValue()
   {
     Map<String, String> extractionMap = new HashMap<>();
-    extractionMap.put("", "NULL");
 
     MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
-    LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false);
+    LookupExtractionFn lookupExtractionFn;
+    if (NullHandling.replaceWithDefault()) {
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false);
+      extractionMap.put("", "NULL");
+    } else {
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, "NULL", true, false);
+    }
     DimFilter extractionFilter = new ExtractionDimFilter("null_column", "NULL", lookupExtractionFn, null);
     TopNQueryBuilder topNQueryBuilder = new TopNQueryBuilder()
         .dataSource(QueryRunnerTestHelper.dataSource)
@@ -4278,10 +4284,16 @@ public void testTopNWithExtractionFilterAndFilteredAggregatorCaseNoExistingValue
   public void testTopNWithExtractionFilterNoExistingValue()
   {
     Map<String, String> extractionMap = new HashMap<>();
-    extractionMap.put("", "NULL");
 
     MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
-    LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
+    LookupExtractionFn lookupExtractionFn;
+    if (NullHandling.replaceWithDefault()) {
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
+      extractionMap.put("", "NULL");
+    } else {
+      extractionMap.put("", "NOT_USED");
+      lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, "NULL", true, true);
+    }
     DimFilter extractionFilter = new ExtractionDimFilter("null_column", "NULL", lookupExtractionFn, null);
     TopNQueryBuilder topNQueryBuilder = new TopNQueryBuilder()
         .dataSource(QueryRunnerTestHelper.dataSource)
diff --git a/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java b/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java
index 300092f39d8..ac49f415053 100644
--- a/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java
+++ b/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java
@@ -19,6 +19,7 @@
 
 package io.druid.segment;
 
+import io.druid.common.config.NullHandling;
 import io.druid.query.extraction.StringFormatExtractionFn;
 import io.druid.query.extraction.SubstringDimExtractionFn;
 import io.druid.segment.data.IndexedInts;
@@ -68,7 +69,7 @@ public void testLookupName()
   public void testLookupId()
   {
     Assert.assertEquals(0, NULL_SELECTOR.idLookup().lookupId(null));
-    Assert.assertEquals(0, NULL_SELECTOR.idLookup().lookupId(""));
+    Assert.assertEquals(NullHandling.replaceWithDefault() ? 0 : -1, NULL_SELECTOR.idLookup().lookupId(""));
     Assert.assertEquals(-1, NULL_SELECTOR.idLookup().lookupId("billy"));
     Assert.assertEquals(-1, NULL_SELECTOR.idLookup().lookupId("bob"));
 
diff --git a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java
index a334a764654..9aada382dae 100644
--- a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java
+++ b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java
@@ -27,6 +27,7 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import io.druid.collections.bitmap.RoaringBitmapFactory;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.impl.DimensionSchema;
@@ -221,11 +222,11 @@ public void testPersistWithDifferentDims() throws Exception
     Assert.assertEquals(ImmutableList.of("1", "2"), rowList.get(0).dimensionValues());
     Assert.assertEquals(Arrays.asList("3", null), rowList.get(1).dimensionValues());
 
-    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", ""));
+    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", null));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dim1", "1"));
     checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dim1", "3"));
 
-    checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dim2", ""));
+    checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dim2", null));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dim2", "2"));
   }
 
@@ -804,17 +805,17 @@ public void testNonLexicographicDimOrderMerge() throws Exception
     Assert.assertEquals(Arrays.asList("50000", "200", "3000"), rowList.get(2).dimensionValues());
     Assert.assertEquals(Collections.singletonList(3L), rowList.get(2).metricValues());
 
-    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d3", ""));
+    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d3", null));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d3", "30000"));
     checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d3", "40000"));
     checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d3", "50000"));
 
-    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d1", ""));
+    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d1", null));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d1", "100"));
     checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d1", "200"));
     checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d1", "300"));
 
-    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d2", ""));
+    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d2", null));
     checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d2", "2000"));
     checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d2", "3000"));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d2", "4000"));
@@ -904,13 +905,13 @@ public void testMergeWithDimensionsList() throws Exception
     Assert.assertEquals(useBitmapIndexes, adapter.getCapabilities("dimC").hasBitmapIndexes());
 
     if (useBitmapIndexes) {
-      checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dimA", ""));
+      checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dimA", null));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("dimA", "1"));
       checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("dimA", "2"));
 
-      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dimB", ""));
+      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dimB", null));
 
-      checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("dimC", ""));
+      checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("dimC", null));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dimC", "1"));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dimC", "2"));
     }
@@ -977,7 +978,7 @@ public void testDisjointDimMerge() throws Exception
 
       // dimA always has bitmap indexes, since it has them in indexA (it comes in through discovery).
       Assert.assertTrue(adapter.getCapabilities("dimA").hasBitmapIndexes());
-      checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("dimA", ""));
+      checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("dimA", null));
       checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("dimA", "1"));
       checkBitmapIndex(Collections.singletonList(4), adapter.getBitmapIndex("dimA", "2"));
 
@@ -989,7 +990,7 @@ public void testDisjointDimMerge() throws Exception
       }
       //noinspection ObjectEquality
       if (toPersistB != toPersistB2 || useBitmapIndexes) {
-        checkBitmapIndex(Arrays.asList(3, 4), adapter.getBitmapIndex("dimB", ""));
+        checkBitmapIndex(Arrays.asList(3, 4), adapter.getBitmapIndex("dimB", null));
         checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dimB", "1"));
         checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dimB", "2"));
         checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("dimB", "3"));
@@ -1086,52 +1087,82 @@ public void testJointDimMerge() throws Exception
       final QueryableIndexIndexableAdapter adapter = new QueryableIndexIndexableAdapter(merged);
       final List<DebugRow> rowList = RowIteratorHelper.toList(adapter.getRows());
 
-      Assert.assertEquals(
-          ImmutableList.of("d2", "d3", "d5", "d6", "d7", "d8", "d9"),
-          ImmutableList.copyOf(adapter.getDimensionNames())
-      );
+      if (NullHandling.replaceWithDefault()) {
+        Assert.assertEquals(
+            ImmutableList.of("d2", "d3", "d5", "d6", "d7", "d8", "d9"),
+            ImmutableList.copyOf(adapter.getDimensionNames())
+        );
+      } else {
+        Assert.assertEquals(
+            ImmutableList.of("d1", "d2", "d3", "d5", "d6", "d7", "d8", "d9"),
+            ImmutableList.copyOf(adapter.getDimensionNames())
+        );
+      }
       Assert.assertEquals(4, rowList.size());
-      Assert.assertEquals(
-          Arrays.asList(null, "310", null, null, null, null, "910"),
-          rowList.get(0).dimensionValues()
-      );
-      Assert.assertEquals(
-          Arrays.asList("210", "311", null, null, "710", "810", "911"),
-          rowList.get(1).dimensionValues()
-      );
-      Assert.assertEquals(
-          Arrays.asList(null, null, "520", "620", "720", "820", "920"),
-          rowList.get(2).dimensionValues()
-      );
-      Assert.assertEquals(
-          Arrays.asList(null, null, null, "621", null, "821", "921"),
-          rowList.get(3).dimensionValues()
-      );
+      if (NullHandling.replaceWithDefault()) {
+        Assert.assertEquals(
+            Arrays.asList(null, "310", null, null, null, null, "910"),
+            rowList.get(0).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList("210", "311", null, null, "710", "810", "911"),
+            rowList.get(1).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList(null, null, "520", "620", "720", "820", "920"),
+            rowList.get(2).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList(null, null, null, "621", null, "821", "921"),
+            rowList.get(3).dimensionValues()
+        );
+
+        checkBitmapIndex(Arrays.asList(0, 2, 3), adapter.getBitmapIndex("d2", null));
+        checkBitmapIndex(Arrays.asList(0, 1, 3), adapter.getBitmapIndex("d5", null));
+        checkBitmapIndex(Arrays.asList(0, 3), adapter.getBitmapIndex("d7", null));
+      } else {
+        Assert.assertEquals(
+            Arrays.asList("", "", "310", null, null, "", null, "910"),
+            rowList.get(0).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList(null, "210", "311", null, null, "710", "810", "911"),
+            rowList.get(1).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList(null, null, null, "520", "620", "720", "820", "920"),
+            rowList.get(2).dimensionValues()
+        );
+        Assert.assertEquals(
+            Arrays.asList(null, null, null, "", "621", "", "821", "921"),
+            rowList.get(3).dimensionValues()
+        );
+        checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("d2", null));
+        checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("d5", null));
+        checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d7", null));
+      }
 
-      checkBitmapIndex(Arrays.asList(0, 2, 3), adapter.getBitmapIndex("d2", ""));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d2", "210"));
 
-      checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("d3", ""));
+      checkBitmapIndex(Arrays.asList(2, 3), adapter.getBitmapIndex("d3", null));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d3", "310"));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d3", "311"));
 
-      checkBitmapIndex(Arrays.asList(0, 1, 3), adapter.getBitmapIndex("d5", ""));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d5", "520"));
 
-      checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("d6", ""));
+      checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("d6", null));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d6", "620"));
       checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d6", "621"));
 
-      checkBitmapIndex(Arrays.asList(0, 3), adapter.getBitmapIndex("d7", ""));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d7", "710"));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d7", "720"));
 
-      checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d8", ""));
+      checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d8", null));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d8", "810"));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d8", "820"));
       checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d8", "821"));
 
-      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d9", ""));
+      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d9", null));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("d9", "910"));
       checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("d9", "911"));
       checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("d9", "920"));
@@ -1229,26 +1260,44 @@ public void testNoRollupMergeWithDuplicateRow() throws Exception
     final QueryableIndexIndexableAdapter adapter = new QueryableIndexIndexableAdapter(merged);
     final List<DebugRow> rowList = RowIteratorHelper.toList(adapter.getRows());
 
-    Assert.assertEquals(
-        ImmutableList.of("d3", "d6", "d8", "d9"),
-        ImmutableList.copyOf(adapter.getDimensionNames())
-    );
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(
+          ImmutableList.of("d3", "d6", "d8", "d9"),
+          ImmutableList.copyOf(adapter.getDimensionNames())
+      );
+    } else {
+      Assert.assertEquals(
+          ImmutableList.of("d1", "d2", "d3", "d5", "d6", "d7", "d8", "d9"),
+          ImmutableList.copyOf(adapter.getDimensionNames())
+      );
+    }
+
     Assert.assertEquals(4, rowList.size());
-    Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(0).dimensionValues());
-    Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(1).dimensionValues());
-    Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(2).dimensionValues());
-    Assert.assertEquals(Arrays.asList(null, "621", "821", "921"), rowList.get(3).dimensionValues());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(0).dimensionValues());
+      Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(1).dimensionValues());
+      Assert.assertEquals(Arrays.asList("310", null, null, "910"), rowList.get(2).dimensionValues());
+      Assert.assertEquals(Arrays.asList(null, "621", "821", "921"), rowList.get(3).dimensionValues());
+    } else {
+      Assert.assertEquals(Arrays.asList("", "", "310", null, null, "", null, "910"), rowList.get(0).dimensionValues());
+      Assert.assertEquals(Arrays.asList("", "", "310", null, null, "", null, "910"), rowList.get(1).dimensionValues());
+      Assert.assertEquals(Arrays.asList("", "", "310", null, null, "", null, "910"), rowList.get(2).dimensionValues());
+      Assert.assertEquals(
+          Arrays.asList(null, null, null, "", "621", "", "821", "921"),
+          rowList.get(3).dimensionValues()
+      );
+    }
 
-    checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d3", ""));
+    checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d3", null));
     checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d3", "310"));
 
-    checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d6", ""));
+    checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d6", null));
     checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d6", "621"));
 
-    checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d8", ""));
+    checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d8", null));
     checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d8", "821"));
 
-    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d9", ""));
+    checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("d9", null));
     checkBitmapIndex(Arrays.asList(0, 1, 2), adapter.getBitmapIndex("d9", "910"));
     checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("d9", "921"));
   }
@@ -1281,7 +1330,7 @@ public void testMergeWithSupersetOrdering() throws Exception
         new MapBasedInputRow(
             1,
             Arrays.asList("dimB", "dimA"),
-            ImmutableMap.of("dimB", "1", "dimA", "")
+            ImmutableMap.of("dimB", "1")
         )
     );
 
@@ -1289,7 +1338,7 @@ public void testMergeWithSupersetOrdering() throws Exception
         new MapBasedInputRow(
             1,
             Arrays.asList("dimB", "dimA"),
-            ImmutableMap.of("dimB", "", "dimA", "1")
+            ImmutableMap.of("dimA", "1")
         )
     );
 
@@ -1375,11 +1424,11 @@ public void testMergeWithSupersetOrdering() throws Exception
     Assert.assertEquals(Arrays.asList("3", null), rowList.get(4).dimensionValues());
     Assert.assertEquals(Collections.singletonList(2L), rowList.get(4).metricValues());
 
-    checkBitmapIndex(Arrays.asList(2, 3, 4), adapter.getBitmapIndex("dimA", ""));
+    checkBitmapIndex(Arrays.asList(2, 3, 4), adapter.getBitmapIndex("dimA", null));
     checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dimA", "1"));
     checkBitmapIndex(Collections.singletonList(1), adapter.getBitmapIndex("dimA", "2"));
 
-    checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dimB", ""));
+    checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dimB", null));
     checkBitmapIndex(Collections.singletonList(2), adapter.getBitmapIndex("dimB", "1"));
     checkBitmapIndex(Collections.singletonList(3), adapter.getBitmapIndex("dimB", "2"));
     checkBitmapIndex(Collections.singletonList(4), adapter.getBitmapIndex("dimB", "3"));
@@ -1417,16 +1466,16 @@ public void testMergeWithSupersetOrdering() throws Exception
     Assert.assertEquals(Arrays.asList("2", null, null), rowList2.get(11).dimensionValues());
     Assert.assertEquals(Collections.singletonList(2L), rowList2.get(11).metricValues());
 
-    checkBitmapIndex(Arrays.asList(0, 1, 2, 3, 4, 5, 8, 9, 10), adapter2.getBitmapIndex("dimA", ""));
+    checkBitmapIndex(Arrays.asList(0, 1, 2, 3, 4, 5, 8, 9, 10), adapter2.getBitmapIndex("dimA", null));
     checkBitmapIndex(Collections.singletonList(6), adapter2.getBitmapIndex("dimA", "1"));
     checkBitmapIndex(Arrays.asList(7, 11), adapter2.getBitmapIndex("dimA", "2"));
 
-    checkBitmapIndex(Arrays.asList(0, 1, 2, 6, 7, 11), adapter2.getBitmapIndex("dimB", ""));
+    checkBitmapIndex(Arrays.asList(0, 1, 2, 6, 7, 11), adapter2.getBitmapIndex("dimB", null));
     checkBitmapIndex(Arrays.asList(3, 8), adapter2.getBitmapIndex("dimB", "1"));
     checkBitmapIndex(Arrays.asList(4, 9), adapter2.getBitmapIndex("dimB", "2"));
     checkBitmapIndex(Arrays.asList(5, 10), adapter2.getBitmapIndex("dimB", "3"));
 
-    checkBitmapIndex(Arrays.asList(3, 4, 5, 6, 7, 8, 9, 10, 11), adapter2.getBitmapIndex("dimC", ""));
+    checkBitmapIndex(Arrays.asList(3, 4, 5, 6, 7, 8, 9, 10, 11), adapter2.getBitmapIndex("dimC", null));
     checkBitmapIndex(Collections.singletonList(0), adapter2.getBitmapIndex("dimC", "1"));
     checkBitmapIndex(Collections.singletonList(1), adapter2.getBitmapIndex("dimC", "2"));
     checkBitmapIndex(Collections.singletonList(2), adapter2.getBitmapIndex("dimC", "3"));
@@ -1747,7 +1796,14 @@ public void testMergeNumericDims() throws Exception
     Assert.assertEquals(ImmutableList.of("dimA", "dimB", "dimC"), ImmutableList.copyOf(adapter.getDimensionNames()));
     Assert.assertEquals(4, rowList.size());
 
-    Assert.assertEquals(Arrays.asList(0L, 0.0f, "Nully Row"), rowList.get(0).dimensionValues());
+    Assert.assertEquals(
+        Arrays.asList(
+            NullHandling.defaultLongValue(),
+            NullHandling.defaultFloatValue(),
+            "Nully Row"
+        ),
+        rowList.get(0).dimensionValues()
+    );
     Assert.assertEquals(Collections.singletonList(2L), rowList.get(0).metricValues());
 
     Assert.assertEquals(Arrays.asList(72L, 60000.789f, "World"), rowList.get(1).dimensionValues());
@@ -1829,13 +1885,13 @@ public void testPersistNullColumnSkipping() throws Exception
     index1.add(new MapBasedInputRow(
         1L,
         Arrays.asList("d1", "d2"),
-        ImmutableMap.of("d1", "a", "d2", "", "A", 1)
+        ImmutableMap.of("d1", "a", "A", 1)
     ));
 
     index1.add(new MapBasedInputRow(
         1L,
         Arrays.asList("d1", "d2"),
-        ImmutableMap.of("d1", "b", "d2", "", "A", 1)
+        ImmutableMap.of("d1", "b", "A", 1)
     ));
 
     final File tempDir = temporaryFolder.newFolder();
@@ -2033,7 +2089,7 @@ public void testMultiValueHandling() throws Exception
     Assert.assertEquals(useBitmapIndexes, adapter.getCapabilities("dim2").hasBitmapIndexes());
 
     if (useBitmapIndexes) {
-      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", ""));
+      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", null));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "a"));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "b"));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "x"));
@@ -2064,7 +2120,7 @@ public void testMultiValueHandling() throws Exception
     Assert.assertEquals(useBitmapIndexes, adapter.getCapabilities("dim2").hasBitmapIndexes());
 
     if (useBitmapIndexes) {
-      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", ""));
+      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", null));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dim1", "a"));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dim1", "b"));
       checkBitmapIndex(Collections.singletonList(0), adapter.getBitmapIndex("dim1", "x"));
@@ -2099,7 +2155,7 @@ public void testMultiValueHandling() throws Exception
     Assert.assertEquals(useBitmapIndexes, adapter.getCapabilities("dim2").hasBitmapIndexes());
 
     if (useBitmapIndexes) {
-      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", ""));
+      checkBitmapIndex(Collections.emptyList(), adapter.getBitmapIndex("dim1", null));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "a"));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "b"));
       checkBitmapIndex(Arrays.asList(0, 1), adapter.getBitmapIndex("dim1", "x"));
diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java
index 352f3b66d6d..7f711a1564e 100644
--- a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java
+++ b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java
@@ -23,6 +23,7 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.Intervals;
 import io.druid.java.util.common.Pair;
@@ -399,13 +400,13 @@ public void testNonIntersectingSchemas()
             DateTimes.of("2011-01-12T00:00:00.000Z"),
             new TimeseriesResultValue(
                 ImmutableMap.<String, Object>builder()
-                            .put("rows", 2L)
-                            .put("index", 100.0D)
-                            .put("addRowsIndexConstant", 103.0D)
-                            .put("uniques", UNIQUES_1)
-                            .put("maxIndex", 100.0D)
-                            .put("minIndex", 0.0D)
-                            .build()
+                    .put("rows", 2L)
+                    .put("index", 100.0D)
+                    .put("addRowsIndexConstant", 103.0D)
+                    .put("uniques", UNIQUES_1)
+                    .put("maxIndex", 100.0D)
+                    .put("minIndex", NullHandling.replaceWithDefault() ? 0.0D : 100.0D)
+                    .build()
             )
         )
     );
@@ -746,13 +747,13 @@ public void testValueAndEmptySchemas()
             DateTimes.of("2011-01-12T00:00:00.000Z"),
             new TimeseriesResultValue(
                 ImmutableMap.<String, Object>builder()
-                            .put("rows", 2L)
-                            .put("index", 100.0D)
-                            .put("addRowsIndexConstant", 103.0D)
-                            .put("uniques", UNIQUES_1)
-                            .put("maxIndex", 100.0D)
-                            .put("minIndex", 0.0D)
-                            .build()
+                    .put("rows", 2L)
+                    .put("index", 100.0D)
+                    .put("addRowsIndexConstant", 103.0D)
+                    .put("uniques", UNIQUES_1)
+                    .put("maxIndex", 100.0D)
+                    .put("minIndex", NullHandling.replaceWithDefault() ? 0.0D : 100.0D)
+                    .build()
             )
         )
     );
@@ -868,15 +869,14 @@ public void testEmptySchemas()
         new Result<>(
             DateTimes.of("2011-01-12T00:00:00.000Z"),
             new TimeseriesResultValue(
-                ImmutableMap.<String, Object>builder()
-                            .put("rows", 1L)
-                            .put("index", 0.0D)
-                            .put("addRowsIndexConstant", 2.0D)
-                            .put("uniques", 0.0D)
-                            .put("maxIndex", 0.0D)
-                            .put("minIndex", 0.0D)
-                            .build()
-            )
+                TestHelper.createExpectedMap(
+                    "rows", 1L,
+                    "index", NullHandling.replaceWithDefault() ? 0.0D : null,
+                    "addRowsIndexConstant", NullHandling.replaceWithDefault() ? 2.0D : null,
+                    "uniques", 0.0D,
+                    "maxIndex", NullHandling.replaceWithDefault() ? 0.0D : null,
+                    "minIndex", NullHandling.replaceWithDefault() ? 0.0D : null
+                ))
         )
     );
 
@@ -884,14 +884,14 @@ public void testEmptySchemas()
         new Result<>(
             DateTimes.of("2011-01-12T00:00:00.000Z"),
             new TimeseriesResultValue(
-                ImmutableMap.<String, Object>builder()
-                            .put("rows", 0L)
-                            .put("index", 0.0D)
-                            .put("addRowsIndexConstant", 1.0D)
-                            .put("uniques", 0.0D)
-                            .put("maxIndex", Double.NEGATIVE_INFINITY)
-                            .put("minIndex", Double.POSITIVE_INFINITY)
-                            .build()
+                TestHelper.createExpectedMap(
+                    "rows", 0L,
+                    "index", NullHandling.replaceWithDefault() ? 0.0D : null,
+                    "addRowsIndexConstant", NullHandling.replaceWithDefault() ? 1.0D : null,
+                    "uniques", 0.0D,
+                    "maxIndex", NullHandling.replaceWithDefault() ? Double.NEGATIVE_INFINITY : null,
+                    "minIndex", NullHandling.replaceWithDefault() ? Double.POSITIVE_INFINITY : null
+                )
             )
         )
     );
@@ -1185,13 +1185,13 @@ public void testDifferentMetrics()
             DateTimes.of("2011-01-12T00:00:00.000Z"),
             new TimeseriesResultValue(
                 ImmutableMap.<String, Object>builder()
-                            .put("rows", 10L)
-                            .put("index", 900.0D)
-                            .put("addRowsIndexConstant", 911.0D)
-                            .put("uniques", UNIQUES_1)
-                            .put("maxIndex", 100.0D)
-                            .put("minIndex", 0.0D)
-                            .build()
+                    .put("rows", NullHandling.sqlCompatible() ? 11L : 10L)
+                    .put("index", 900.0D)
+                    .put("addRowsIndexConstant", NullHandling.sqlCompatible() ? 912.0D : 911.0D)
+                    .put("uniques", UNIQUES_1)
+                    .put("maxIndex", 100.0D)
+                    .put("minIndex", NullHandling.replaceWithDefault() ? 0.0D : 100.0D)
+                    .build()
             )
         )
     );
diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java
index e2e4a2e4b7b..5f686c52108 100644
--- a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java
+++ b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java
@@ -22,6 +22,7 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.Intervals;
 import io.druid.java.util.common.granularity.Granularities;
@@ -116,7 +117,8 @@
   public SchemalessTestSimpleTest(Segment segment, boolean coalesceAbsentAndEmptyDims)
   {
     this.segment = segment;
-    this.coalesceAbsentAndEmptyDims = coalesceAbsentAndEmptyDims;
+    // Absent and empty dims are equivalent only when replaceWithDefault is true
+    this.coalesceAbsentAndEmptyDims = coalesceAbsentAndEmptyDims && NullHandling.replaceWithDefault();
   }
 
   @Test
@@ -150,7 +152,7 @@ public void testFullOnTimeseries()
                             .put("addRowsIndexConstant", coalesceAbsentAndEmptyDims ? 911.0 : 912.0)
                             .put("uniques", 2.000977198748901D)
                             .put("maxIndex", 100.0)
-                            .put("minIndex", 0.0)
+                            .put("minIndex", NullHandling.replaceWithDefault() ? 0.0 : 100.0)
                             .build()
             )
         )
diff --git a/processing/src/test/java/io/druid/segment/TestHelper.java b/processing/src/test/java/io/druid/segment/TestHelper.java
index 299f5c060bb..a9539a40b43 100644
--- a/processing/src/test/java/io/druid/segment/TestHelper.java
+++ b/processing/src/test/java/io/druid/segment/TestHelper.java
@@ -21,7 +21,9 @@
 
 import com.fasterxml.jackson.databind.InjectableValues;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import io.druid.data.input.MapBasedRow;
 import io.druid.data.input.Row;
 import io.druid.jackson.DefaultObjectMapper;
@@ -315,12 +317,16 @@ private static void assertRow(String msg, Row expected, Row actual)
       final Object actualValue = actualMap.get(key);
 
       if (expectedValue instanceof Float || expectedValue instanceof Double) {
-        Assert.assertEquals(
-            StringUtils.format("%s: key[%s]", msg, key),
-            ((Number) expectedValue).doubleValue(),
-            ((Number) actualValue).doubleValue(),
-            Math.abs(((Number) expectedValue).doubleValue() * 1e-6)
-        );
+        if (expectedValue == null) {
+          Assert.assertNull(actualValue);
+        } else {
+          Assert.assertEquals(
+              StringUtils.format("%s: key[%s]", msg, key),
+              ((Number) expectedValue).doubleValue(),
+              ((Number) actualValue).doubleValue(),
+              Math.abs(((Number) expectedValue).doubleValue() * 1e-6)
+          );
+        }
       } else {
         Assert.assertEquals(
             StringUtils.format("%s: key[%s]", msg, key),
@@ -330,4 +336,16 @@ private static void assertRow(String msg, Row expected, Row actual)
       }
     }
   }
+
+
+  public static Map<String, Object> createExpectedMap(Object... vals)
+  {
+    Preconditions.checkArgument(vals.length % 2 == 0);
+
+    Map<String, Object> theVals = Maps.newHashMap();
+    for (int i = 0; i < vals.length; i += 2) {
+      theVals.put(vals[i].toString(), vals[i + 1]);
+    }
+    return theVals;
+  }
 }
diff --git a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java
index 0e6c7bb318e..da8de3682c8 100644
--- a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -88,6 +89,21 @@ public static void tearDown() throws Exception
 
   @Test
   public void testLexicographicMatchEverything()
+  {
+    final List<BoundDimFilter> filters = ImmutableList.of(
+        new BoundDimFilter("dim0", null, "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+        new BoundDimFilter("dim1", null, "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+        new BoundDimFilter("dim2", null, "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+        new BoundDimFilter("dim3", null, "z", false, false, false, null, StringComparators.LEXICOGRAPHIC)
+    );
+
+    for (BoundDimFilter filter : filters) {
+      assertFilterMatches(filter, ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7"));
+    }
+  }
+
+  @Test
+  public void testLexicographicMatchWithEmptyString()
   {
     final List<BoundDimFilter> filters = ImmutableList.of(
         new BoundDimFilter("dim0", "", "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
@@ -95,9 +111,15 @@ public void testLexicographicMatchEverything()
         new BoundDimFilter("dim2", "", "z", false, false, false, null, StringComparators.LEXICOGRAPHIC),
         new BoundDimFilter("dim3", "", "z", false, false, false, null, StringComparators.LEXICOGRAPHIC)
     );
-
-    for (BoundDimFilter filter : filters) {
-      assertFilterMatches(filter, ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7"));
+    if (NullHandling.replaceWithDefault()) {
+      for (BoundDimFilter filter : filters) {
+        assertFilterMatches(filter, ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7"));
+      }
+    } else {
+      assertFilterMatches(filters.get(0), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7"));
+      assertFilterMatches(filters.get(1), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7"));
+      assertFilterMatches(filters.get(2), ImmutableList.of("0", "2", "3", "4", "6", "7"));
+      assertFilterMatches(filters.get(3), ImmutableList.of());
     }
   }
 
@@ -112,19 +134,49 @@ public void testLexicographicMatchNull()
         new BoundDimFilter("dim1", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
         ImmutableList.of("0")
     );
-    assertFilterMatches(
-        new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.of("1", "2", "5")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of("1", "2", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of("2")
+      );
+    }
   }
 
   @Test
   public void testLexicographicMatchMissingColumn()
   {
-    assertFilterMatches(
-        new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", null, false, true, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", null, "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.<String>of()
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of()
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", null, false, true, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of()
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", null, "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.<String>of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+    }
     assertFilterMatches(
         new BoundDimFilter("dim3", "", "", true, false, false, null, StringComparators.LEXICOGRAPHIC),
         ImmutableList.<String>of()
@@ -133,18 +185,11 @@ public void testLexicographicMatchMissingColumn()
         new BoundDimFilter("dim3", "", "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
         ImmutableList.<String>of()
     );
-    assertFilterMatches(
-        new BoundDimFilter("dim3", "", null, false, true, false, null, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
-    );
+
     assertFilterMatches(
         new BoundDimFilter("dim3", null, "", false, false, false, null, StringComparators.LEXICOGRAPHIC),
         ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
     );
-    assertFilterMatches(
-        new BoundDimFilter("dim3", null, "", false, true, false, null, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.<String>of()
-    );
   }
 
 
@@ -229,14 +274,25 @@ public void testAlphaNumericMatchNull()
         new BoundDimFilter("dim1", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
         ImmutableList.of("0")
     );
-    assertFilterMatches(
-        new BoundDimFilter("dim2", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
-        ImmutableList.of("1", "2", "5")
-    );
-    assertFilterMatches(
-        new BoundDimFilter("dim3", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
-        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
+          ImmutableList.of("1", "2", "5")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
+          ImmutableList.of("2")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, true, null, StringComparators.ALPHANUMERIC),
+          ImmutableList.of()
+      );
+    }
   }
 
   @Test
@@ -327,14 +383,26 @@ public void testNumericMatchNull()
         new BoundDimFilter("dim1", "", "", false, false, false, null, StringComparators.NUMERIC),
         ImmutableList.of("0")
     );
-    assertFilterMatches(
-        new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.NUMERIC),
-        ImmutableList.of("1", "2", "5")
-    );
-    assertFilterMatches(
-        new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.NUMERIC),
-        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.NUMERIC),
+          ImmutableList.of("1", "2", "5")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.NUMERIC),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "", "", false, false, false, null, StringComparators.NUMERIC),
+          ImmutableList.of("2")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim3", "", "", false, false, false, null, StringComparators.NUMERIC),
+          ImmutableList.of()
+      );
+    }
+
   }
 
   @Test
@@ -432,10 +500,17 @@ public void testMatchWithExtractionFn()
     String nullJsFn = "function(str) { return null; }";
     ExtractionFn makeNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getEnabledInstance());
 
-    assertFilterMatches(
-        new BoundDimFilter("dim0", "", "", false, false, false, makeNullFn, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter("dim0", "", "", false, false, false, makeNullFn, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter("dim0", "", "", false, false, false, makeNullFn, StringComparators.LEXICOGRAPHIC),
+          ImmutableList.of()
+      );
+    }
 
     assertFilterMatches(
         new BoundDimFilter("dim1", "super-ab", "super-abd", true, true, false, superFn, StringComparators.LEXICOGRAPHIC),
@@ -452,10 +527,69 @@ public void testMatchWithExtractionFn()
         ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
     );
 
-    assertFilterMatches(
-        new BoundDimFilter("dim2", "super-null", "super-null", false, false, false, superFn, StringComparators.LEXICOGRAPHIC),
-        ImmutableList.of("1", "2", "5")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new BoundDimFilter(
+              "dim2",
+              "super-null",
+              "super-null",
+              false,
+              false,
+              false,
+              superFn,
+              StringComparators.LEXICOGRAPHIC
+          ),
+          ImmutableList.of("1", "2", "5")
+      );
+      assertFilterMatches(
+          new BoundDimFilter(
+              "dim2",
+              "super-null",
+              "super-null",
+              false,
+              false,
+              false,
+              superFn,
+              StringComparators.NUMERIC
+          ),
+          ImmutableList.of("1", "2", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new BoundDimFilter(
+              "dim2",
+              "super-null",
+              "super-null",
+              false,
+              false,
+              false,
+              superFn,
+              StringComparators.LEXICOGRAPHIC
+          ),
+          ImmutableList.of("1", "5")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "super-", "super-", false, false, false, superFn, StringComparators.NUMERIC),
+          ImmutableList.of("2")
+      );
+      assertFilterMatches(
+          new BoundDimFilter(
+              "dim2",
+              "super-null",
+              "super-null",
+              false,
+              false,
+              false,
+              superFn,
+              StringComparators.LEXICOGRAPHIC
+          ),
+          ImmutableList.of("1", "5")
+      );
+      assertFilterMatches(
+          new BoundDimFilter("dim2", "super-", "super-", false, false, false, superFn, StringComparators.NUMERIC),
+          ImmutableList.of("2")
+      );
+    }
 
     assertFilterMatches(
         new BoundDimFilter("dim3", "super-null", "super-null", false, false, false, superFn, StringComparators.LEXICOGRAPHIC),
@@ -467,11 +601,6 @@ public void testMatchWithExtractionFn()
         ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
     );
 
-    assertFilterMatches(
-        new BoundDimFilter("dim2", "super-null", "super-null", false, false, false, superFn, StringComparators.NUMERIC),
-        ImmutableList.of("1", "2", "5")
-    );
-
     assertFilterMatches(
         new BoundDimFilter("dim4", "super-null", "super-null", false, false, false, superFn, StringComparators.NUMERIC),
         ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7")
diff --git a/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java b/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java
index 1b320c6ef8f..c0d7043fe0f 100644
--- a/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -123,14 +124,28 @@ public void testMissingColumnNotSpecifiedInDimensionList()
         DefaultDimensionSpec.of("dim6"),
         DefaultDimensionSpec.of("dim7")
     )), ImmutableList.<String>of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
-    assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.<DimensionSpec>of(
-        DefaultDimensionSpec.of("dim1"),
-        DefaultDimensionSpec.of("dim6")
-    )), ImmutableList.<String>of("0"));
-    assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.<DimensionSpec>of(
-        DefaultDimensionSpec.of("dim2"),
-        DefaultDimensionSpec.of("dim6")
-    )), ImmutableList.<String>of("1", "2", "6", "7", "8"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.<DimensionSpec>of(
+          DefaultDimensionSpec.of("dim1"),
+          DefaultDimensionSpec.of("dim6")
+      )), ImmutableList.<String>of("0"));
+
+      assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.<DimensionSpec>of(
+          DefaultDimensionSpec.of("dim2"),
+          DefaultDimensionSpec.of("dim6")
+      )), ImmutableList.<String>of("1", "2", "6", "7", "8"));
+    } else {
+      assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.<DimensionSpec>of(
+          DefaultDimensionSpec.of("dim1"),
+          DefaultDimensionSpec.of("dim6")
+      )), ImmutableList.<String>of());
+
+      assertFilterMatches(new ColumnComparisonDimFilter(ImmutableList.<DimensionSpec>of(
+          DefaultDimensionSpec.of("dim2"),
+          DefaultDimensionSpec.of("dim6")
+      )), ImmutableList.<String>of("1", "6", "7", "8"));
+    }
+
   }
 
   @Test
diff --git a/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java b/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java
index 2e52b4c5097..c0323a8d9a2 100644
--- a/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import com.google.common.collect.Sets;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
@@ -122,8 +123,14 @@ public void testOneSingleValuedStringColumn()
     assertFilterMatches(EDF("dim3 == 1.0"), ImmutableList.of("3", "4", "6"));
     assertFilterMatches(EDF("dim3 == 1.234"), ImmutableList.of("9"));
     assertFilterMatches(EDF("dim3 < '2'"), ImmutableList.of("0", "1", "3", "4", "6", "9"));
-    assertFilterMatches(EDF("dim3 < 2"), ImmutableList.of("0", "3", "4", "6", "7", "9"));
-    assertFilterMatches(EDF("dim3 < 2.0"), ImmutableList.of("0", "3", "4", "6", "7", "9"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(EDF("dim3 < 2"), ImmutableList.of("0", "3", "4", "6", "7", "9"));
+      assertFilterMatches(EDF("dim3 < 2.0"), ImmutableList.of("0", "3", "4", "6", "7", "9"));
+    } else {
+      // Empty String and "a" will not match
+      assertFilterMatches(EDF("dim3 < 2"), ImmutableList.of("3", "4", "6", "9"));
+      assertFilterMatches(EDF("dim3 < 2.0"), ImmutableList.of("3", "4", "6", "9"));
+    }
     assertFilterMatches(EDF("like(dim3, '1%')"), ImmutableList.of("1", "3", "4", "6", "9"));
   }
 
@@ -132,8 +139,13 @@ public void testOneMultiValuedStringColumn()
   {
     // Expressions currently treat multi-valued arrays as nulls.
     // This test is just documenting the current behavior, not necessarily saying it makes sense.
-
-    assertFilterMatches(EDF("dim4 == ''"), ImmutableList.of("0", "1", "2", "4", "5", "6", "7", "8"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(EDF("dim4 == ''"), ImmutableList.of("0", "1", "2", "4", "5", "6", "7", "8"));
+    } else {
+      assertFilterMatches(EDF("dim4 == ''"), ImmutableList.of("2"));
+      // As per SQL standard null == null returns false.
+      assertFilterMatches(EDF("dim4 == null"), ImmutableList.of());
+    }
     assertFilterMatches(EDF("dim4 == '1'"), ImmutableList.of());
     assertFilterMatches(EDF("dim4 == '3'"), ImmutableList.of("3"));
   }
@@ -141,7 +153,12 @@ public void testOneMultiValuedStringColumn()
   @Test
   public void testOneLongColumn()
   {
-    assertFilterMatches(EDF("dim1 == ''"), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(EDF("dim1 == ''"), ImmutableList.of("0"));
+    } else {
+      // A long does not match empty string
+      assertFilterMatches(EDF("dim1 == ''"), ImmutableList.of());
+    }
     assertFilterMatches(EDF("dim1 == '1'"), ImmutableList.of("1"));
     assertFilterMatches(EDF("dim1 == 2"), ImmutableList.of("2"));
     assertFilterMatches(EDF("dim1 < '2'"), ImmutableList.of("0", "1"));
@@ -153,7 +170,12 @@ public void testOneLongColumn()
   @Test
   public void testOneFloatColumn()
   {
-    assertFilterMatches(EDF("dim2 == ''"), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(EDF("dim2 == ''"), ImmutableList.of("0"));
+    } else {
+      // A float does not match empty string
+      assertFilterMatches(EDF("dim2 == ''"), ImmutableList.of());
+    }
     assertFilterMatches(EDF("dim2 == '1'"), ImmutableList.of("1"));
     assertFilterMatches(EDF("dim2 == 2"), ImmutableList.of("2"));
     assertFilterMatches(EDF("dim2 < '2'"), ImmutableList.of("0", "1"));
@@ -175,11 +197,19 @@ public void testCompareColumns()
     // String vs string
     assertFilterMatches(EDF("dim0 == dim3"), ImmutableList.of("2", "5", "8"));
 
-    // String vs long
-    assertFilterMatches(EDF("dim1 == dim3"), ImmutableList.of("0", "2", "5", "8"));
+    if (NullHandling.replaceWithDefault()) {
+      // String vs long
+      assertFilterMatches(EDF("dim1 == dim3"), ImmutableList.of("0", "2", "5", "8"));
+
+      // String vs float
+      assertFilterMatches(EDF("dim2 == dim3"), ImmutableList.of("0", "2", "5", "8"));
+    } else {
+      // String vs long
+      assertFilterMatches(EDF("dim1 == dim3"), ImmutableList.of("2", "5", "8"));
 
-    // String vs float
-    assertFilterMatches(EDF("dim2 == dim3"), ImmutableList.of("0", "2", "5", "8"));
+      // String vs float
+      assertFilterMatches(EDF("dim2 == dim3"), ImmutableList.of("2", "5", "8"));
+    }
 
     // String vs. multi-value string
     // Expressions currently treat multi-valued arrays as nulls.
@@ -190,12 +220,25 @@ public void testCompareColumns()
   @Test
   public void testMissingColumn()
   {
-    assertFilterMatches(EDF("missing == ''"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(EDF("missing == ''"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    } else {
+      // As per SQL standard null == null returns false.
+      assertFilterMatches(EDF("missing == null"), ImmutableList.of());
+    }
     assertFilterMatches(EDF("missing == '1'"), ImmutableList.of());
     assertFilterMatches(EDF("missing == 2"), ImmutableList.of());
-    assertFilterMatches(EDF("missing < '2'"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
-    assertFilterMatches(EDF("missing < 2"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
-    assertFilterMatches(EDF("missing < 2.0"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    if (NullHandling.replaceWithDefault()) {
+      // missing equivalent to 0
+      assertFilterMatches(EDF("missing < '2'"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+      assertFilterMatches(EDF("missing < 2"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+      assertFilterMatches(EDF("missing < 2.0"), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    } else {
+      // missing equivalent to null
+      assertFilterMatches(EDF("missing < '2'"), ImmutableList.of());
+      assertFilterMatches(EDF("missing < 2"), ImmutableList.of());
+      assertFilterMatches(EDF("missing < 2.0"), ImmutableList.of());
+    }
     assertFilterMatches(EDF("missing > '2'"), ImmutableList.of());
     assertFilterMatches(EDF("missing > 2"), ImmutableList.of());
     assertFilterMatches(EDF("missing > 2.0"), ImmutableList.of());
diff --git a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java
index 5bd793a571b..cb44cf27aff 100644
--- a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java
@@ -21,9 +21,9 @@
 
 import com.google.common.base.Function;
 import com.google.common.base.Predicate;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -107,6 +107,7 @@ public NoBitmapSelectorDimFilter(
     {
       super(dimension, value, extractionFn);
     }
+
     @Override
     public Filter toFilter()
     {
@@ -116,7 +117,7 @@ public Filter toFilter()
       if (extractionFn == null) {
         return new NoBitmapSelectorFilter(dimension, value);
       } else {
-        final String valueOrNull = Strings.emptyToNull(value);
+        final String valueOrNull = NullHandling.emptyToNullIfNeeded(value);
         final DruidPredicateFactory predicateFactory = new DruidPredicateFactory()
         {
           @Override
@@ -199,7 +200,11 @@ public static void tearDown() throws Exception
   @Test
   public void testSinglePreFilterWithNulls()
   {
-    assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
+    } else {
+      assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of());
+    }
     assertFilterMatches(new SelectorDimFilter("dim1", "", null), ImmutableList.of("0"));
     assertFilterMatches(new SelectorDimFilter("dim1", "10", null), ImmutableList.of("1"));
     assertFilterMatches(new SelectorDimFilter("dim1", "2", null), ImmutableList.of("2"));
@@ -212,7 +217,11 @@ public void testSinglePreFilterWithNulls()
   @Test
   public void testSinglePostFilterWithNulls()
   {
-    assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
+    } else {
+      assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", null, null), ImmutableList.of());
+    }
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "", null), ImmutableList.of("0"));
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "10", null), ImmutableList.of("1"));
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "2", null), ImmutableList.of("2"));
@@ -221,8 +230,11 @@ public void testSinglePostFilterWithNulls()
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "abc", null), ImmutableList.of("5", "8"));
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "ab", null), ImmutableList.<String>of());
 
-    assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN), ImmutableList.of("0"));
-    assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN), ImmutableList.of("0"));
+    } else {
+      assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-", JS_EXTRACTION_FN), ImmutableList.of("0"));
+    }
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-10", JS_EXTRACTION_FN), ImmutableList.of("1"));
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN), ImmutableList.of("2"));
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim1", "super-1", JS_EXTRACTION_FN), ImmutableList.of("3", "9"));
@@ -234,13 +246,23 @@ public void testSinglePostFilterWithNulls()
   @Test
   public void testBasicPreAndPostFilterWithNulls()
   {
-    assertFilterMatches(
-        new AndDimFilter(Arrays.<DimFilter>asList(
-            new SelectorDimFilter("dim2", "a", null),
-            new NoBitmapSelectorDimFilter("dim1", null, null)
-        )),
-        ImmutableList.of("0")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new AndDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim2", "a", null),
+              new NoBitmapSelectorDimFilter("dim1", null, null)
+          )),
+          ImmutableList.of("0")
+      );
+    } else {
+      assertFilterMatches(
+          new AndDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim2", "a", null),
+              new NoBitmapSelectorDimFilter("dim1", null, null)
+          )),
+          ImmutableList.of()
+      );
+    }
 
     assertFilterMatches(
         new AndDimFilter(Arrays.<DimFilter>asList(
@@ -274,14 +296,51 @@ public void testBasicPreAndPostFilterWithNulls()
         ImmutableList.<String>of()
     );
 
-
-    assertFilterMatches(
-        new AndDimFilter(Arrays.<DimFilter>asList(
-            new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
-            new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN)
-        )),
-        ImmutableList.of("0")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new AndDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("0")
+      );
+      assertFilterMatches(
+          new AndDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("2")
+      );
+    } else {
+      assertFilterMatches(
+          new AndDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim1", "super-", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("0")
+      );
+      assertFilterMatches(
+          new AndDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim2", "super-a", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim1", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of()
+      );
+      assertFilterMatches(
+          new AndDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("2")
+      );
+      assertFilterMatches(
+          new AndDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of()
+      );
+    }
 
     assertFilterMatches(
         new AndDimFilter(Arrays.<DimFilter>asList(
@@ -291,14 +350,6 @@ public void testBasicPreAndPostFilterWithNulls()
         ImmutableList.of("1")
     );
 
-    assertFilterMatches(
-        new AndDimFilter(Arrays.<DimFilter>asList(
-            new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
-            new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
-        )),
-        ImmutableList.of("2")
-    );
-
     assertFilterMatches(
         new AndDimFilter(Arrays.<DimFilter>asList(
             new SelectorDimFilter("dim1", "super-1", JS_EXTRACTION_FN),
@@ -327,13 +378,23 @@ public void testOrPostFilterWithNulls()
         ImmutableList.of("0", "3")
     );
 
-    assertFilterMatches(
-        new OrDimFilter(Arrays.<DimFilter>asList(
-            new SelectorDimFilter("dim1", "abc", null),
-            new NoBitmapSelectorDimFilter("dim2", null, null)
-        )),
-        ImmutableList.of("1", "2", "5", "8")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new OrDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim1", "abc", null),
+              new NoBitmapSelectorDimFilter("dim2", null, null)
+          )),
+          ImmutableList.of("1", "2", "5", "8")
+      );
+    } else {
+      assertFilterMatches(
+          new OrDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim1", "abc", null),
+              new NoBitmapSelectorDimFilter("dim2", null, null)
+          )),
+          ImmutableList.of("1", "5", "8")
+      );
+    }
 
     assertFilterMatches(
         new OrDimFilter(Arrays.<DimFilter>asList(
@@ -382,13 +443,32 @@ public void testOrPostFilterWithNulls()
         )),
         ImmutableList.of("0", "3")
     );
-    assertFilterMatches(
-        new OrDimFilter(Arrays.<DimFilter>asList(
-            new SelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN),
-            new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
-        )),
-        ImmutableList.of("1", "2", "5", "8")
-    );
+
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new OrDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("1", "2", "5", "8")
+      );
+    } else {
+      assertFilterMatches(
+          new OrDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-null", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("1", "5", "8")
+      );
+      assertFilterMatches(
+          new OrDimFilter(Arrays.<DimFilter>asList(
+              new SelectorDimFilter("dim1", "super-abc", JS_EXTRACTION_FN),
+              new NoBitmapSelectorDimFilter("dim2", "super-", JS_EXTRACTION_FN)
+          )),
+          ImmutableList.of("2", "5", "8")
+      );
+    }
+
     assertFilterMatches(
         new OrDimFilter(Arrays.<DimFilter>asList(
             new SelectorDimFilter("dim1", "super-2", JS_EXTRACTION_FN),
@@ -432,7 +512,14 @@ public void testOrPostFilterWithNulls()
   public void testMissingColumnSpecifiedInDimensionList()
   {
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", null, null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
-    assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new NoBitmapSelectorDimFilter("dim3", "", null),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")
+      );
+    } else {
+      assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "", null), ImmutableList.of());
+    }
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "a", null), ImmutableList.<String>of());
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "b", null), ImmutableList.<String>of());
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim3", "c", null), ImmutableList.<String>of());
@@ -482,7 +569,17 @@ public void testMissingColumnSpecifiedInDimensionList()
   public void testMissingColumnNotSpecifiedInDimensionList()
   {
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", null, null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
-    assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new NoBitmapSelectorDimFilter("dim4", "", null),
+          ImmutableList.of("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")
+      );
+    } else {
+      assertFilterMatches(
+          new NoBitmapSelectorDimFilter("dim4", "", null),
+          ImmutableList.of()
+      );
+    }
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "a", null), ImmutableList.<String>of());
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "b", null), ImmutableList.<String>of());
     assertFilterMatches(new NoBitmapSelectorDimFilter("dim4", "c", null), ImmutableList.<String>of());
diff --git a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java
index d7e1a14293e..58c98db4802 100644
--- a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java
@@ -23,6 +23,7 @@
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -125,10 +126,17 @@ public void testSingleValueStringColumnWithNulls()
         ImmutableList.of("a")
     );
 
-    assertFilterMatches(
-        toInFilter("dim1", null, "10", "abc"),
-        ImmutableList.of("a", "b", "f")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          toInFilter("dim1", null, "10", "abc"),
+          ImmutableList.of("a", "b", "f")
+      );
+    } else {
+      assertFilterMatches(
+          toInFilter("dim1", null, "10", "abc"),
+          ImmutableList.of("b", "f")
+      );
+    }
 
     assertFilterMatches(
         toInFilter("dim1", "-1", "ab", "de"),
@@ -139,28 +147,47 @@ public void testSingleValueStringColumnWithNulls()
   @Test
   public void testMultiValueStringColumn()
   {
-    assertFilterMatches(
-        toInFilter("dim2", null),
-        ImmutableList.of("b", "c", "f")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          toInFilter("dim2", null),
+          ImmutableList.of("b", "c", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", null, "a"),
+          ImmutableList.of("a", "b", "c", "d", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", null, "b"),
+          ImmutableList.of("a", "b", "c", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", ""),
+          ImmutableList.of("b", "c", "f")
+      );
+    } else {
+      assertFilterMatches(
+          toInFilter("dim2", null),
+          ImmutableList.of("b", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", null, "a"),
+          ImmutableList.of("a", "b", "d", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", null, "b"),
+          ImmutableList.of("a", "b", "f")
+      );
+      assertFilterMatches(
+          toInFilter("dim2", ""),
+          ImmutableList.of("c")
+      );
+    }
 
     assertFilterMatches(
         toInFilter("dim2", "", (String) null),
         ImmutableList.of("b", "c", "f")
     );
 
-    assertFilterMatches(
-        toInFilter("dim2", null, "a"),
-        ImmutableList.of("a", "b", "c", "d", "f")
-
-    );
-
-    assertFilterMatches(
-        toInFilter("dim2", null, "b"),
-        ImmutableList.of("a", "b", "c", "f")
-
-    );
-
     assertFilterMatches(
         toInFilter("dim2", "c"),
         ImmutableList.of("e")
@@ -180,10 +207,17 @@ public void testMissingColumn()
         ImmutableList.of("a", "b", "c", "d", "e", "f")
     );
 
-    assertFilterMatches(
-        toInFilter("dim3", ""),
-        ImmutableList.of("a", "b", "c", "d", "e", "f")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          toInFilter("dim3", ""),
+          ImmutableList.of("a", "b", "c", "d", "e", "f")
+      );
+    } else {
+      assertFilterMatches(
+          toInFilter("dim3", ""),
+          ImmutableList.of()
+      );
+    }
 
     assertFilterMatches(
         toInFilter("dim3", null, "a"),
@@ -215,20 +249,43 @@ public void testMatchWithExtractionFn()
     String nullJsFn = "function(str) { if (str === null) { return 'YES'; } else { return 'NO';} }";
     ExtractionFn yesNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getEnabledInstance());
 
-    assertFilterMatches(
-        toInFilterWithFn("dim2", superFn, "super-null", "super-a", "super-b"),
-        ImmutableList.of("a", "b", "c", "d", "f")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          toInFilterWithFn("dim2", superFn, "super-null", "super-a", "super-b"),
+          ImmutableList.of("a", "b", "c", "d", "f")
+      );
+      assertFilterMatches(
+          toInFilterWithFn("dim1", superFn, "super-null", "super-10", "super-def"),
+          ImmutableList.of("a", "b", "e")
+      );
+      assertFilterMatches(
+          toInFilterWithFn("dim2", yesNullFn, "YES"),
+          ImmutableList.of("b", "c", "f")
+      );
+      assertFilterMatches(
+          toInFilterWithFn("dim1", yesNullFn, "NO"),
+          ImmutableList.of("b", "c", "d", "e", "f")
+      );
+    } else {
+      assertFilterMatches(
+          toInFilterWithFn("dim2", superFn, "super-null", "super-a", "super-b"),
+          ImmutableList.of("a", "b", "d", "f")
+      );
+      assertFilterMatches(
+          toInFilterWithFn("dim1", superFn, "super-null", "super-10", "super-def"),
+          ImmutableList.of("b", "e")
+      );
+      assertFilterMatches(
+          toInFilterWithFn("dim2", yesNullFn, "YES"),
+          ImmutableList.of("b", "f")
+      );
+
+      assertFilterMatches(
+          toInFilterWithFn("dim1", yesNullFn, "NO"),
+          ImmutableList.of("a", "b", "c", "d", "e", "f")
+      );
+    }
 
-    assertFilterMatches(
-        toInFilterWithFn("dim2", yesNullFn, "YES"),
-        ImmutableList.of("b", "c", "f")
-    );
-
-    assertFilterMatches(
-        toInFilterWithFn("dim1", superFn, "super-null", "super-10", "super-def"),
-        ImmutableList.of("a", "b", "e")
-    );
 
     assertFilterMatches(
         toInFilterWithFn("dim3", yesNullFn, "NO"),
@@ -240,10 +297,6 @@ public void testMatchWithExtractionFn()
         ImmutableList.of("a", "b", "c", "d", "e", "f")
     );
 
-    assertFilterMatches(
-        toInFilterWithFn("dim1", yesNullFn, "NO"),
-        ImmutableList.of("b", "c", "d", "e", "f")
-    );
   }
 
   @Test
diff --git a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java
index 760ae0f7ad5..3127f8dee02 100644
--- a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -109,7 +110,12 @@ public void testSingleValueStringColumnWithoutNulls()
   @Test
   public void testSingleValueStringColumnWithNulls()
   {
-    assertFilterMatches(newJavaScriptDimFilter("dim1", jsNullFilter, null), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(newJavaScriptDimFilter("dim1", jsNullFilter, null), ImmutableList.of("0"));
+    } else {
+      assertFilterMatches(newJavaScriptDimFilter("dim1", jsNullFilter, null), ImmutableList.of());
+      assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter(""), null), ImmutableList.of("0"));
+    }
     assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter("10"), null), ImmutableList.of("1"));
     assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter("2"), null), ImmutableList.of("2"));
     assertFilterMatches(newJavaScriptDimFilter("dim1", jsValueFilter("1"), null), ImmutableList.of("3"));
@@ -122,7 +128,12 @@ public void testSingleValueStringColumnWithNulls()
   public void testMultiValueStringColumn()
   {
     // multi-val null......
-    assertFilterMatches(newJavaScriptDimFilter("dim2", jsNullFilter, null), ImmutableList.of("1", "2", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(newJavaScriptDimFilter("dim2", jsNullFilter, null), ImmutableList.of("1", "2", "5"));
+    } else {
+      assertFilterMatches(newJavaScriptDimFilter("dim2", jsNullFilter, null), ImmutableList.of("1", "5"));
+      assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter(""), null), ImmutableList.of("2"));
+    }
     assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("a"), null), ImmutableList.of("0", "3"));
     assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("b"), null), ImmutableList.of("0"));
     assertFilterMatches(newJavaScriptDimFilter("dim2", jsValueFilter("c"), null), ImmutableList.of("4"));
diff --git a/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java b/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java
index b9c127a81bc..d97acb6105f 100644
--- a/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -156,10 +157,17 @@ public void testMatchEmptyString()
   @Test
   public void testMatchEmptyStringWithExtractionFn()
   {
-    assertFilterMatches(
-        new LikeDimFilter("dim1", "", null, new SubstringDimExtractionFn(100, 1)),
-        ImmutableList.of("0", "1", "2", "3", "4", "5")
-    );
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new LikeDimFilter("dim1", "", null, new SubstringDimExtractionFn(100, 1)),
+          ImmutableList.of("0", "1", "2", "3", "4", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new LikeDimFilter("dim1", "", null, new SubstringDimExtractionFn(100, 1)),
+          ImmutableList.of()
+      );
+    }
   }
 
   @Test
diff --git a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java
index 732cbbcb8f7..7dcc4c70647 100644
--- a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -99,7 +100,11 @@ public void testSingleValueStringColumnWithoutNulls()
   public void testSingleValueStringColumnWithNulls()
   {
     // RegexFilter always returns false for null row values.
-    assertFilterMatches(new RegexDimFilter("dim1", ".*", null), ImmutableList.of("1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new RegexDimFilter("dim1", ".*", null), ImmutableList.of("1", "2", "3", "4", "5"));
+    } else {
+      assertFilterMatches(new RegexDimFilter("dim1", ".*", null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    }
     assertFilterMatches(new RegexDimFilter("dim1", "10", null), ImmutableList.of("1"));
     assertFilterMatches(new RegexDimFilter("dim1", "2", null), ImmutableList.of("2"));
     assertFilterMatches(new RegexDimFilter("dim1", "1", null), ImmutableList.of("1", "3"));
@@ -111,7 +116,11 @@ public void testSingleValueStringColumnWithNulls()
   @Test
   public void testMultiValueStringColumn()
   {
-    assertFilterMatches(new RegexDimFilter("dim2", ".*", null), ImmutableList.of("0", "3", "4"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new RegexDimFilter("dim2", ".*", null), ImmutableList.of("0", "3", "4"));
+    } else {
+      assertFilterMatches(new RegexDimFilter("dim2", ".*", null), ImmutableList.of("0", "2", "3", "4"));
+    }
     assertFilterMatches(new RegexDimFilter("dim2", "a", null), ImmutableList.of("0", "3"));
     assertFilterMatches(new RegexDimFilter("dim2", "b", null), ImmutableList.of("0"));
     assertFilterMatches(new RegexDimFilter("dim2", "c", null), ImmutableList.of("4"));
@@ -141,11 +150,16 @@ public void testRegexWithExtractionFn()
   {
     String nullJsFn = "function(str) { if (str === null) { return 'NOT_NULL_ANYMORE'; } else { return str;} }";
     ExtractionFn changeNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getEnabledInstance());
-
-    assertFilterMatches(new RegexDimFilter("dim1", ".*ANYMORE", changeNullFn), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new RegexDimFilter("dim1", ".*ANYMORE", changeNullFn), ImmutableList.of("0"));
+      assertFilterMatches(new RegexDimFilter("dim2", ".*ANYMORE", changeNullFn), ImmutableList.of("1", "2", "5"));
+    } else {
+      assertFilterMatches(new RegexDimFilter("dim1", ".*ANYMORE", changeNullFn), ImmutableList.of());
+      assertFilterMatches(new RegexDimFilter("dim2", ".*ANYMORE", changeNullFn), ImmutableList.of("1", "5"));
+    }
     assertFilterMatches(new RegexDimFilter("dim1", "ab.*", changeNullFn), ImmutableList.<String>of("4", "5"));
 
-    assertFilterMatches(new RegexDimFilter("dim2", ".*ANYMORE", changeNullFn), ImmutableList.of("1", "2", "5"));
+
     assertFilterMatches(new RegexDimFilter("dim2", "a.*", changeNullFn), ImmutableList.of("0", "3"));
 
     assertFilterMatches(new RegexDimFilter("dim3", ".*ANYMORE", changeNullFn), ImmutableList.of("0", "1", "2", "3", "4", "5"));
diff --git a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java
index b1cf8293457..cc714c0d5fc 100644
--- a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -105,8 +106,18 @@ public void testSingleValueStringColumnWithoutNulls()
   @Test
   public void testSingleValueStringColumnWithNulls()
   {
-    // SearchQueryFilter always returns false for null row values.
-    assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue(""), null), ImmutableList.of("1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      // SearchQueryFilter always returns false for null row values.
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim1", specForValue(""), null),
+          ImmutableList.of("1", "2", "3", "4", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim1", specForValue(""), null),
+          ImmutableList.of("0", "1", "2", "3", "4", "5")
+      );
+    }
     assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("10"), null), ImmutableList.of("1"));
     assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("2"), null), ImmutableList.of("2"));
     assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("1"), null), ImmutableList.of("1", "3"));
@@ -118,7 +129,14 @@ public void testSingleValueStringColumnWithNulls()
   @Test
   public void testMultiValueStringColumn()
   {
-    assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue(""), null), ImmutableList.of("0", "3", "4"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue(""), null), ImmutableList.of("0", "3", "4"));
+    } else {
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim2", specForValue(""), null),
+          ImmutableList.of("0", "2", "3", "4")
+      );
+    }
     assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("a"), null), ImmutableList.of("0", "3"));
     assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("b"), null), ImmutableList.of("0"));
     assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("c"), null), ImmutableList.of("4"));
@@ -151,10 +169,31 @@ public void testSearchQueryWithExtractionFn()
     String nullJsFn = "function(str) { if (str === null) { return 'NOT_NULL_ANYMORE'; } else { return str;} }";
     ExtractionFn changeNullFn = new JavaScriptExtractionFn(nullJsFn, false, JavaScriptConfig.getEnabledInstance());
 
-    assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("ANYMORE"), changeNullFn), ImmutableList.of("0"));
-    assertFilterMatches(new SearchQueryDimFilter("dim1", specForValue("ab"), changeNullFn), ImmutableList.<String>of("4", "5"));
-
-    assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("ANYMORE"), changeNullFn), ImmutableList.of("1", "2", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim1", specForValue("ANYMORE"), changeNullFn),
+          ImmutableList.of("0")
+      );
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim2", specForValue("ANYMORE"), changeNullFn),
+          ImmutableList.of("1", "2", "5")
+      );
+
+    } else {
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim1", specForValue("ANYMORE"), changeNullFn),
+          ImmutableList.of()
+      );
+      assertFilterMatches(
+          new SearchQueryDimFilter("dim2", specForValue("ANYMORE"), changeNullFn),
+          ImmutableList.of("1", "5")
+      );
+    }
+
+    assertFilterMatches(
+        new SearchQueryDimFilter("dim1", specForValue("ab"), changeNullFn),
+        ImmutableList.<String>of("4", "5")
+    );
     assertFilterMatches(new SearchQueryDimFilter("dim2", specForValue("a"), changeNullFn), ImmutableList.of("0", "3"));
 
     assertFilterMatches(new SearchQueryDimFilter("dim3", specForValue("ANYMORE"), changeNullFn), ImmutableList.of("0", "1", "2", "3", "4", "5"));
diff --git a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java
index b37009f578e..6244a27c1c1 100644
--- a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java
+++ b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java
@@ -22,6 +22,7 @@
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -124,8 +125,13 @@ public void testSingleValueStringColumnWithoutNulls()
   @Test
   public void testSingleValueStringColumnWithNulls()
   {
-    assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
-    assertFilterMatches(new SelectorDimFilter("dim1", "", null), ImmutableList.of("0"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of("0"));
+      assertFilterMatches(new SelectorDimFilter("dim1", "", null), ImmutableList.of("0"));
+    } else {
+      assertFilterMatches(new SelectorDimFilter("dim1", null, null), ImmutableList.of());
+      assertFilterMatches(new SelectorDimFilter("dim1", "", null), ImmutableList.of("0"));
+    }
     assertFilterMatches(new SelectorDimFilter("dim1", "10", null), ImmutableList.of("1"));
     assertFilterMatches(new SelectorDimFilter("dim1", "2", null), ImmutableList.of("2"));
     assertFilterMatches(new SelectorDimFilter("dim1", "1", null), ImmutableList.of("3"));
@@ -137,8 +143,13 @@ public void testSingleValueStringColumnWithNulls()
   @Test
   public void testMultiValueStringColumn()
   {
-    assertFilterMatches(new SelectorDimFilter("dim2", null, null), ImmutableList.of("1", "2", "5"));
-    assertFilterMatches(new SelectorDimFilter("dim2", "", null), ImmutableList.of("1", "2", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SelectorDimFilter("dim2", null, null), ImmutableList.of("1", "2", "5"));
+      assertFilterMatches(new SelectorDimFilter("dim2", "", null), ImmutableList.of("1", "2", "5"));
+    } else {
+      assertFilterMatches(new SelectorDimFilter("dim2", null, null), ImmutableList.of("1", "5"));
+      assertFilterMatches(new SelectorDimFilter("dim2", "", null), ImmutableList.of("2"));
+    }
     assertFilterMatches(new SelectorDimFilter("dim2", "a", null), ImmutableList.of("0", "3"));
     assertFilterMatches(new SelectorDimFilter("dim2", "b", null), ImmutableList.of("0"));
     assertFilterMatches(new SelectorDimFilter("dim2", "c", null), ImmutableList.of("4"));
@@ -149,7 +160,11 @@ public void testMultiValueStringColumn()
   public void testMissingColumnSpecifiedInDimensionList()
   {
     assertFilterMatches(new SelectorDimFilter("dim3", null, null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
-    assertFilterMatches(new SelectorDimFilter("dim3", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SelectorDimFilter("dim3", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    } else {
+      assertFilterMatches(new SelectorDimFilter("dim3", "", null), ImmutableList.of());
+    }
     assertFilterMatches(new SelectorDimFilter("dim3", "a", null), ImmutableList.<String>of());
     assertFilterMatches(new SelectorDimFilter("dim3", "b", null), ImmutableList.<String>of());
     assertFilterMatches(new SelectorDimFilter("dim3", "c", null), ImmutableList.<String>of());
@@ -159,7 +174,11 @@ public void testMissingColumnSpecifiedInDimensionList()
   public void testMissingColumnNotSpecifiedInDimensionList()
   {
     assertFilterMatches(new SelectorDimFilter("dim4", null, null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
-    assertFilterMatches(new SelectorDimFilter("dim4", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(new SelectorDimFilter("dim4", "", null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    } else {
+      assertFilterMatches(new SelectorDimFilter("dim4", "", null), ImmutableList.of());
+    }
     assertFilterMatches(new SelectorDimFilter("dim4", "a", null), ImmutableList.<String>of());
     assertFilterMatches(new SelectorDimFilter("dim4", "b", null), ImmutableList.<String>of());
     assertFilterMatches(new SelectorDimFilter("dim4", "c", null), ImmutableList.<String>of());
@@ -211,7 +230,23 @@ public void testSelectorWithLookupExtractionFn()
     );
     LookupExtractor mapExtractor3 = new MapLookupExtractor(stringMap3, false);
     LookupExtractionFn lookupFn3 = new LookupExtractionFn(mapExtractor3, false, null, false, true);
-    assertFilterMatches(new SelectorDimFilter("dim0", null, lookupFn3), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      // Nulls and empty strings are considered equivalent
+      assertFilterMatches(
+          new SelectorDimFilter("dim0", null, lookupFn3),
+          ImmutableList.of("0", "1", "2", "3", "4", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new SelectorDimFilter("dim0", null, lookupFn3),
+          ImmutableList.of("0", "2", "3", "4", "5")
+      );
+      assertFilterMatches(
+          new SelectorDimFilter("dim0", "", lookupFn3),
+          ImmutableList.of("1")
+      );
+    }
+
 
     final Map<String, String> stringMap4 = ImmutableMap.of(
         "9", "4"
@@ -252,7 +287,12 @@ public void testSelectorWithLookupExtractionFn()
 
     assertFilterMatches(optFilter1, ImmutableList.of("0", "1", "2", "5"));
     assertFilterMatches(optFilter2, ImmutableList.of("2", "5"));
-    assertFilterMatches(optFilter3, ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      // Null and Empty strings are same
+      assertFilterMatches(optFilter3, ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    } else {
+      assertFilterMatches(optFilter3, ImmutableList.of("0", "2", "3", "4", "5"));
+    }
     assertFilterMatches(optFilter4, ImmutableList.of("5"));
     assertFilterMatches(optFilter5, ImmutableList.<String>of());
     assertFilterMatches(optFilter6, ImmutableList.of("5"));
@@ -261,6 +301,20 @@ public void testSelectorWithLookupExtractionFn()
     // remove these when ExtractionDimFilter is removed.
     assertFilterMatches(new ExtractionDimFilter("dim1", "UNKNOWN", lookupFn, null), ImmutableList.of("0", "1", "2", "5"));
     assertFilterMatches(new ExtractionDimFilter("dim0", "5", lookupFn2, null), ImmutableList.of("2", "5"));
-    assertFilterMatches(new ExtractionDimFilter("dim0", null, lookupFn3, null), ImmutableList.of("0", "1", "2", "3", "4", "5"));
+    if (NullHandling.replaceWithDefault()) {
+      assertFilterMatches(
+          new ExtractionDimFilter("dim0", null, lookupFn3, null),
+          ImmutableList.of("0", "1", "2", "3", "4", "5")
+      );
+    } else {
+      assertFilterMatches(
+          new ExtractionDimFilter("dim0", null, lookupFn3, null),
+          ImmutableList.of("0", "2", "3", "4", "5")
+      );
+      assertFilterMatches(
+          new ExtractionDimFilter("dim0", "", lookupFn3, null),
+          ImmutableList.of("1")
+      );
+    }
   }
 }
diff --git a/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java b/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java
index 55ce4d17be3..10c075333b2 100644
--- a/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java
+++ b/processing/src/test/java/io/druid/segment/virtual/ExpressionVirtualColumnTest.java
@@ -134,7 +134,11 @@ public void testLongSelector()
     final BaseLongColumnValueSelector selector = XPLUSY.makeColumnValueSelector("expr", COLUMN_SELECTOR_FACTORY);
 
     CURRENT_ROW.set(ROW0);
-    Assert.assertEquals(0L, selector.getLong());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0L, selector.getLong());
+    } else {
+      Assert.assertTrue(selector.isNull());
+    }
 
     CURRENT_ROW.set(ROW1);
     if (NullHandling.replaceWithDefault()) {
@@ -157,7 +161,11 @@ public void testLongSelectorUsingStringFunction()
     final BaseLongColumnValueSelector selector = ZCONCATX.makeColumnValueSelector("expr", COLUMN_SELECTOR_FACTORY);
 
     CURRENT_ROW.set(ROW0);
-    Assert.assertEquals(0L, selector.getLong());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0L, selector.getLong());
+    } else {
+      Assert.assertTrue(selector.isNull());
+    }
 
     CURRENT_ROW.set(ROW1);
     if (NullHandling.replaceWithDefault()) {
@@ -168,10 +176,18 @@ public void testLongSelectorUsingStringFunction()
     }
 
     CURRENT_ROW.set(ROW2);
-    Assert.assertEquals(0L, selector.getLong());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0L, selector.getLong());
+    } else {
+      Assert.assertTrue(selector.isNull());
+    }
 
     CURRENT_ROW.set(ROW3);
-    Assert.assertEquals(0L, selector.getLong());
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0L, selector.getLong());
+    } else {
+      Assert.assertTrue(selector.isNull());
+    }
   }
 
   @Test
@@ -180,7 +196,11 @@ public void testFloatSelector()
     final BaseFloatColumnValueSelector selector = XPLUSY.makeColumnValueSelector("expr", COLUMN_SELECTOR_FACTORY);
 
     CURRENT_ROW.set(ROW0);
-    Assert.assertEquals(0.0f, selector.getFloat(), 0.0f);
+    if (NullHandling.replaceWithDefault()) {
+      Assert.assertEquals(0.0f, selector.getFloat(), 0.0f);
+    } else {
+      Assert.assertTrue(selector.isNull());
+    }
 
     CURRENT_ROW.set(ROW1);
     if (NullHandling.replaceWithDefault()) {
diff --git a/server/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java b/server/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java
index 97448a55cb1..a1667cb4b58 100644
--- a/server/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java
+++ b/server/src/main/java/io/druid/query/dimension/LookupDimensionSpec.java
@@ -24,6 +24,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.StringUtils;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.query.filter.DimFilterUtils;
@@ -53,6 +54,7 @@
   private final boolean retainMissingValue;
 
   @JsonProperty
+  @Nullable
   private final String replaceMissingValueWith;
 
   @JsonProperty
@@ -77,7 +79,7 @@ public LookupDimensionSpec(
   {
     this.retainMissingValue = retainMissingValue;
     this.optimize = optimize == null ? true : optimize;
-    this.replaceMissingValueWith = Strings.emptyToNull(replaceMissingValueWith);
+    this.replaceMissingValueWith = NullHandling.emptyToNullIfNeeded(replaceMissingValueWith);
     this.dimension = Preconditions.checkNotNull(dimension, "dimension can not be Null");
     this.outputName = Preconditions.checkNotNull(outputName, "outputName can not be Null");
     this.lookupReferencesManager = lookupReferencesManager;
@@ -166,13 +168,13 @@ public boolean mustDecorate()
   @Override
   public byte[] getCacheKey()
   {
+
     byte[] dimensionBytes = StringUtils.toUtf8(dimension);
     byte[] dimExtractionFnBytes = Strings.isNullOrEmpty(name)
                                   ? getLookup().getCacheKey()
                                   : StringUtils.toUtf8(name);
     byte[] outputNameBytes = StringUtils.toUtf8(outputName);
-    byte[] replaceWithBytes = StringUtils.toUtf8(Strings.nullToEmpty(replaceMissingValueWith));
-
+    byte[] replaceWithBytes = StringUtils.toUtf8(StringUtils.nullToEmptyNonDruidDataString(replaceMissingValueWith));
 
     return ByteBuffer.allocate(6
                                + dimensionBytes.length
diff --git a/server/src/main/java/io/druid/query/expression/LookupExprMacro.java b/server/src/main/java/io/druid/query/expression/LookupExprMacro.java
index 77dc88c184b..e24ad3ff8c7 100644
--- a/server/src/main/java/io/druid/query/expression/LookupExprMacro.java
+++ b/server/src/main/java/io/druid/query/expression/LookupExprMacro.java
@@ -19,8 +19,8 @@
 
 package io.druid.query.expression;
 
-import com.google.common.base.Strings;
 import com.google.inject.Inject;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.IAE;
 import io.druid.math.expr.Expr;
 import io.druid.math.expr.ExprEval;
@@ -77,7 +77,7 @@ public Expr apply(final List<Expr> args)
       @Override
       public ExprEval eval(final ObjectBinding bindings)
       {
-        return ExprEval.of(extractionFn.apply(Strings.emptyToNull(arg.eval(bindings).asString())));
+        return ExprEval.of(extractionFn.apply(NullHandling.emptyToNullIfNeeded(arg.eval(bindings).asString())));
       }
 
       @Override
diff --git a/server/src/main/java/io/druid/query/lookup/LookupModule.java b/server/src/main/java/io/druid/query/lookup/LookupModule.java
index ee869c342f9..93195f371ac 100644
--- a/server/src/main/java/io/druid/query/lookup/LookupModule.java
+++ b/server/src/main/java/io/druid/query/lookup/LookupModule.java
@@ -28,7 +28,6 @@
 import com.fasterxml.jackson.databind.jsontype.NamedType;
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.inject.Binder;
@@ -48,6 +47,7 @@
 import io.druid.guice.annotations.Self;
 import io.druid.guice.annotations.Smile;
 import io.druid.initialization.DruidModule;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.query.dimension.LookupDimensionSpec;
 import io.druid.query.expression.LookupExprMacro;
@@ -259,8 +259,9 @@ public String getLookupTier()
         "Cannot specify both `lookupTier` and `lookupTierIsDatasource`"
     );
     final String lookupTier = lookupTierIsDatasource ? dataSourceTaskIdHolder.getDataSource() : this.lookupTier;
+
     return Preconditions.checkNotNull(
-        lookupTier == null ? DEFAULT_TIER : Strings.emptyToNull(lookupTier),
+        lookupTier == null ? DEFAULT_TIER : StringUtils.emptyToNullNonDruidDataString(lookupTier),
         "Cannot have empty lookup tier from %s",
         lookupTierIsDatasource ? "bound value" : LookupModule.PROPERTY_BASE
     );
diff --git a/server/src/main/java/io/druid/server/QueryLifecycle.java b/server/src/main/java/io/druid/server/QueryLifecycle.java
index 9639aa860bc..3cf0901f24b 100644
--- a/server/src/main/java/io/druid/server/QueryLifecycle.java
+++ b/server/src/main/java/io/druid/server/QueryLifecycle.java
@@ -25,6 +25,7 @@
 import io.druid.client.DirectDruidClient;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.ISE;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.guava.Sequence;
 import io.druid.java.util.common.guava.SequenceWrapper;
 import io.druid.java.util.common.guava.Sequences;
@@ -285,11 +286,12 @@ public void emitLogsAndMetrics(
 
     try {
       final long queryTimeNs = System.nanoTime() - startNs;
+
       QueryMetrics queryMetrics = DruidMetrics.makeRequestMetrics(
           queryMetricsFactory,
           toolChest,
           baseQuery,
-          Strings.nullToEmpty(remoteAddress)
+          StringUtils.nullToEmptyNonDruidDataString(remoteAddress)
       );
       queryMetrics.success(success);
       queryMetrics.reportQueryTime(queryTimeNs);
@@ -323,11 +325,10 @@ public void emitLogsAndMetrics(
           statsMap.put("reason", e.toString());
         }
       }
-
       requestLogger.log(
           new RequestLogLine(
               DateTimes.utc(startMs),
-              Strings.nullToEmpty(remoteAddress),
+              StringUtils.nullToEmptyNonDruidDataString(remoteAddress),
               baseQuery,
               new QueryStats(statsMap)
           )
diff --git a/server/src/main/java/io/druid/server/emitter/EmitterModule.java b/server/src/main/java/io/druid/server/emitter/EmitterModule.java
index 7df0137b59c..9c3641bad97 100644
--- a/server/src/main/java/io/druid/server/emitter/EmitterModule.java
+++ b/server/src/main/java/io/druid/server/emitter/EmitterModule.java
@@ -19,7 +19,6 @@
 
 package io.druid.server.emitter;
 
-import com.google.common.base.Strings;
 import com.google.common.base.Supplier;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
@@ -34,14 +33,15 @@
 import com.google.inject.multibindings.MapBinder;
 import com.google.inject.name.Named;
 import com.google.inject.name.Names;
-import io.druid.java.util.emitter.EmittingLogger;
-import io.druid.java.util.emitter.core.Emitter;
-import io.druid.java.util.emitter.service.ServiceEmitter;
 import io.druid.guice.LazySingleton;
 import io.druid.guice.ManageLifecycle;
 import io.druid.guice.annotations.Self;
 import io.druid.java.util.common.ISE;
+import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.logger.Logger;
+import io.druid.java.util.emitter.EmittingLogger;
+import io.druid.java.util.emitter.core.Emitter;
+import io.druid.java.util.emitter.service.ServiceEmitter;
 import io.druid.server.DruidNode;
 
 import java.lang.annotation.Annotation;
@@ -88,7 +88,7 @@ public void configure(Binder binder)
     String version = getClass().getPackage().getImplementationVersion();
     extraServiceDimensions
         .addBinding("version")
-        .toInstance(Strings.nullToEmpty(version)); // Version is null during `mvn test`.
+        .toInstance(StringUtils.nullToEmptyNonDruidDataString(version)); // Version is null during `mvn test`.
   }
 
   @Provides
diff --git a/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java b/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java
index 35288b6bc3f..535b5032ee6 100644
--- a/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java
+++ b/server/src/main/java/io/druid/server/listener/announcer/ListeningAnnouncerConfig.java
@@ -22,8 +22,8 @@
 import com.fasterxml.jackson.annotation.JacksonInject;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.inject.Inject;
+import io.druid.java.util.common.StringUtils;
 import io.druid.server.initialization.ZkPathsConfig;
 import org.apache.curator.utils.ZKPaths;
 
@@ -93,7 +93,7 @@ public String getAnnouncementPath(String listenerName)
   {
     return ZKPaths.makePath(
         getListenersPath(), Preconditions.checkNotNull(
-            Strings.emptyToNull(listenerName), "Listener name cannot be null"
+            StringUtils.emptyToNullNonDruidDataString(listenerName), "Listener name cannot be null"
         )
     );
   }
diff --git a/server/src/test/java/io/druid/query/dimension/LookupDimensionSpecTest.java b/server/src/test/java/io/druid/query/dimension/LookupDimensionSpecTest.java
index 02c44c29725..2ee0a19c2d9 100644
--- a/server/src/test/java/io/druid/query/dimension/LookupDimensionSpecTest.java
+++ b/server/src/test/java/io/druid/query/dimension/LookupDimensionSpecTest.java
@@ -22,8 +22,8 @@
 import com.fasterxml.jackson.databind.InjectableValues;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.jsontype.NamedType;
-import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.query.extraction.MapLookupExtractor;
@@ -31,6 +31,7 @@
 import io.druid.query.lookup.LookupExtractorFactoryContainer;
 import io.druid.query.lookup.LookupReferencesManager;
 import io.druid.query.lookup.MapLookupExtractorFactory;
+import io.druid.segment.TestHelper;
 import junitparams.JUnitParamsRunner;
 import junitparams.Parameters;
 import org.easymock.EasyMock;
@@ -127,11 +128,11 @@ public void testGetOutputName()
         },
         new Object[]{
             new LookupDimensionSpec("dimName", "outputName", MAP_LOOKUP_EXTRACTOR, false, null, null, null, true),
-            ImmutableMap.of("not there", "")
+            TestHelper.createExpectedMap("not there", null)
         },
         new Object[]{
             new LookupDimensionSpec("dimName", "outputName", null, false, null, "lookupName", LOOKUP_REF_MANAGER, true),
-            ImmutableMap.of("not there", "")
+            TestHelper.createExpectedMap("not there", null)
         },
         new Object[]{
             new LookupDimensionSpec("dimName", "outputName", MAP_LOOKUP_EXTRACTOR, false, "Missing_value", null, null,
@@ -162,7 +163,10 @@ public void testGetOutputName()
   public void testApply(DimensionSpec dimensionSpec, Map<String, String> map)
   {
     for (Map.Entry<String, String> entry : map.entrySet()) {
-      Assert.assertEquals(Strings.emptyToNull(entry.getValue()), dimensionSpec.getExtractionFn().apply(entry.getKey()));
+      Assert.assertEquals(
+          NullHandling.emptyToNullIfNeeded(entry.getValue()),
+          dimensionSpec.getExtractionFn().apply(entry.getKey())
+      );
     }
   }
 
diff --git a/server/src/test/java/io/druid/query/expression/ExprMacroTest.java b/server/src/test/java/io/druid/query/expression/ExprMacroTest.java
index 5dc3135ed96..be1bf5464bd 100644
--- a/server/src/test/java/io/druid/query/expression/ExprMacroTest.java
+++ b/server/src/test/java/io/druid/query/expression/ExprMacroTest.java
@@ -20,6 +20,7 @@
 package io.druid.query.expression;
 
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.math.expr.Expr;
 import io.druid.math.expr.Parser;
@@ -84,8 +85,8 @@ public void testRegexpExtract()
   public void testTimestampCeil()
   {
     assertExpr("timestamp_ceil(t, 'P1M')", DateTimes.of("2000-03-01").getMillis());
-    assertExpr("timestamp_ceil(t, 'P1D','','America/Los_Angeles')", DateTimes.of("2000-02-03T08").getMillis());
-    assertExpr("timestamp_ceil(t, 'P1D','',CityOfAngels)", DateTimes.of("2000-02-03T08").getMillis());
+    assertExpr("timestamp_ceil(t, 'P1D',null,'America/Los_Angeles')", DateTimes.of("2000-02-03T08").getMillis());
+    assertExpr("timestamp_ceil(t, 'P1D',null,CityOfAngels)", DateTimes.of("2000-02-03T08").getMillis());
     assertExpr("timestamp_ceil(t, 'P1D','1970-01-01T01','Etc/UTC')", DateTimes.of("2000-02-04T01").getMillis());
   }
 
@@ -93,8 +94,8 @@ public void testTimestampCeil()
   public void testTimestampFloor()
   {
     assertExpr("timestamp_floor(t, 'P1M')", DateTimes.of("2000-02-01").getMillis());
-    assertExpr("timestamp_floor(t, 'P1D','','America/Los_Angeles')", DateTimes.of("2000-02-02T08").getMillis());
-    assertExpr("timestamp_floor(t, 'P1D','',CityOfAngels)", DateTimes.of("2000-02-02T08").getMillis());
+    assertExpr("timestamp_floor(t, 'P1D',null,'America/Los_Angeles')", DateTimes.of("2000-02-02T08").getMillis());
+    assertExpr("timestamp_floor(t, 'P1D',null,CityOfAngels)", DateTimes.of("2000-02-02T08").getMillis());
     assertExpr("timestamp_floor(t, 'P1D','1970-01-01T01','Etc/UTC')", DateTimes.of("2000-02-03T01").getMillis());
   }
 
@@ -122,12 +123,12 @@ public void testTimestampParse()
     assertExpr("timestamp_parse(tstr)", DateTimes.of("2000-02-03T04:05:06").getMillis());
     assertExpr("timestamp_parse(tstr_sql)", DateTimes.of("2000-02-03T04:05:06").getMillis());
     assertExpr(
-        "timestamp_parse(tstr_sql,'','America/Los_Angeles')",
+        "timestamp_parse(tstr_sql,null,'America/Los_Angeles')",
         DateTimes.of("2000-02-03T04:05:06-08:00").getMillis()
     );
     assertExpr("timestamp_parse('2000-02-03')", DateTimes.of("2000-02-03").getMillis());
     assertExpr("timestamp_parse('2000-02')", DateTimes.of("2000-02-01").getMillis());
-    assertExpr("timestamp_parse('')", null);
+    assertExpr("timestamp_parse(null)", null);
     assertExpr("timestamp_parse('z2000')", null);
     assertExpr("timestamp_parse(tstr_sql,'yyyy-MM-dd HH:mm:ss')", DateTimes.of("2000-02-03T04:05:06").getMillis());
     assertExpr("timestamp_parse('02/03/2000','MM/dd/yyyy')", DateTimes.of("2000-02-03").getMillis());
@@ -148,36 +149,39 @@ public void testTimestampFormat()
   @Test
   public void testTrim()
   {
-    assertExpr("trim('')", null);
+    String emptyString = NullHandling.replaceWithDefault() ? null : "";
+    assertExpr("trim('')", emptyString);
     assertExpr("trim(concat(' ',x,' '))", "foo");
     assertExpr("trim(spacey)", "hey there");
     assertExpr("trim(spacey, '')", "  hey there  ");
     assertExpr("trim(spacey, 'he ')", "y ther");
-    assertExpr("trim(spacey, spacey)", null);
+    assertExpr("trim(spacey, spacey)", emptyString);
     assertExpr("trim(spacey, substring(spacey, 0, 4))", "y ther");
   }
 
   @Test
   public void testLTrim()
   {
-    assertExpr("ltrim('')", null);
+    String emptyString = NullHandling.replaceWithDefault() ? null : "";
+    assertExpr("ltrim('')", emptyString);
     assertExpr("ltrim(concat(' ',x,' '))", "foo ");
     assertExpr("ltrim(spacey)", "hey there  ");
     assertExpr("ltrim(spacey, '')", "  hey there  ");
     assertExpr("ltrim(spacey, 'he ')", "y there  ");
-    assertExpr("ltrim(spacey, spacey)", null);
+    assertExpr("ltrim(spacey, spacey)", emptyString);
     assertExpr("ltrim(spacey, substring(spacey, 0, 4))", "y there  ");
   }
 
   @Test
   public void testRTrim()
   {
-    assertExpr("rtrim('')", null);
+    String emptyString = NullHandling.replaceWithDefault() ? null : "";
+    assertExpr("rtrim('')", emptyString);
     assertExpr("rtrim(concat(' ',x,' '))", " foo");
     assertExpr("rtrim(spacey)", "  hey there");
     assertExpr("rtrim(spacey, '')", "  hey there  ");
     assertExpr("rtrim(spacey, 'he ')", "  hey ther");
-    assertExpr("rtrim(spacey, spacey)", null);
+    assertExpr("rtrim(spacey, spacey)", emptyString);
     assertExpr("rtrim(spacey, substring(spacey, 0, 4))", "  hey ther");
   }
 
diff --git a/server/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java b/server/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java
index 0e990d89e37..2643f31aa55 100644
--- a/server/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java
+++ b/server/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java
@@ -66,7 +66,7 @@ public void testSimpleDelegation()
     Assert.assertEquals(false, fn.isInjective());
     Assert.assertEquals(ExtractionFn.ExtractionType.MANY_TO_ONE, fn.getExtractionType());
 
-    for (String orig : Arrays.asList("", "foo", "bat")) {
+    for (String orig : Arrays.asList(null, "foo", "bat")) {
       Assert.assertEquals(LOOKUP_EXTRACTOR.apply(orig), fn.apply(orig));
     }
     Assert.assertEquals("not in the map", fn.apply("not in the map"));
diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java
index a052583ed88..dbaa036cfd1 100644
--- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java
+++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java
@@ -25,6 +25,7 @@
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.data.input.Committer;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
@@ -82,21 +83,24 @@ public void testSimpleIngestion() throws Exception
 
       // add
       commitMetadata.put("x", "1");
-      Assert.assertEquals(1,
-                          appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier)
-                                      .getNumRowsInSegment()
+      Assert.assertEquals(
+          1,
+          appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier)
+                      .getNumRowsInSegment()
       );
 
       commitMetadata.put("x", "2");
-      Assert.assertEquals(2,
-                          appenderator.add(IDENTIFIERS.get(0), IR("2000", "bar", 2), committerSupplier)
-                                      .getNumRowsInSegment()
+      Assert.assertEquals(
+          2,
+          appenderator.add(IDENTIFIERS.get(0), IR("2000", "bar", 2), committerSupplier)
+                      .getNumRowsInSegment()
       );
 
       commitMetadata.put("x", "3");
-      Assert.assertEquals(1,
-                          appenderator.add(IDENTIFIERS.get(1), IR("2000", "qux", 4), committerSupplier)
-                                      .getNumRowsInSegment()
+      Assert.assertEquals(
+          1,
+          appenderator.add(IDENTIFIERS.get(1), IR("2000", "qux", 4), committerSupplier)
+                      .getNumRowsInSegment()
       );
 
       // getSegments
@@ -172,10 +176,17 @@ public void run()
 
       appenderator.startJob();
       appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier);
-      //expectedSizeInBytes = 44(map overhead) + 28 (TimeAndDims overhead) + 56 (aggregator metrics) + 10 (dimsKeySize) = 138
-      Assert.assertEquals(138, ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(0)));
+      //expectedSizeInBytes = 44(map overhead) + 28 (TimeAndDims overhead) + 56 (aggregator metrics) + 10 (dimsKeySize) = 138 + 1 byte when null handling is enabled
+      int nullHandlingOverhead = NullHandling.sqlCompatible() ? 1 : 0;
+      Assert.assertEquals(
+          138 + nullHandlingOverhead,
+          ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(0))
+      );
       appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier);
-      Assert.assertEquals(138, ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(1)));
+      Assert.assertEquals(
+          138 + nullHandlingOverhead,
+          ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(1))
+      );
       appenderator.close();
       Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
     }
@@ -209,9 +220,13 @@ public void run()
       appenderator.startJob();
       appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier);
       //expectedSizeInBytes = 44(map overhead) + 28 (TimeAndDims overhead) + 56 (aggregator metrics) + 10 (dimsKeySize) = 138
-      Assert.assertEquals(138, ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory());
+      int nullHandlingOverhead = NullHandling.sqlCompatible() ? 1 : 0;
+      Assert.assertEquals(138 + nullHandlingOverhead, ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory());
       appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier);
-      Assert.assertEquals(276, ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory());
+      Assert.assertEquals(
+          276 + 2 * nullHandlingOverhead,
+          ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory()
+      );
       appenderator.close();
       Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
     }
@@ -247,10 +262,17 @@ public void run()
       Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
       appenderator.add(IDENTIFIERS.get(0), IR("2000", "foo", 1), committerSupplier);
       //we still calculate the size even when ignoring it to make persist decision
-      Assert.assertEquals(138, ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(0)));
+      int nullHandlingOverhead = NullHandling.sqlCompatible() ? 1 : 0;
+      Assert.assertEquals(
+          138 + nullHandlingOverhead,
+          ((AppenderatorImpl) appenderator).getBytesInMemory(IDENTIFIERS.get(0))
+      );
       Assert.assertEquals(1, ((AppenderatorImpl) appenderator).getRowsInMemory());
       appenderator.add(IDENTIFIERS.get(1), IR("2000", "bar", 1), committerSupplier);
-      Assert.assertEquals(276, ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory());
+      Assert.assertEquals(
+          276 + 2 * nullHandlingOverhead,
+          ((AppenderatorImpl) appenderator).getBytesCurrentlyInMemory()
+      );
       Assert.assertEquals(2, ((AppenderatorImpl) appenderator).getRowsInMemory());
       appenderator.close();
       Assert.assertEquals(0, ((AppenderatorImpl) appenderator).getRowsInMemory());
diff --git a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java
index 850fc8b8937..38632888c79 100644
--- a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java
+++ b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java
@@ -68,9 +68,6 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-//CHECKSTYLE.OFF: Regexp
-//CHECKSTYLE.ON: Regexp
-
 /**
  */
 public class LoadRuleTest
diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java b/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java
index b779fdd03a0..69c3915a3b4 100644
--- a/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java
+++ b/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java
@@ -90,7 +90,7 @@ public static String stringLiteral(final String s)
 
   public static String nullLiteral()
   {
-    return "''";
+    return "null";
   }
 
   public static String functionCall(final String functionName, final List<DruidExpression> args)
diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java b/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java
index 8917260d741..efddca82f13 100644
--- a/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java
+++ b/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java
@@ -33,14 +33,15 @@
 import io.druid.query.extraction.ExtractionFn;
 import io.druid.query.extraction.TimeFormatExtractionFn;
 import io.druid.query.filter.AndDimFilter;
-import io.druid.query.filter.BoundDimFilter;
 import io.druid.query.filter.DimFilter;
 import io.druid.query.filter.ExpressionDimFilter;
 import io.druid.query.filter.LikeDimFilter;
 import io.druid.query.filter.NotDimFilter;
 import io.druid.query.filter.OrDimFilter;
+import io.druid.query.filter.SelectorDimFilter;
 import io.druid.query.ordering.StringComparator;
 import io.druid.query.ordering.StringComparators;
+import io.druid.common.config.NullHandling;
 import io.druid.segment.column.Column;
 import io.druid.segment.column.ValueType;
 import io.druid.sql.calcite.filtration.BoundRefKey;
@@ -311,13 +312,10 @@ private static DimFilter toSimpleLeafFilter(
         return null;
       }
 
-      final BoundDimFilter equalFilter = Bounds.equalTo(
-          new BoundRefKey(
-              druidExpression.getSimpleExtraction().getColumn(),
-              druidExpression.getSimpleExtraction().getExtractionFn(),
-              StringComparators.LEXICOGRAPHIC
-          ),
-          ""
+      final DimFilter equalFilter = new SelectorDimFilter(
+          druidExpression.getSimpleExtraction().getColumn(),
+          NullHandling.defaultStringValue(),
+          druidExpression.getSimpleExtraction().getExtractionFn()
       );
 
       return kind == SqlKind.IS_NOT_NULL ? new NotDimFilter(equalFilter) : equalFilter;
diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/UnaryFunctionOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/UnaryFunctionOperatorConversion.java
new file mode 100644
index 00000000000..5465e311b75
--- /dev/null
+++ b/sql/src/main/java/io/druid/sql/calcite/expression/UnaryFunctionOperatorConversion.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.sql.calcite.expression;
+
+import com.google.common.collect.Iterables;
+import io.druid.java.util.common.StringUtils;
+import io.druid.sql.calcite.planner.PlannerContext;
+import io.druid.sql.calcite.table.RowSignature;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.sql.SqlOperator;
+
+/**
+ * Converts a unary SQL operator into a call to a single-argument Druid
+ * expression function, e.g. {@code IS NULL} into {@code isnull(expr)}.
+ */
+public class UnaryFunctionOperatorConversion implements SqlOperatorConversion
+{
+  private final SqlOperator operator;
+  private final String druidOperator;
+
+  public UnaryFunctionOperatorConversion(final SqlOperator operator, final String druidOperator)
+  {
+    this.operator = operator;
+    this.druidOperator = druidOperator;
+  }
+
+  @Override
+  public SqlOperator calciteOperator()
+  {
+    return operator;
+  }
+
+  @Override
+  public DruidExpression toDruidExpression(
+      final PlannerContext plannerContext,
+      final RowSignature rowSignature,
+      final RexNode rexNode
+  )
+  {
+    return OperatorConversions.convertCall(
+        plannerContext,
+        rowSignature,
+        rexNode,
+        operands -> DruidExpression.fromExpression(
+            StringUtils.format(
+                "%s(%s)",
+                druidOperator,
+                Iterables.getOnlyElement(operands).getExpression()
+            )
+        )
+    );
+  }
+}
diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/builtin/CeilOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/builtin/CeilOperatorConversion.java
index 216d7bcbcb1..5d4bd4b9fbd 100644
--- a/sql/src/main/java/io/druid/sql/calcite/expression/builtin/CeilOperatorConversion.java
+++ b/sql/src/main/java/io/druid/sql/calcite/expression/builtin/CeilOperatorConversion.java
@@ -19,7 +19,6 @@
 
 package io.druid.sql.calcite.expression.builtin;
 
-import com.google.common.collect.ImmutableList;
 import io.druid.java.util.common.StringUtils;
 import io.druid.java.util.common.granularity.PeriodGranularity;
 import io.druid.sql.calcite.expression.DruidExpression;
@@ -35,6 +34,7 @@
 import org.apache.calcite.sql.SqlOperator;
 import org.apache.calcite.sql.fun.SqlStdOperatorTable;
 
+import java.util.Arrays;
 import java.util.stream.Collectors;
 
 public class CeilOperatorConversion implements SqlOperatorConversion
@@ -80,7 +80,7 @@ public DruidExpression toDruidExpression(
       // So there is no simple extraction for this operator.
       return DruidExpression.fromFunctionCall(
           "timestamp_ceil",
-          ImmutableList.of(
+          Arrays.asList(
               druidExpression.getExpression(),
               DruidExpression.stringLiteral(granularity.getPeriod().toString()),
               DruidExpression.numberLiteral(
diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java b/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java
index 8afb1466399..5d825be4d24 100644
--- a/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java
+++ b/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java
@@ -19,6 +19,7 @@
 
 package io.druid.sql.calcite.planner;
 
+import com.google.common.base.Preconditions;
 import com.google.common.io.BaseEncoding;
 import com.google.common.primitives.Chars;
 import io.druid.java.util.common.DateTimes;
@@ -107,6 +108,7 @@ public static SchemaPlus createRootSchema(final Schema druidSchema, final Author
 
   public static String escapeStringLiteral(final String s)
   {
+    Preconditions.checkNotNull(s, "s"); // fail fast: null literals must go through nullLiteral(); the null branch below is now dead
     if (s == null) {
       return "''";
     } else {
diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java b/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java
index b2ea2e183d7..59fab5b5520 100644
--- a/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java
+++ b/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java
@@ -37,6 +37,7 @@
 import io.druid.sql.calcite.expression.BinaryOperatorConversion;
 import io.druid.sql.calcite.expression.DirectOperatorConversion;
 import io.druid.sql.calcite.expression.SqlOperatorConversion;
+import io.druid.sql.calcite.expression.UnaryFunctionOperatorConversion;
 import io.druid.sql.calcite.expression.UnaryPrefixOperatorConversion;
 import io.druid.sql.calcite.expression.UnarySuffixOperatorConversion;
 import io.druid.sql.calcite.expression.builtin.BTrimOperatorConversion;
@@ -117,8 +118,8 @@
           .add(new DirectOperatorConversion(SqlStdOperatorTable.UPPER, "upper"))
           .add(new UnaryPrefixOperatorConversion(SqlStdOperatorTable.NOT, "!"))
           .add(new UnaryPrefixOperatorConversion(SqlStdOperatorTable.UNARY_MINUS, "-"))
-          .add(new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_NULL, "== ''"))
-          .add(new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_NOT_NULL, "!= ''"))
+          .add(new UnaryFunctionOperatorConversion(SqlStdOperatorTable.IS_NULL, "isnull"))
+          .add(new UnaryFunctionOperatorConversion(SqlStdOperatorTable.IS_NOT_NULL, "notnull"))
           .add(new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_FALSE, "<= 0")) // Matches Evals.asBoolean
           .add(new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_NOT_TRUE, "<= 0")) // Matches Evals.asBoolean
           .add(new UnarySuffixOperatorConversion(SqlStdOperatorTable.IS_TRUE, "> 0")) // Matches Evals.asBoolean
diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java b/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java
index 1e187f78742..093befbbf62 100644
--- a/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java
+++ b/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java
@@ -84,7 +84,7 @@ public void reduce(
         if (sqlTypeName == SqlTypeName.BOOLEAN) {
           literal = rexBuilder.makeLiteral(exprResult.asBoolean(), constExp.getType(), true);
         } else if (sqlTypeName == SqlTypeName.DATE) {
-          if (!constExp.getType().isNullable() && exprResult.isNull()) {
+          if (!constExp.getType().isNullable() && exprResult.isNumericNull()) {
             throw new IAE("Illegal DATE constant: %s", constExp);
           }
 
@@ -95,7 +95,7 @@ public void reduce(
               )
           );
         } else if (sqlTypeName == SqlTypeName.TIMESTAMP) {
-          if (!constExp.getType().isNullable() && exprResult.isNull()) {
+          if (!constExp.getType().isNullable() && exprResult.isNumericNull()) {
             throw new IAE("Illegal TIMESTAMP constant: %s", constExp);
           }
 
diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java
index 0da912b9564..5b98c3f15c5 100644
--- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java
+++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java
@@ -28,6 +28,7 @@
 import io.druid.java.util.common.guava.Sequence;
 import io.druid.java.util.common.guava.Sequences;
 import io.druid.query.ResourceLimitExceededException;
+import io.druid.segment.DimensionHandlerUtils;
 import io.druid.sql.calcite.planner.PlannerContext;
 import org.apache.calcite.interpreter.BindableConvention;
 import org.apache.calcite.plan.RelOptCluster;
@@ -294,7 +295,11 @@ public RelOptCost computeSelfCost(final RelOptPlanner planner, final RelMetadata
 
             for (int i : rightKeys) {
               final Object value = row[i];
-              final String stringValue = value != null ? String.valueOf(value) : "";
+              if (value == null) {
+                // NULLs never match in a join. FIXME: skipping here shifts "values" out of index-alignment with leftExpressions below.
+                continue;
+              }
+              final String stringValue = DimensionHandlerUtils.convertObjectToString(value);
               values.add(stringValue);
               if (values.size() > maxSemiJoinRowsInMemory) {
                 throw new ResourceLimitExceededException(
@@ -308,16 +313,18 @@ public RelOptCost computeSelfCost(final RelOptPlanner planner, final RelMetadata
 
               for (int i = 0; i < values.size(); i++) {
                 final String value = values.get(i);
-                subConditions.add(
-                    getCluster().getRexBuilder().makeCall(
-                        SqlStdOperatorTable.EQUALS,
-                        leftExpressions.get(i),
-                        getCluster().getRexBuilder().makeLiteral(value)
-                    )
-                );
+                // NULLS are not supposed to match NULLs in a join. So ignore them.
+                if (value != null) {
+                  subConditions.add(
+                      getCluster().getRexBuilder().makeCall(
+                          SqlStdOperatorTable.EQUALS,
+                          leftExpressions.get(i),
+                          getCluster().getRexBuilder().makeLiteral(value)
+                      )
+                  );
+                }
               }
-
-              theConditions.add(makeAnd(subConditions));
+              theConditions.add(makeAnd(subConditions));
             }
             return theConditions;
           }
diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java b/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java
index 3c063f82b36..bc82a7f9dec 100644
--- a/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java
+++ b/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java
@@ -22,7 +22,6 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Maps;
 import com.google.common.primitives.Ints;
@@ -46,6 +45,7 @@
 import io.druid.query.topn.TopNQuery;
 import io.druid.query.topn.TopNResultValue;
 import io.druid.segment.DimensionHandlerUtils;
+import io.druid.common.config.NullHandling;
 import io.druid.segment.column.Column;
 import io.druid.server.QueryLifecycleFactory;
 import io.druid.server.security.AuthenticationResult;
@@ -400,7 +400,7 @@ private Object coerce(final Object value, final SqlTypeName sqlType)
 
     if (SqlTypeName.CHAR_TYPES.contains(sqlType)) {
       if (value == null || value instanceof String) {
-        coercedValue = Strings.nullToEmpty((String) value);
+        coercedValue = NullHandling.nullToEmptyIfNeeded((String) value);
       } else if (value instanceof NlsString) {
         coercedValue = ((NlsString) value).getValue();
       } else if (value instanceof Number) {
diff --git a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java
index c8b9e6e00b0..542f174d3f9 100644
--- a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java
+++ b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java
@@ -21,6 +21,7 @@
 
 import com.google.common.base.Function;
 import com.google.common.collect.Lists;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.math.expr.ExprMacroTable;
 import io.druid.server.security.AllowAllAuthenticator;
@@ -139,11 +140,17 @@ public void testSelectAllInFirstFrame()
             true,
             Lists.<Object>newArrayList(
                 new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f},
-                new Object[]{DateTimes.of("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f},
+                new Object[]{
+                    DateTimes.of("2000-01-02").getMillis(),
+                    1L,
+                    "10.1",
+                    NullHandling.defaultStringValue(),
+                    2.0f
+                },
                 new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f},
                 new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f},
                 new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f},
-                new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", "", 6.0f}
+                new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", NullHandling.defaultStringValue(), 6.0f}
             )
         ),
         frame
@@ -166,7 +173,13 @@ public void testSelectSplitOverTwoFrames()
             false,
             Lists.<Object>newArrayList(
                 new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f},
-                new Object[]{DateTimes.of("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f}
+                new Object[]{
+                    DateTimes.of("2000-01-02").getMillis(),
+                    1L,
+                    "10.1",
+                    NullHandling.defaultStringValue(),
+                    2.0f
+                }
             )
         ),
         frame
@@ -183,7 +196,7 @@ public void testSelectSplitOverTwoFrames()
                 new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f},
                 new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f},
                 new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f},
-                new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", "", 6.0f}
+                new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", NullHandling.defaultStringValue(), 6.0f}
             )
         ),
         frame
diff --git a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java
index d558f777b42..e2a3ecafab5 100644
--- a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java
@@ -23,6 +23,7 @@
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
+import io.druid.common.config.NullHandling;
 import io.druid.hll.HLLCV1;
 import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.Intervals;
@@ -88,6 +89,7 @@
 import io.druid.segment.virtual.ExpressionVirtualColumn;
 import io.druid.server.security.AuthenticationResult;
 import io.druid.server.security.ForbiddenException;
+import io.druid.sql.calcite.expression.DruidExpression;
 import io.druid.sql.calcite.filtration.Filtration;
 import io.druid.sql.calcite.planner.Calcites;
 import io.druid.sql.calcite.planner.DruidOperatorTable;
@@ -127,6 +129,7 @@
 
 public class CalciteQueryTest extends CalciteTestBase
 {
+
   private static final Logger log = new Logger(CalciteQueryTest.class);
 
   private static final PlannerConfig PLANNER_CONFIG_DEFAULT = new PlannerConfig();
@@ -303,7 +306,7 @@ public void testSelectCountStart() throws Exception
                                .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS)
                                .build()),
         ImmutableList.of(
-            new Object[]{11.0, 0.0}
+            new Object[]{11.0, NullHandling.defaultDoubleValue()}
         )
     );
 
@@ -326,7 +329,7 @@ public void testSelectCountStart() throws Exception
                                .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS)
                                .build()),
         ImmutableList.of(
-            new Object[]{11.0, 0.0}
+            new Object[]{11.0, NullHandling.defaultDoubleValue()}
         )
     );
 
@@ -545,6 +548,7 @@ public void testExplainInformationSchemaColumns() throws Exception
   @Test
   public void testSelectStar() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT * FROM druid.foo",
         ImmutableList.<Query>of(
@@ -558,11 +562,11 @@ public void testSelectStar() throws Exception
         ),
         ImmutableList.of(
             new Object[]{T("2000-01-01"), 1L, "", "a", 1f, 1.0, HLLCV1.class.getName()},
-            new Object[]{T("2000-01-02"), 1L, "10.1", "", 2f, 2.0, HLLCV1.class.getName()},
+            new Object[]{T("2000-01-02"), 1L, "10.1", nullValue, 2f, 2.0, HLLCV1.class.getName()},
             new Object[]{T("2000-01-03"), 1L, "2", "", 3f, 3.0, HLLCV1.class.getName()},
             new Object[]{T("2001-01-01"), 1L, "1", "a", 4f, 4.0, HLLCV1.class.getName()},
             new Object[]{T("2001-01-02"), 1L, "def", "abc", 5f, 5.0, HLLCV1.class.getName()},
-            new Object[]{T("2001-01-03"), 1L, "abc", "", 6f, 6.0, HLLCV1.class.getName()}
+            new Object[]{T("2001-01-03"), 1L, "abc", nullValue, 6f, 6.0, HLLCV1.class.getName()}
         )
     );
   }
@@ -589,7 +593,15 @@ public void testSelectStarOnForbiddenTable() throws Exception
                 .build()
         ),
         ImmutableList.of(
-            new Object[]{T("2000-01-01"), 1L, "forbidden", "abcd", 9999.0f, 0.0, HLLCV1.class.getName()}
+            new Object[]{
+                T("2000-01-01"),
+                1L,
+                "forbidden",
+                "abcd",
+                9999.0f,
+                NullHandling.defaultDoubleValue(),
+                HLLCV1.class.getName()
+            }
         )
     );
   }
@@ -631,6 +643,8 @@ public void testExplainSelectStar() throws Exception
   @Test
   public void testSelectStarWithLimit() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
+
     testQuery(
         "SELECT * FROM druid.foo LIMIT 2",
         ImmutableList.of(
@@ -645,7 +659,7 @@ public void testSelectStarWithLimit() throws Exception
         ),
         ImmutableList.of(
             new Object[]{T("2000-01-01"), 1L, "", "a", 1.0f, 1.0, HLLCV1.class.getName()},
-            new Object[]{T("2000-01-02"), 1L, "10.1", "", 2.0f, 2.0, HLLCV1.class.getName()}
+            new Object[]{T("2000-01-02"), 1L, "10.1", nullValue, 2.0f, 2.0, HLLCV1.class.getName()}
         )
     );
   }
@@ -653,6 +667,7 @@ public void testSelectStarWithLimit() throws Exception
   @Test
   public void testSelectWithProjection() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT SUBSTRING(dim2, 1, 1) FROM druid.foo LIMIT 2",
         ImmutableList.of(
@@ -670,7 +685,7 @@ public void testSelectWithProjection() throws Exception
         ),
         ImmutableList.of(
             new Object[]{"a"},
-            new Object[]{""}
+            new Object[]{nullValue}
         )
     );
   }
@@ -678,6 +693,8 @@ public void testSelectWithProjection() throws Exception
   @Test
   public void testSelectStarWithLimitTimeDescending() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
+
     testQuery(
         "SELECT * FROM druid.foo ORDER BY __time DESC LIMIT 2",
         ImmutableList.of(
@@ -693,7 +710,7 @@ public void testSelectStarWithLimitTimeDescending() throws Exception
                   .build()
         ),
         ImmutableList.of(
-            new Object[]{T("2001-01-03"), 1L, "abc", "", 6f, 6d, HLLCV1.class.getName()},
+            new Object[]{T("2001-01-03"), 1L, "abc", nullValue, 6f, 6d, HLLCV1.class.getName()},
             new Object[]{T("2001-01-02"), 1L, "def", "abc", 5f, 5d, HLLCV1.class.getName()}
         )
     );
@@ -702,6 +719,7 @@ public void testSelectStarWithLimitTimeDescending() throws Exception
   @Test
   public void testSelectStarWithoutLimitTimeAscending() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT * FROM druid.foo ORDER BY __time",
         ImmutableList.of(
@@ -734,11 +752,11 @@ public void testSelectStarWithoutLimitTimeAscending() throws Exception
         ),
         ImmutableList.of(
             new Object[]{T("2000-01-01"), 1L, "", "a", 1f, 1.0, HLLCV1.class.getName()},
-            new Object[]{T("2000-01-02"), 1L, "10.1", "", 2f, 2.0, HLLCV1.class.getName()},
+            new Object[]{T("2000-01-02"), 1L, "10.1", nullValue, 2f, 2.0, HLLCV1.class.getName()},
             new Object[]{T("2000-01-03"), 1L, "2", "", 3f, 3.0, HLLCV1.class.getName()},
             new Object[]{T("2001-01-01"), 1L, "1", "a", 4f, 4.0, HLLCV1.class.getName()},
             new Object[]{T("2001-01-02"), 1L, "def", "abc", 5f, 5.0, HLLCV1.class.getName()},
-            new Object[]{T("2001-01-03"), 1L, "abc", "", 6f, 6.0, HLLCV1.class.getName()}
+            new Object[]{T("2001-01-03"), 1L, "abc", nullValue, 6f, 6.0, HLLCV1.class.getName()}
         )
     );
   }
@@ -746,6 +764,7 @@ public void testSelectStarWithoutLimitTimeAscending() throws Exception
   @Test
   public void testSelectSingleColumnTwice() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT dim2 x, dim2 y FROM druid.foo LIMIT 2",
         ImmutableList.of(
@@ -760,7 +779,7 @@ public void testSelectSingleColumnTwice() throws Exception
         ),
         ImmutableList.of(
             new Object[]{"a", "a"},
-            new Object[]{"", ""}
+            new Object[]{nullValue, nullValue}
         )
     );
   }
@@ -866,9 +885,12 @@ public void testSelfJoinWithFallback() throws Exception
   @Test
   public void testExplainSelfJoinWithFallback() throws Exception
   {
+    String emptyStringEq = NullHandling.replaceWithDefault() ? null : "\"\"";
     final String explanation =
         "BindableJoin(condition=[=($0, $2)], joinType=[inner])\n"
-        + "  DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":\"\",\"extractionFn\":null}},\"columns\":[\"dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING}])\n"
+        + "  DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":"
+        + emptyStringEq
+        + ",\"extractionFn\":null}},\"columns\":[\"dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING}])\n"
         + "  DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":null,\"columns\":[\"dim1\",\"dim2\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}], signature=[{dim1:STRING, dim2:STRING}])\n";
 
     testQuery(
@@ -1215,9 +1237,14 @@ public void testHavingOnApproximateCountDistinct() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", 3L},
             new Object[]{"a", 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{null, 2L},
+            new Object[]{"a", 2L}
         )
     );
   }
@@ -1267,9 +1294,14 @@ public void testHavingOnExactCountDistinct() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", 3L},
             new Object[]{"a", 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{null, 2L},
+            new Object[]{"a", 2L}
         )
     );
   }
@@ -1334,9 +1366,13 @@ public void testColumnComparison() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", 1.0f, 1L},
             new Object[]{"2", 3.0f, 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"2", 3.0f, 1L}
         )
     );
   }
@@ -1385,6 +1421,7 @@ public void testHavingOnRatio() throws Exception
   @Test
   public void testGroupByWithSelectProjections() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT\n"
         + "  dim1,"
@@ -1404,10 +1441,10 @@ public void testGroupByWithSelectProjections() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{"", ""},
-            new Object[]{"1", ""},
+            new Object[]{"", nullValue},
+            new Object[]{"1", nullValue},
             new Object[]{"10.1", "0.1"},
-            new Object[]{"2", ""},
+            new Object[]{"2", nullValue},
             new Object[]{"abc", "bc"},
             new Object[]{"def", "ef"}
         )
@@ -1417,6 +1454,7 @@ public void testGroupByWithSelectProjections() throws Exception
   @Test
   public void testGroupByWithSelectAndOrderByProjections() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT\n"
         + "  dim1,"
@@ -1456,9 +1494,9 @@ public void testGroupByWithSelectAndOrderByProjections() throws Exception
             new Object[]{"10.1", "0.1"},
             new Object[]{"abc", "bc"},
             new Object[]{"def", "ef"},
-            new Object[]{"1", ""},
-            new Object[]{"2", ""},
-            new Object[]{"", ""}
+            new Object[]{"1", nullValue},
+            new Object[]{"2", nullValue},
+            new Object[]{"", nullValue}
         )
     );
   }
@@ -1466,6 +1504,8 @@ public void testGroupByWithSelectAndOrderByProjections() throws Exception
   @Test
   public void testTopNWithSelectProjections() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
+
     testQuery(
         "SELECT\n"
         + "  dim1,"
@@ -1488,10 +1528,10 @@ public void testTopNWithSelectProjections() throws Exception
                 .build()
         ),
         ImmutableList.of(
-            new Object[]{"", ""},
-            new Object[]{"1", ""},
+            new Object[]{"", nullValue},
+            new Object[]{"1", nullValue},
             new Object[]{"10.1", "0.1"},
-            new Object[]{"2", ""},
+            new Object[]{"2", nullValue},
             new Object[]{"abc", "bc"},
             new Object[]{"def", "ef"}
         )
@@ -1501,6 +1541,8 @@ public void testTopNWithSelectProjections() throws Exception
   @Test
   public void testTopNWithSelectAndOrderByProjections() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
+
     testQuery(
         "SELECT\n"
         + "  dim1,"
@@ -1528,9 +1570,9 @@ public void testTopNWithSelectAndOrderByProjections() throws Exception
             new Object[]{"10.1", "0.1"},
             new Object[]{"abc", "bc"},
             new Object[]{"def", "ef"},
-            new Object[]{"1", ""},
-            new Object[]{"2", ""},
-            new Object[]{"", ""}
+            new Object[]{"1", nullValue},
+            new Object[]{"2", nullValue},
+            new Object[]{"", nullValue}
         )
     );
   }
@@ -1649,7 +1691,7 @@ public void testGroupByCaseWhen() throws Exception
                                 + "'match-cnt',"
                                 + "(timestamp_extract(\"__time\",'DAY','UTC') == 0),"
                                 + "'zero     ',"
-                                + "'')",
+                                + DruidExpression.nullLiteral() + ")",
                                 ValueType.STRING
                             )
                         )
@@ -1659,7 +1701,7 @@ public void testGroupByCaseWhen() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{"", 2L},
+            new Object[]{NullHandling.defaultStringValue(), 2L},
             new Object[]{"match-cnt", 1L},
             new Object[]{"match-m1 ", 3L}
         )
@@ -1683,7 +1725,7 @@ public void testGroupByCaseWhenOfTripleAnd() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "case_searched(((\"m1\" > 1) && (\"m1\" < 5) && (\"cnt\" == 1)),'x','')",
+                                "case_searched(((\"m1\" > 1) && (\"m1\" < 5) && (\"cnt\" == 1)),'x',null)",
                                 ValueType.STRING
                             )
                         )
@@ -1693,7 +1735,7 @@ public void testGroupByCaseWhenOfTripleAnd() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{"", 3L},
+            new Object[]{NullHandling.defaultStringValue(), 3L},
             new Object[]{"x", 3L}
         )
     );
@@ -1702,35 +1744,85 @@ public void testGroupByCaseWhenOfTripleAnd() throws Exception
   @Test
   public void testNullEmptyStringEquality() throws Exception
   {
-    // Doesn't conform to the SQL standard, but it's how we do it.
-    // This example is used in the sql.md doc.
+    testQuery(
+        "SELECT COUNT(*)\n"
+        + "FROM druid.foo\n"
+        + "WHERE NULLIF(dim2, 'a') IS NULL",
+        ImmutableList.of(
+            Druids.newTimeseriesQueryBuilder()
+                  .dataSource(CalciteTests.DATASOURCE1)
+                  .intervals(QSS(Filtration.eternity()))
+                  .granularity(Granularities.ALL)
+                  .filters(EXPRESSION_FILTER("case_searched((\"dim2\" == 'a'),1,isnull(\"dim2\"))"))
+                  .aggregators(AGGS(new CountAggregatorFactory("a0")))
+                  .context(TIMESERIES_CONTEXT_DEFAULT)
+                  .build()
+        ),
+        ImmutableList.of(
+            NullHandling.replaceWithDefault() ?
+            // Matches everything but "abc"
+            new Object[]{5L} :
+            // match only null values
+            new Object[]{4L}
+        )
+    );
+  }
+
+  @Test
+  public void testEmptyStringEquality() throws Exception
+  {
+    testQuery(
+        "SELECT COUNT(*)\n"
+        + "FROM druid.foo\n"
+        + "WHERE NULLIF(dim2, 'a') = ''",
+        ImmutableList.of(
+            Druids.newTimeseriesQueryBuilder()
+                  .dataSource(CalciteTests.DATASOURCE1)
+                  .intervals(QSS(Filtration.eternity()))
+                  .granularity(Granularities.ALL)
+                  .filters(EXPRESSION_FILTER("case_searched((\"dim2\" == 'a'),"
+                                             + (NullHandling.replaceWithDefault() ? "1" : "0")
+                                             + ",(\"dim2\" == ''))"))
+                  .aggregators(AGGS(new CountAggregatorFactory("a0")))
+                  .context(TIMESERIES_CONTEXT_DEFAULT)
+                  .build()
+        ),
+        ImmutableList.of(
+            NullHandling.replaceWithDefault() ?
+            // Matches everything but "abc"
+            new Object[]{5L} :
+            // match only empty string
+            new Object[]{1L}
+        )
+    );
+  }
+
+  @Test
+  public void testNullStringEquality() throws Exception
+  {
+    testQuery(
+        "SELECT COUNT(*)\n"
+        + "FROM druid.foo\n"
+        + "WHERE NULLIF(dim2, 'a') = null",
+        ImmutableList.of(
+            Druids.newTimeseriesQueryBuilder()
+                  .dataSource(CalciteTests.DATASOURCE1)
+                  .intervals(QSS(Filtration.eternity()))
+                  .granularity(Granularities.ALL)
+                  .filters(EXPRESSION_FILTER("case_searched((\"dim2\" == 'a'),"
+                                             + (NullHandling.replaceWithDefault() ? "1" : "0")
+                                             + ",(\"dim2\" == null))"))
+                  .aggregators(AGGS(new CountAggregatorFactory("a0")))
+                  .context(TIMESERIES_CONTEXT_DEFAULT)
+                  .build()
+        ),
+        NullHandling.replaceWithDefault() ?
+        // Matches everything but "abc"
+        ImmutableList.of(new Object[]{5L}) :
+        // null is not equal to null or any other value
+        ImmutableList.of()
+    );
 
-    final ImmutableList<String> wheres = ImmutableList.of(
-        "NULLIF(dim2, 'a') = ''",
-        "NULLIF(dim2, 'a') IS NULL"
-    );
-
-    for (String where : wheres) {
-      testQuery(
-          "SELECT COUNT(*)\n"
-          + "FROM druid.foo\n"
-          + "WHERE " + where,
-          ImmutableList.of(
-              Druids.newTimeseriesQueryBuilder()
-                    .dataSource(CalciteTests.DATASOURCE1)
-                    .intervals(QSS(Filtration.eternity()))
-                    .granularity(Granularities.ALL)
-                    .filters(EXPRESSION_FILTER("case_searched((\"dim2\" == 'a'),1,(\"dim2\" == ''))"))
-                    .aggregators(AGGS(new CountAggregatorFactory("a0")))
-                    .context(TIMESERIES_CONTEXT_DEFAULT)
-                    .build()
-          ),
-          ImmutableList.of(
-              // Matches everything but "abc"
-              new Object[]{5L}
-          )
-      );
-    }
   }
 
   @Test
@@ -1749,7 +1841,7 @@ public void testCoalesceColumns() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "case_searched((\"dim2\" != ''),\"dim2\",\"dim1\")",
+                                "case_searched(notnull(\"dim2\"),\"dim2\",\"dim1\")",
                                 ValueType.STRING
                             )
                         )
@@ -1758,11 +1850,18 @@ public void testCoalesceColumns() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"10.1", 1L},
             new Object[]{"2", 1L},
             new Object[]{"a", 2L},
             new Object[]{"abc", 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"", 1L},
+            new Object[]{"10.1", 1L},
+            new Object[]{"a", 2L},
+            new Object[]{"abc", 2L}
         )
     );
   }
@@ -1786,7 +1885,7 @@ public void testColumnIsNull() throws Exception
                   .build()
         ),
         ImmutableList.of(
-            new Object[]{3L}
+            new Object[]{NullHandling.replaceWithDefault() ? 3L : 2L}
         )
     );
   }
@@ -2007,14 +2106,18 @@ public void testCountNullableColumn() throws Exception
                   .aggregators(AGGS(
                       new FilteredAggregatorFactory(
                           new CountAggregatorFactory("a0"),
-                          NOT(SELECTOR("dim2", "", null))
+                          NOT(SELECTOR("dim2", null, null))
                       )
                   ))
                   .context(TIMESERIES_CONTEXT_DEFAULT)
                   .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{3L}
+        ) :
+        ImmutableList.of(
+            new Object[]{4L}
         )
     );
   }
@@ -2033,7 +2136,9 @@ public void testCountNullableExpression() throws Exception
                       new FilteredAggregatorFactory(
                           new CountAggregatorFactory("a0"),
                           EXPRESSION_FILTER(
-                              "(case_searched((\"dim2\" == 'abc'),'yes',(\"dim2\" == 'def'),'yes','') != '')"
+                              "notnull(case_searched((\"dim2\" == 'abc'),'yes',(\"dim2\" == 'def'),'yes',"
+                              + DruidExpression.nullLiteral()
+                              + "))"
                           )
                       )
                   ))
@@ -2325,7 +2430,7 @@ public void testFilterOnStringAsNumber() throws Exception
   public void testSimpleAggregations() throws Exception
   {
     testQuery(
-        "SELECT COUNT(*), COUNT(cnt), COUNT(dim1), AVG(cnt), SUM(cnt), SUM(cnt) + MIN(cnt) + MAX(cnt) FROM druid.foo",
+        "SELECT COUNT(*), COUNT(cnt), COUNT(dim1), AVG(cnt), SUM(cnt), SUM(cnt) + MIN(cnt) + MAX(cnt), COUNT(dim2) FROM druid.foo",
         ImmutableList.of(
             Druids.newTimeseriesQueryBuilder()
                   .dataSource(CalciteTests.DATASOURCE1)
@@ -2336,13 +2441,17 @@ public void testSimpleAggregations() throws Exception
                           new CountAggregatorFactory("a0"),
                           new FilteredAggregatorFactory(
                               new CountAggregatorFactory("a1"),
-                              NOT(SELECTOR("dim1", "", null))
+                              NOT(SELECTOR("dim1", null, null))
                           ),
                           new LongSumAggregatorFactory("a2:sum", "cnt"),
                           new CountAggregatorFactory("a2:count"),
                           new LongSumAggregatorFactory("a3", "cnt"),
                           new LongMinAggregatorFactory("a4", "cnt"),
-                          new LongMaxAggregatorFactory("a5", "cnt")
+                          new LongMaxAggregatorFactory("a5", "cnt"),
+                          new FilteredAggregatorFactory(
+                              new CountAggregatorFactory("a6"),
+                              NOT(SELECTOR("dim2", null, null))
+                          )
                       )
                   )
                   .postAggregators(
@@ -2359,8 +2468,12 @@ public void testSimpleAggregations() throws Exception
                   .context(TIMESERIES_CONTEXT_DEFAULT)
                   .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
-            new Object[]{6L, 6L, 5L, 1L, 6L, 8L}
+            new Object[]{6L, 6L, 5L, 1L, 6L, 8L, 3L}
+        ) :
+        ImmutableList.of(
+            new Object[]{6L, 6L, 6L, 1L, 6L, 8L, 4L}
         )
     );
   }
@@ -2533,7 +2646,7 @@ public void testFilteredAggregations() throws Exception
                       new FilteredAggregatorFactory(
                           new CountAggregatorFactory("a3"),
                           AND(
-                              NOT(SELECTOR("dim2", "", null)),
+                              NOT(SELECTOR("dim2", null, null)),
                               NOT(SELECTOR("dim1", "1", null))
                           )
                       ),
@@ -2578,8 +2691,12 @@ public void testFilteredAggregations() throws Exception
                   .context(TIMESERIES_CONTEXT_DEFAULT)
                   .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{1L, 5L, 1L, 2L, 5L, 5L, 2L, 1L, 5L, 1L, 5L}
+        ) :
+        ImmutableList.of(
+            new Object[]{1L, 5L, 1L, 3L, 5L, 5L, 2L, 1L, 5L, 1L, 5L}
         )
     );
   }
@@ -2647,8 +2764,12 @@ public void testFilteredAggregationWithNotIn() throws Exception
                   .context(TIMESERIES_CONTEXT_DEFAULT)
                   .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{5L, 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{5L, 3L}
         )
     );
   }
@@ -2828,10 +2949,16 @@ public void testExpressionFilteringAndGroupingOnStringCastToNumber() throws Exce
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{10.0f, 1L},
             new Object[]{2.0f, 1L},
             new Object[]{0.0f, 4L}
+        ) :
+        ImmutableList.of(
+            new Object[]{10.0f, 1L},
+            new Object[]{2.0f, 1L},
+            new Object[]{0.0f, 1L}
         )
     );
   }
@@ -3462,8 +3589,6 @@ public void testCountStarWithTimeFilterOnLongColumnUsingTimestampToMillis() thro
   @Test
   public void testSumOfString() throws Exception
   {
-    // Perhaps should be 13, but dim1 has "1", "2" and "10.1"; and CAST('10.1' AS INTEGER) = 0 since parsing is strict.
-
     testQuery(
         "SELECT SUM(CAST(dim1 AS INTEGER)) FROM druid.foo",
         ImmutableList.of(
@@ -3483,7 +3608,7 @@ public void testSumOfString() throws Exception
                   .build()
         ),
         ImmutableList.of(
-            new Object[]{3L}
+            new Object[]{13L}
         )
     );
   }
@@ -3491,8 +3616,6 @@ public void testSumOfString() throws Exception
   @Test
   public void testSumOfExtractionFn() throws Exception
   {
-    // Perhaps should be 13, but dim1 has "1", "2" and "10.1"; and CAST('10.1' AS INTEGER) = 0 since parsing is strict.
-
     testQuery(
         "SELECT SUM(CAST(SUBSTRING(dim1, 1, 10) AS INTEGER)) FROM druid.foo",
         ImmutableList.of(
@@ -3512,7 +3635,7 @@ public void testSumOfExtractionFn() throws Exception
                   .build()
         ),
         ImmutableList.of(
-            new Object[]{3L}
+            new Object[]{13L}
         )
     );
   }
@@ -3537,7 +3660,7 @@ public void testTimeseriesWithTimeFilterOnLongColumnUsingMillisToTimestamp() thr
                 .setInterval(QSS(Filtration.eternity()))
                 .setGranularity(Granularities.ALL)
                 .setVirtualColumns(
-                    EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"cnt\",'P1Y','','UTC')", ValueType.LONG)
+                    EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"cnt\",'P1Y',null,'UTC')", ValueType.LONG)
                 )
                 .setDimFilter(
                     BOUND(
@@ -3642,10 +3765,17 @@ public void testSelectDistinctWithLimit() throws Exception
                 .context(QUERY_CONTEXT_DEFAULT)
                 .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{""},
             new Object[]{"a"},
             new Object[]{"abc"}
+        ) :
+        ImmutableList.of(
+            new Object[]{null},
+            new Object[]{""},
+            new Object[]{"a"},
+            new Object[]{"abc"}
         )
     );
   }
@@ -3666,10 +3796,17 @@ public void testSelectDistinctWithSortAsOuterQuery() throws Exception
                 .context(QUERY_CONTEXT_DEFAULT)
                 .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{""},
             new Object[]{"a"},
             new Object[]{"abc"}
+        ) :
+        ImmutableList.of(
+            new Object[]{null},
+            new Object[]{""},
+            new Object[]{"a"},
+            new Object[]{"abc"}
         )
     );
   }
@@ -3690,7 +3827,14 @@ public void testSelectDistinctWithSortAsOuterQuery2() throws Exception
                 .context(QUERY_CONTEXT_DEFAULT)
                 .build()
         ),
+        NullHandling.replaceWithDefault() ?
+        ImmutableList.of(
+            new Object[]{""},
+            new Object[]{"a"},
+            new Object[]{"abc"}
+        ) :
         ImmutableList.of(
+            new Object[]{null},
             new Object[]{""},
             new Object[]{"a"},
             new Object[]{"abc"}
@@ -3726,10 +3870,17 @@ public void testSelectDistinctWithSortAsOuterQuery4() throws Exception
                 .context(QUERY_CONTEXT_DEFAULT)
                 .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{""},
             new Object[]{"abc"},
             new Object[]{"a"}
+        ) :
+        ImmutableList.of(
+            new Object[]{null},
+            new Object[]{"abc"},
+            new Object[]{"a"},
+            new Object[]{""}
         )
     );
   }
@@ -3844,14 +3995,14 @@ public void testExactCountDistinct() throws Exception
                         .setAggregatorSpecs(AGGS(
                             new FilteredAggregatorFactory(
                                 new CountAggregatorFactory("a0"),
-                                NOT(SELECTOR("d0", "", null))
+                                NOT(SELECTOR("d0", null, null))
                             )
                         ))
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{2L}
+            new Object[]{NullHandling.replaceWithDefault() ? 2L : 3L}
         )
     );
   }
@@ -3923,16 +4074,23 @@ public void testExactCountDistinctWithGroupingAndOtherAggregators() throws Excep
                             new LongSumAggregatorFactory("_a0", "a0"),
                             new FilteredAggregatorFactory(
                                 new CountAggregatorFactory("_a1"),
-                                NOT(SELECTOR("d0", "", null))
+                                NOT(SELECTOR("d0", null, null))
                             )
                         ))
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", 3L, 3L},
             new Object[]{"a", 2L, 1L},
             new Object[]{"abc", 1L, 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{null, 2L, 2L},
+            new Object[]{"", 1L, 1L},
+            new Object[]{"a", 2L, 2L},
+            new Object[]{"abc", 1L, 1L}
         )
     );
   }
@@ -4004,8 +4162,12 @@ public void testApproxCountDistinct() throws Exception
                   .context(TIMESERIES_CONTEXT_DEFAULT)
                   .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{6L, 3L, 2L, 2L, 2L, 6L}
+        ) :
+        ImmutableList.of(
+            new Object[]{6L, 3L, 2L, 1L, 1L, 6L}
         )
     );
   }
@@ -4050,7 +4212,7 @@ public void testNestedGroupBy() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "_d0:v",
-                                "timestamp_floor(\"a0\",'PT1H','','UTC')",
+                                "timestamp_floor(\"a0\",'PT1H',null,'UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -4118,8 +4280,12 @@ public void testDoubleNestedGroupBy() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{6L, 3L}
+        ) :
+        ImmutableList.of(
+            new Object[]{6L, 4L}
         )
     );
   }
@@ -4184,8 +4350,12 @@ public void testExactCountDistinctUsingSubquery() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{6L, 3L}
+        ) :
+        ImmutableList.of(
+            new Object[]{6L, 4L}
         )
     );
   }
@@ -4193,6 +4363,9 @@ public void testExactCountDistinctUsingSubquery() throws Exception
   @Test
   public void testTopNFilterJoin() throws Exception
   {
+    DimFilter filter = NullHandling.replaceWithDefault() ?
+                       IN("dim2", Arrays.asList(null, "a"), null)
+                                                         : SELECTOR("dim2", "a", null);
     // Filters on top N values of some dimension by using an inner join.
     testQuery(
         "SELECT t1.dim1, SUM(t1.cnt)\n"
@@ -4223,7 +4396,7 @@ public void testTopNFilterJoin() throws Exception
                         .setDataSource(CalciteTests.DATASOURCE1)
                         .setInterval(QSS(Filtration.eternity()))
                         .setGranularity(Granularities.ALL)
-                        .setDimFilter(IN("dim2", ImmutableList.of("", "a"), null))
+                        .setDimFilter(filter)
                         .setDimensions(DIMS(new DefaultDimensionSpec("dim1", "d0")))
                         .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
                         .setLimitSpec(
@@ -4241,12 +4414,17 @@ public void testTopNFilterJoin() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", 1L},
             new Object[]{"1", 1L},
             new Object[]{"10.1", 1L},
             new Object[]{"2", 1L},
             new Object[]{"abc", 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"", 1L},
+            new Object[]{"1", 1L}
         )
     );
   }
@@ -4427,7 +4605,7 @@ public void testExplainExactCountDistinctOfSemiJoinResult() throws Exception
     final String explanation =
         "DruidOuterQueryRel(query=[{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"descending\":false,\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"skipEmptyBuckets\":true,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"}}], signature=[{a0:LONG}])\n"
         + "  DruidSemiJoin(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}], leftExpressions=[[SUBSTRING($3, 1, 1)]], rightKeys=[[0]])\n"
-        + "    DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":\"\",\"extractionFn\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}], signature=[{d0:STRING}])\n";
+        + "    DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":null,\"extractionFn\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false}], signature=[{d0:STRING}])\n";
 
     testQuery(
         "EXPLAIN PLAN FOR SELECT COUNT(*)\n"
@@ -4435,7 +4613,7 @@ public void testExplainExactCountDistinctOfSemiJoinResult() throws Exception
         + "  SELECT DISTINCT dim2\n"
         + "  FROM druid.foo\n"
         + "  WHERE SUBSTRING(dim2, 1, 1) IN (\n"
-        + "    SELECT SUBSTRING(dim1, 1, 1) FROM druid.foo WHERE dim1 <> ''\n"
+        + "    SELECT SUBSTRING(dim1, 1, 1) FROM druid.foo WHERE dim1 IS NOT NULL\n"
         + "  )\n"
         + ")",
         ImmutableList.of(),
@@ -4476,8 +4654,51 @@ public void testExactCountDistinctUsingSubqueryWithWherePushDown() throws Except
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{3L, 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{5L, 3L}
+        )
+    );
+
+    testQuery(
+        "SELECT\n"
+        + "  SUM(cnt),\n"
+        + "  COUNT(*)\n"
+        + "FROM (SELECT dim2, SUM(cnt) AS cnt FROM druid.foo GROUP BY dim2)\n"
+        + "WHERE dim2 IS NOT NULL",
+        ImmutableList.of(
+            GroupByQuery.builder()
+                        .setDataSource(
+                            new QueryDataSource(
+                                GroupByQuery.builder()
+                                            .setDataSource(CalciteTests.DATASOURCE1)
+                                            .setInterval(QSS(Filtration.eternity()))
+                                            .setDimFilter(NOT(SELECTOR("dim2", null, null)))
+                                            .setGranularity(Granularities.ALL)
+                                            .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0")))
+                                            .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
+                                            .setContext(QUERY_CONTEXT_DEFAULT)
+                                            .build()
+                            )
+                        )
+                        .setInterval(QSS(Filtration.eternity()))
+                        .setGranularity(Granularities.ALL)
+                        .setAggregatorSpecs(AGGS(
+                            new LongSumAggregatorFactory("_a0", "a0"),
+                            new CountAggregatorFactory("_a1")
+                        ))
+                        .setContext(QUERY_CONTEXT_DEFAULT)
+                        .build()
+        ),
+        NullHandling.replaceWithDefault() ?
+        ImmutableList.of(
+            new Object[]{3L, 2L}
+        ) :
+        ImmutableList.of(
+            new Object[]{4L, 3L}
         )
     );
   }
@@ -4516,8 +4737,12 @@ public void testExactCountDistinctUsingSubqueryWithWhereToOuterFilter() throws E
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{3L, 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{2L, 1L}
         )
     );
   }
@@ -4603,10 +4828,15 @@ public void testHistogramUsingSubquery() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"1", 1L},
             new Object[]{"2", 1L},
             new Object[]{"3", 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"1", 2L},
+            new Object[]{"2", 2L}
         )
     );
   }
@@ -4653,9 +4883,14 @@ public void testHistogramUsingSubqueryWithSort() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"1", 1L},
             new Object[]{"2", 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"1", 2L},
+            new Object[]{"2", 2L}
         )
     );
   }
@@ -4809,6 +5044,7 @@ public void testSillyQuarters() throws Exception
   @Test
   public void testRegexpExtract() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT DISTINCT\n"
         + "  REGEXP_EXTRACT(dim1, '^.'),\n"
@@ -4845,7 +5081,7 @@ public void testRegexpExtract() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{"", ""},
+            new Object[]{nullValue, nullValue},
             new Object[]{"1", "1"},
             new Object[]{"2", "2"},
             new Object[]{"a", "a"},
@@ -4857,6 +5093,7 @@ public void testRegexpExtract() throws Exception
   @Test
   public void testGroupBySortPushDown() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT dim2, dim1, SUM(cnt) FROM druid.foo GROUP BY dim2, dim1 ORDER BY dim1 LIMIT 4",
         ImmutableList.of(
@@ -4889,7 +5126,7 @@ public void testGroupBySortPushDown() throws Exception
         ImmutableList.of(
             new Object[]{"a", "", 1L},
             new Object[]{"a", "1", 1L},
-            new Object[]{"", "10.1", 1L},
+            new Object[]{nullValue, "10.1", 1L},
             new Object[]{"", "2", 1L}
         )
     );
@@ -4898,6 +5135,7 @@ public void testGroupBySortPushDown() throws Exception
   @Test
   public void testGroupByLimitPushDownWithHavingOnLong() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     testQuery(
         "SELECT dim1, dim2, SUM(cnt) AS thecnt "
         + "FROM druid.foo "
@@ -4933,11 +5171,18 @@ public void testGroupByLimitPushDownWithHavingOnLong() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"10.1", "", 1L},
             new Object[]{"2", "", 1L},
             new Object[]{"abc", "", 1L},
             new Object[]{"", "a", 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{"10.1", null, 1L},
+            new Object[]{"abc", null, 1L},
+            new Object[]{"2", "", 1L},
+            new Object[]{"", "a", 1L}
         )
     );
   }
@@ -5242,7 +5487,7 @@ public void testGroupByFloor() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{0.0f, 3L},
+            new Object[]{NullHandling.defaultFloatValue(), 3L},
             new Object[]{1.0f, 1L},
             new Object[]{2.0f, 1L},
             new Object[]{10.0f, 1L}
@@ -5296,7 +5541,7 @@ public void testGroupByFloorWithOrderBy() throws Exception
             new Object[]{10.0f, 1L},
             new Object[]{2.0f, 1L},
             new Object[]{1.0f, 1L},
-            new Object[]{0.0f, 3L}
+            new Object[]{NullHandling.defaultFloatValue(), 3L}
         )
     );
   }
@@ -5317,7 +5562,7 @@ public void testGroupByFloorTimeAndOneOtherDimensionWithOrderBy() throws Excepti
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "timestamp_floor(\"__time\",'P1Y','','UTC')",
+                                "timestamp_floor(\"__time\",'P1Y',null,'UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -5357,12 +5602,21 @@ public void testGroupByFloorTimeAndOneOtherDimensionWithOrderBy() throws Excepti
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{T("2000"), "", 2L},
             new Object[]{T("2000"), "a", 1L},
             new Object[]{T("2001"), "", 1L},
             new Object[]{T("2001"), "a", 1L},
             new Object[]{T("2001"), "abc", 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{T("2000"), null, 1L},
+            new Object[]{T("2000"), "", 1L},
+            new Object[]{T("2000"), "a", 1L},
+            new Object[]{T("2001"), null, 1L},
+            new Object[]{T("2001"), "a", 1L},
+            new Object[]{T("2001"), "abc", 1L}
         )
     );
   }
@@ -5395,6 +5649,7 @@ public void testGroupByStringLength() throws Exception
   @Test
   public void testFilterAndGroupByLookup() throws Exception
   {
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
     final RegisteredLookupExtractionFn extractionFn = new RegisteredLookupExtractionFn(
         null,
         "lookyloo",
@@ -5439,7 +5694,7 @@ public void testFilterAndGroupByLookup() throws Exception
                         .build()
         ),
         ImmutableList.of(
-            new Object[]{"", 5L},
+            new Object[]{nullValue, 5L},
             new Object[]{"xabc", 1L}
         )
     );
@@ -5477,7 +5732,7 @@ public void testCountDistinctOfLookup() throws Exception
                   .build()
         ),
         ImmutableList.of(
-            new Object[]{2L}
+            new Object[]{NullHandling.replaceWithDefault() ? 2L : 1L}
         )
     );
   }
@@ -5636,7 +5891,7 @@ public void testTimeseriesUsingTimeFloorWithTimeShift() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "timestamp_floor(timestamp_shift(\"__time\",'P1D',-1),'P1M','','UTC')",
+                                "timestamp_floor(timestamp_shift(\"__time\",'P1D',-1),'P1M',null,'UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -5684,7 +5939,7 @@ public void testTimeseriesUsingTimeFloorWithTimestampAdd() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "timestamp_floor((\"__time\" + -86400000),'P1M','','UTC')",
+                                "timestamp_floor((\"__time\" + -86400000),'P1M',null,'UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -5811,7 +6066,7 @@ public void testTimeseriesLosAngelesUsingTimeFloorConnectionLosAngeles() throws
   public void testTimeseriesDontSkipEmptyBuckets() throws Exception
   {
     // Tests that query context parameters are passed through to the underlying query engine.
-
+    Long defaultVal = NullHandling.replaceWithDefault() ? 0L : null;
     testQuery(
         PLANNER_CONFIG_DEFAULT,
         QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS,
@@ -5833,29 +6088,29 @@ public void testTimeseriesDontSkipEmptyBuckets() throws Exception
         ),
         ImmutableList.<Object[]>builder()
             .add(new Object[]{1L, T("2000-01-01")})
-            .add(new Object[]{0L, T("2000-01-01T01")})
-            .add(new Object[]{0L, T("2000-01-01T02")})
-            .add(new Object[]{0L, T("2000-01-01T03")})
-            .add(new Object[]{0L, T("2000-01-01T04")})
-            .add(new Object[]{0L, T("2000-01-01T05")})
-            .add(new Object[]{0L, T("2000-01-01T06")})
-            .add(new Object[]{0L, T("2000-01-01T07")})
-            .add(new Object[]{0L, T("2000-01-01T08")})
-            .add(new Object[]{0L, T("2000-01-01T09")})
-            .add(new Object[]{0L, T("2000-01-01T10")})
-            .add(new Object[]{0L, T("2000-01-01T11")})
-            .add(new Object[]{0L, T("2000-01-01T12")})
-            .add(new Object[]{0L, T("2000-01-01T13")})
-            .add(new Object[]{0L, T("2000-01-01T14")})
-            .add(new Object[]{0L, T("2000-01-01T15")})
-            .add(new Object[]{0L, T("2000-01-01T16")})
-            .add(new Object[]{0L, T("2000-01-01T17")})
-            .add(new Object[]{0L, T("2000-01-01T18")})
-            .add(new Object[]{0L, T("2000-01-01T19")})
-            .add(new Object[]{0L, T("2000-01-01T20")})
-            .add(new Object[]{0L, T("2000-01-01T21")})
-            .add(new Object[]{0L, T("2000-01-01T22")})
-            .add(new Object[]{0L, T("2000-01-01T23")})
+            .add(new Object[]{defaultVal, T("2000-01-01T01")})
+            .add(new Object[]{defaultVal, T("2000-01-01T02")})
+            .add(new Object[]{defaultVal, T("2000-01-01T03")})
+            .add(new Object[]{defaultVal, T("2000-01-01T04")})
+            .add(new Object[]{defaultVal, T("2000-01-01T05")})
+            .add(new Object[]{defaultVal, T("2000-01-01T06")})
+            .add(new Object[]{defaultVal, T("2000-01-01T07")})
+            .add(new Object[]{defaultVal, T("2000-01-01T08")})
+            .add(new Object[]{defaultVal, T("2000-01-01T09")})
+            .add(new Object[]{defaultVal, T("2000-01-01T10")})
+            .add(new Object[]{defaultVal, T("2000-01-01T11")})
+            .add(new Object[]{defaultVal, T("2000-01-01T12")})
+            .add(new Object[]{defaultVal, T("2000-01-01T13")})
+            .add(new Object[]{defaultVal, T("2000-01-01T14")})
+            .add(new Object[]{defaultVal, T("2000-01-01T15")})
+            .add(new Object[]{defaultVal, T("2000-01-01T16")})
+            .add(new Object[]{defaultVal, T("2000-01-01T17")})
+            .add(new Object[]{defaultVal, T("2000-01-01T18")})
+            .add(new Object[]{defaultVal, T("2000-01-01T19")})
+            .add(new Object[]{defaultVal, T("2000-01-01T20")})
+            .add(new Object[]{defaultVal, T("2000-01-01T21")})
+            .add(new Object[]{defaultVal, T("2000-01-01T22")})
+            .add(new Object[]{defaultVal, T("2000-01-01T23")})
             .build()
     );
   }
@@ -6051,7 +6306,7 @@ public void testGroupByExtractFloorTime() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "timestamp_extract(timestamp_floor(\"__time\",'P1Y','','UTC'),'YEAR','UTC')",
+                                "timestamp_extract(timestamp_floor(\"__time\",'P1Y',null,'UTC'),'YEAR','UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -6086,7 +6341,7 @@ public void testGroupByExtractFloorTimeLosAngeles() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "timestamp_extract(timestamp_floor(\"__time\",'P1Y','','America/Los_Angeles'),'YEAR','America/Los_Angeles')",
+                                "timestamp_extract(timestamp_floor(\"__time\",'P1Y',null,'America/Los_Angeles'),'YEAR','America/Los_Angeles')",
                                 ValueType.LONG
                             )
                         )
@@ -6125,7 +6380,7 @@ public void testTimeseriesWithLimitNoTopN() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d0:v",
-                                "timestamp_floor(\"__time\",'P1M','','UTC')",
+                                "timestamp_floor(\"__time\",'P1M',null,'UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -6169,7 +6424,7 @@ public void testTimeseriesWithLimit() throws Exception
                 .intervals(QSS(Filtration.eternity()))
                 .granularity(Granularities.ALL)
                 .virtualColumns(
-                    EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"__time\",'P1M','','UTC')", ValueType.LONG)
+                    EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"__time\",'P1M',null,'UTC')", ValueType.LONG)
                 )
                 .dimension(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))
                 .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
@@ -6202,7 +6457,7 @@ public void testTimeseriesWithOrderByAndLimit() throws Exception
                 .intervals(QSS(Filtration.eternity()))
                 .granularity(Granularities.ALL)
                 .virtualColumns(
-                    EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"__time\",'P1M','','UTC')", ValueType.LONG)
+                    EXPRESSION_VIRTUAL_COLUMN("d0:v", "timestamp_floor(\"__time\",'P1M',null,'UTC')", ValueType.LONG)
                 )
                 .dimension(new DefaultDimensionSpec("d0:v", "d0", ValueType.LONG))
                 .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
@@ -6233,7 +6488,7 @@ public void testGroupByTimeAndOtherDimension() throws Exception
                         .setVirtualColumns(
                             EXPRESSION_VIRTUAL_COLUMN(
                                 "d1:v",
-                                "timestamp_floor(\"__time\",'P1M','','UTC')",
+                                "timestamp_floor(\"__time\",'P1M',null,'UTC')",
                                 ValueType.LONG
                             )
                         )
@@ -6260,12 +6515,21 @@ public void testGroupByTimeAndOtherDimension() throws Exception
                         .setContext(QUERY_CONTEXT_DEFAULT)
                         .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", T("2000-01-01"), 2L},
             new Object[]{"", T("2001-01-01"), 1L},
             new Object[]{"a", T("2000-01-01"), 1L},
             new Object[]{"a", T("2001-01-01"), 1L},
             new Object[]{"abc", T("2001-01-01"), 1L}
+        ) :
+        ImmutableList.of(
+            new Object[]{null, T("2000-01-01"), 1L},
+            new Object[]{null, T("2001-01-01"), 1L},
+            new Object[]{"", T("2000-01-01"), 1L},
+            new Object[]{"a", T("2000-01-01"), 1L},
+            new Object[]{"a", T("2001-01-01"), 1L},
+            new Object[]{"abc", T("2001-01-01"), 1L}
         )
     );
   }
@@ -6395,7 +6659,13 @@ public void testUsingSubqueryAsFilterOnTwoColumns() throws Exception
             newScanQueryBuilder()
                 .dataSource(CalciteTests.DATASOURCE1)
                 .intervals(QSS(Filtration.eternity()))
-                .filters(AND(SELECTOR("dim1", "def", null), SELECTOR("dim2", "abc", null)))
+                .filters(OR(
+                    SELECTOR("dim1", "def", null),
+                    AND(
+                        SELECTOR("dim1", "def", null),
+                        SELECTOR("dim2", "abc", null)
+                    )
+                ))
                 .columns("__time", "cnt", "dim1", "dim2")
                 .resultFormat(ScanQuery.RESULT_FORMAT_COMPACTED_LIST)
                 .context(QUERY_CONTEXT_DEFAULT)
@@ -6410,8 +6680,9 @@ public void testUsingSubqueryAsFilterOnTwoColumns() throws Exception
   @Test
   public void testUsingSubqueryAsFilterWithInnerSort() throws Exception
   {
-    // Regression test for https://github.com/druid-io/druid/issues/4208
+    String nullValue = NullHandling.replaceWithDefault() ? "" : null;
 
+    // Regression test for https://github.com/druid-io/druid/issues/4208
     testQuery(
         "SELECT dim1, dim2 FROM druid.foo\n"
         + " WHERE dim2 IN (\n"
@@ -6448,13 +6719,20 @@ public void testUsingSubqueryAsFilterWithInnerSort() throws Exception
                 .context(QUERY_CONTEXT_DEFAULT)
                 .build()
         ),
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             new Object[]{"", "a"},
-            new Object[]{"10.1", ""},
+            new Object[]{"10.1", nullValue},
             new Object[]{"2", ""},
             new Object[]{"1", "a"},
             new Object[]{"def", "abc"},
-            new Object[]{"abc", ""}
+            new Object[]{"abc", nullValue}
+        ) :
+        ImmutableList.of(
+            new Object[]{"", "a"},
+            new Object[]{"2", ""},
+            new Object[]{"1", "a"},
+            new Object[]{"def", "abc"}
         )
     );
   }
@@ -6608,9 +6886,9 @@ public void testProjectAfterSort2() throws Exception
         ImmutableList.of(
             new Object[]{1.0, "", "a", 1.0},
             new Object[]{4.0, "1", "a", 4.0},
-            new Object[]{2.0, "10.1", "", 2.0},
+            new Object[]{2.0, "10.1", NullHandling.defaultStringValue(), 2.0},
             new Object[]{3.0, "2", "", 3.0},
-            new Object[]{6.0, "abc", "", 6.0},
+            new Object[]{6.0, "abc", NullHandling.defaultStringValue(), 6.0},
             new Object[]{5.0, "def", "abc", 5.0}
         )
     );
diff --git a/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java b/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java
index b565484055c..e2e7ce94807 100644
--- a/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java
@@ -21,6 +21,7 @@
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import io.druid.common.config.NullHandling;
 import io.druid.java.util.common.DateTimes;
 import io.druid.math.expr.ExprEval;
 import io.druid.math.expr.Parser;
@@ -194,8 +195,8 @@ public void testStrpos()
             rexBuilder.makeNullLiteral(typeFactory.createSqlType(SqlTypeName.VARCHAR)),
             rexBuilder.makeLiteral("ax")
         ),
-        DruidExpression.fromExpression("(strpos('','ax') + 1)"),
-        0L
+        DruidExpression.fromExpression("(strpos(null,'ax') + 1)"),
+        NullHandling.replaceWithDefault() ? 0L : null
     );
   }
 
@@ -328,7 +329,7 @@ public void testDateTrunc()
             rexBuilder.makeLiteral("hour"),
             timestampLiteral(DateTimes.of("2000-02-03T04:05:06Z"))
         ),
-        DruidExpression.fromExpression("timestamp_floor(949550706000,'PT1H','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(949550706000,'PT1H',null,'UTC')"),
         DateTimes.of("2000-02-03T04:00:00").getMillis()
     );
 
@@ -338,7 +339,7 @@ public void testDateTrunc()
             rexBuilder.makeLiteral("DAY"),
             timestampLiteral(DateTimes.of("2000-02-03T04:05:06Z"))
         ),
-        DruidExpression.fromExpression("timestamp_floor(949550706000,'P1D','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(949550706000,'P1D',null,'UTC')"),
         DateTimes.of("2000-02-03T00:00:00").getMillis()
     );
   }
@@ -389,7 +390,7 @@ public void testTimeFloor()
             timestampLiteral(DateTimes.of("2000-02-03T04:05:06Z")),
             rexBuilder.makeLiteral("PT1H")
         ),
-        DruidExpression.fromExpression("timestamp_floor(949550706000,'PT1H','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(949550706000,'PT1H',null,'UTC')"),
         DateTimes.of("2000-02-03T04:00:00").getMillis()
     );
 
@@ -401,7 +402,7 @@ public void testTimeFloor()
             rexBuilder.makeNullLiteral(typeFactory.createSqlType(SqlTypeName.TIMESTAMP)),
             rexBuilder.makeLiteral("America/Los_Angeles")
         ),
-        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','America/Los_Angeles')"),
+        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D',null,'America/Los_Angeles')"),
         DateTimes.of("2000-02-02T08:00:00").getMillis()
     );
   }
@@ -417,7 +418,7 @@ public void testOtherTimeFloor()
             inputRef("t"),
             rexBuilder.makeFlag(TimeUnitRange.YEAR)
         ),
-        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1Y','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1Y',null,'UTC')"),
         DateTimes.of("2000").getMillis()
     );
   }
@@ -433,7 +434,7 @@ public void testOtherTimeCeil()
             inputRef("t"),
             rexBuilder.makeFlag(TimeUnitRange.YEAR)
         ),
-        DruidExpression.fromExpression("timestamp_ceil(\"t\",'P1Y','','UTC')"),
+        DruidExpression.fromExpression("timestamp_ceil(\"t\",'P1Y',null,'UTC')"),
         DateTimes.of("2001").getMillis()
     );
   }
@@ -668,7 +669,7 @@ public void testCastAsTimestamp()
         ),
         DruidExpression.of(
             null,
-            "timestamp_parse(\"tstr\",'','UTC')"
+            "timestamp_parse(\"tstr\",null,'UTC')"
         ),
         DateTimes.of("2000-02-03T04:05:06Z").getMillis()
     );
@@ -715,7 +716,7 @@ public void testCastAsDate()
             typeFactory.createSqlType(SqlTypeName.DATE),
             inputRef("t")
         ),
-        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D',null,'UTC')"),
         DateTimes.of("2000-02-03").getMillis()
     );
 
@@ -725,7 +726,7 @@ public void testCastAsDate()
             inputRef("dstr")
         ),
         DruidExpression.fromExpression(
-            "timestamp_floor(timestamp_parse(\"dstr\",'','UTC'),'P1D','','UTC')"
+            "timestamp_floor(timestamp_parse(\"dstr\",null,'UTC'),'P1D',null,'UTC')"
         ),
         DateTimes.of("2000-02-03").getMillis()
     );
@@ -743,7 +744,7 @@ public void testCastFromDate()
             )
         ),
         DruidExpression.fromExpression(
-            "timestamp_format(timestamp_floor(\"t\",'P1D','','UTC'),'yyyy-MM-dd','UTC')"
+            "timestamp_format(timestamp_floor(\"t\",'P1D',null,'UTC'),'yyyy-MM-dd','UTC')"
         ),
         "2000-02-03"
     );
@@ -756,7 +757,7 @@ public void testCastFromDate()
                 inputRef("t")
             )
         ),
-        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D','','UTC')"),
+        DruidExpression.fromExpression("timestamp_floor(\"t\",'P1D',null,'UTC')"),
         DateTimes.of("2000-02-03").getMillis()
     );
   }
diff --git a/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java b/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java
index c80a4d0f540..3258888d340 100644
--- a/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java
@@ -23,6 +23,8 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
+import io.druid.common.config.NullHandling;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.ISE;
 import io.druid.java.util.common.Pair;
@@ -215,10 +217,18 @@ public void testFieldAliasingGroupBy() throws Exception
     ).rhs;
 
     Assert.assertEquals(
+        NullHandling.replaceWithDefault() ?
         ImmutableList.of(
             ImmutableMap.of("x", "", "y", ""),
             ImmutableMap.of("x", "a", "y", "a"),
             ImmutableMap.of("x", "abc", "y", "abc")
+        ) :
+        ImmutableList.of(
+            // x and y both should be null instead of empty string
+            Maps.transformValues(ImmutableMap.of("x", "", "y", ""), (val) -> null),
+            ImmutableMap.of("x", "", "y", ""),
+            ImmutableMap.of("x", "a", "y", "a"),
+            ImmutableMap.of("x", "abc", "y", "abc")
         ),
         rows
     );
diff --git a/sql/src/test/java/io/druid/sql/calcite/planner/CalcitesTest.java b/sql/src/test/java/io/druid/sql/calcite/planner/CalcitesTest.java
index 937ba7f8109..9dc6fe72aee 100644
--- a/sql/src/test/java/io/druid/sql/calcite/planner/CalcitesTest.java
+++ b/sql/src/test/java/io/druid/sql/calcite/planner/CalcitesTest.java
@@ -29,7 +29,6 @@
   @Test
   public void testEscapeStringLiteral()
   {
-    Assert.assertEquals("''", Calcites.escapeStringLiteral(null));
     Assert.assertEquals("''", Calcites.escapeStringLiteral(""));
     Assert.assertEquals("'foo'", Calcites.escapeStringLiteral("foo"));
     Assert.assertEquals("'foo bar'", Calcites.escapeStringLiteral("foo bar"));
diff --git a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java
index c22ab27c076..d119ba02a24 100644
--- a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java
+++ b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java
@@ -32,8 +32,6 @@
 import com.google.inject.Injector;
 import com.google.inject.Key;
 import com.google.inject.Module;
-import io.druid.java.util.emitter.core.NoopEmitter;
-import io.druid.java.util.emitter.service.ServiceEmitter;
 import io.druid.collections.StupidPool;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionsSpec;
@@ -43,8 +41,9 @@
 import io.druid.data.input.impl.TimestampSpec;
 import io.druid.guice.ExpressionModule;
 import io.druid.guice.annotations.Json;
+import io.druid.java.util.emitter.core.NoopEmitter;
+import io.druid.java.util.emitter.service.ServiceEmitter;
 import io.druid.math.expr.ExprMacroTable;
-import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
 import io.druid.query.DefaultGenericQueryMetricsFactory;
 import io.druid.query.DefaultQueryRunnerFactoryConglomerate;
 import io.druid.query.DruidProcessingConfig;
@@ -92,12 +91,12 @@
 import io.druid.segment.QueryableIndex;
 import io.druid.segment.TestHelper;
 import io.druid.segment.incremental.IncrementalIndexSchema;
+import io.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
 import io.druid.server.QueryLifecycleFactory;
 import io.druid.server.log.NoopRequestLogger;
 import io.druid.server.security.Access;
 import io.druid.server.security.Action;
 import io.druid.server.security.AllowAllAuthenticator;
-import io.druid.server.security.NoopEscalator;
 import io.druid.server.security.AuthConfig;
 import io.druid.server.security.AuthenticationResult;
 import io.druid.server.security.Authenticator;
@@ -105,6 +104,7 @@
 import io.druid.server.security.Authorizer;
 import io.druid.server.security.AuthorizerMapper;
 import io.druid.server.security.Escalator;
+import io.druid.server.security.NoopEscalator;
 import io.druid.server.security.Resource;
 import io.druid.server.security.ResourceType;
 import io.druid.sql.calcite.expression.SqlOperatorConversion;
@@ -137,7 +137,8 @@
   public static final String FORBIDDEN_DATASOURCE = "forbiddenDatasource";
 
   public static final String TEST_SUPERUSER_NAME = "testSuperuser";
-  public static final AuthorizerMapper TEST_AUTHORIZER_MAPPER = new AuthorizerMapper(null) {
+  public static final AuthorizerMapper TEST_AUTHORIZER_MAPPER = new AuthorizerMapper(null)
+  {
     @Override
     public Authorizer getAuthorizer(String name)
     {
@@ -162,11 +163,13 @@ public Access authorize(
     }
   };
   public static final AuthenticatorMapper TEST_AUTHENTICATOR_MAPPER;
+
   static {
     final Map<String, Authenticator> defaultMap = Maps.newHashMap();
     defaultMap.put(
         AuthConfig.ALLOW_ALL_NAME,
-        new AllowAllAuthenticator() {
+        new AllowAllAuthenticator()
+        {
           @Override
           public AuthenticationResult authenticateJDBCContext(Map<String, Object> context)
           {
@@ -176,9 +179,12 @@ public AuthenticationResult authenticateJDBCContext(Map<String, Object> context)
     );
     TEST_AUTHENTICATOR_MAPPER = new AuthenticatorMapper(defaultMap);
   }
+
   public static final Escalator TEST_AUTHENTICATOR_ESCALATOR;
+
   static {
-    TEST_AUTHENTICATOR_ESCALATOR = new NoopEscalator() {
+    TEST_AUTHENTICATOR_ESCALATOR = new NoopEscalator()
+    {
 
       @Override
       public AuthenticationResult createEscalatedAuthenticationResult()
@@ -215,15 +221,14 @@ public void configure(final Binder binder)
 
           // This Module is just to get a LookupReferencesManager with a usable "lookyloo" lookup.
 
-          binder.bind(LookupReferencesManager.class)
-                .toInstance(
-                    LookupEnabledTestExprMacroTable.createTestLookupReferencesManager(
-                        ImmutableMap.of(
-                            "a", "xa",
-                            "abc", "xabc"
-                        )
-                    )
-            );
+          binder.bind(LookupReferencesManager.class).toInstance(
+              LookupEnabledTestExprMacroTable.createTestLookupReferencesManager(
+                  ImmutableMap.of(
+                      "a", "xa",
+                      "abc", "xabc"
+                  )
+              )
+          );
 
         }
       }


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: dev-unsubscribe@druid.apache.org
For additional commands, e-mail: dev-help@druid.apache.org