Posted to commits@drill.apache.org by ar...@apache.org on 2019/04/08 10:40:53 UTC

[drill] branch master updated (ac11a6b -> 771fd27)

This is an automated email from the ASF dual-hosted git repository.

arina pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git.


    from ac11a6b  DRILL-7048: Implement JDBC Statement.setMaxRows() with System Option
     new cd44d1e  DRILL-7143: Support default value for empty columns
     new 9ffd6d9  DRILL-7157: Wrap SchemaParsingException into UserException when creating schema
     new 9844b61  DRILL-7049 return VARBINARY as a string with escaped non printable bytes
     new 9cbfaad  DRILL-7049: REST API returns the toString of byte arrays (VARBINARY types)
     new a1986a3  DRILL-7045 UDF string_binary java.lang.IndexOutOfBoundsException
     new 771fd27  DRILL-7045: Updates to address review comments

The 6 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../apache/drill/common/types}/BooleanType.java    |  49 +-
 .../java/org/apache/drill/common/types/Types.java  |  55 +-
 .../exec/expr/fn/impl/SimpleCastFunctions.java     |   4 +-
 .../drill/exec/expr/fn/impl/StringFunctions.java   |   6 +-
 .../impl/scan/columns/ColumnsScanFramework.java    |   5 -
 .../impl/scan/project/NullColumnBuilder.java       |   2 +-
 .../impl/scan/project/NullColumnLoader.java        |  58 +-
 .../impl/scan/project/ResolvedNullColumn.java      |   8 +-
 .../impl/scan/project/ScanSchemaOrchestrator.java  |  21 +-
 .../exec/physical/rowSet/impl/ColumnBuilder.java   |   3 +-
 .../physical/rowSet/impl/ResultSetLoaderImpl.java  |  23 +-
 .../rowSet/impl/SchemaTransformerImpl.java         |  90 ++-
 .../exec/physical/rowSet/impl/TupleState.java      |   4 +-
 .../exec/planner/sql/handlers/SchemaHandler.java   |  12 +-
 .../record/metadata/AbstractColumnMetadata.java    |  11 +-
 .../drill/exec/record/metadata/MapBuilder.java     |  12 +
 .../drill/exec/record/metadata/MetadataUtils.java  |  43 +-
 .../record/metadata/PrimitiveColumnMetadata.java   |   9 +-
 .../drill/exec/record/metadata/SchemaBuilder.java  |  10 +-
 .../exec/store/dfs/easy/EasyFormatPlugin.java      |   4 +
 .../exec/store/easy/text/TextFormatPlugin.java     |   7 +
 .../exec/util/ValueVectorElementFormatter.java     |   5 +
 .../apache/drill/exec/work/foreman/Foreman.java    |   9 +-
 .../exec/fn/impl/TestByteComparisonFunctions.java  |   1 -
 .../impl/scan/project/TestNullColumnLoader.java    |  29 +-
 .../impl/TestResultSetLoaderOmittedValues.java     |  43 ++
 .../impl/TestResultSetLoaderTypeConversion.java    |  61 +-
 .../exec/record/metadata/TestTupleSchema.java      |   3 +-
 .../store/easy/text/compliant/BaseCsvTest.java     |   8 +
 .../easy/text/compliant/TestCsvWithSchema.java     | 819 ++++++++++++++++++---
 .../exec/util/TestValueVectorElementFormatter.java |  11 +
 .../exec/vector/complex/writer/TestJsonNanInf.java |  15 +-
 .../java/org/apache/drill/test/ClusterTest.java    |   2 +-
 .../apache/drill/test/rowSet/RowSetBuilder.java    |   1 +
 .../apache/drill/test/rowSet/RowSetUtilities.java  |  20 +
 .../test/rowSet/test/TestColumnConverter.java      | 188 ++++-
 .../{DummyWriterTest.java => TestDummyWriter.java} |   2 +-
 .../drill/test/rowSet/test/TestFillEmpties.java    | 233 +++++-
 .../test/rowSet/test/TestScalarAccessors.java      | 111 ++-
 .../drill/test/rowSet/test/TestSchemaBuilder.java  | 108 ++-
 .../org/apache/drill/exec/rpc/RequestIdMap.java    |  25 +-
 .../main/codegen/templates/ColumnAccessors.java    | 256 ++++---
 .../drill/exec/record/metadata/ColumnMetadata.java |  17 +-
 .../drill/exec/vector/accessor/ScalarReader.java   |   7 +
 .../drill/exec/vector/accessor/ScalarWriter.java   |  20 +
 .../drill/exec/vector/accessor/ValueType.java      |   6 +
 .../convert/AbstractConvertFromString.java         | 210 ++++++
 .../accessor/convert/AbstractWriteConverter.java   |  11 +
 .../accessor/convert/ConvertBooleanToString.java   |   6 +-
 .../accessor/convert/ConvertStringToBoolean.java   |  26 +-
 .../accessor/convert/ConvertStringToDate.java      |  25 +-
 .../accessor/convert/ConvertStringToDecimal.java   |  24 +-
 .../accessor/convert/ConvertStringToDouble.java    |  25 +-
 .../accessor/convert/ConvertStringToInt.java       |  25 +-
 .../accessor/convert/ConvertStringToInterval.java  |  24 +-
 .../accessor/convert/ConvertStringToLong.java      |  25 +-
 .../accessor/convert/ConvertStringToTime.java      |  25 +-
 .../accessor/convert/ConvertStringToTimeStamp.java |  25 +-
 .../accessor/convert/StandardConversions.java      |  35 +-
 .../exec/vector/accessor/impl/VectorPrinter.java   |   7 +-
 .../accessor/reader/AbstractScalarReader.java      |  24 +
 .../vector/accessor/reader/BitColumnReader.java}   |  35 +-
 .../accessor/writer/AbstractArrayWriter.java       |   9 +-
 .../accessor/writer/AbstractFixedWidthWriter.java  |  75 +-
 .../accessor/writer/AbstractObjectWriter.java      |   5 +-
 .../accessor/writer/AbstractScalarWriter.java      |   6 +-
 .../accessor/writer/AbstractTupleWriter.java       |  16 +-
 .../vector/accessor/writer/BaseScalarWriter.java   |  25 +
 .../vector/accessor/writer/BaseVarWidthWriter.java |  47 +-
 .../vector/accessor/writer/BitColumnWriter.java    | 121 +++
 .../accessor/writer/NullableScalarWriter.java      |  13 +
 .../accessor/writer/OffsetVectorWriterImpl.java    |  84 ++-
 .../vector/accessor/writer/ScalarArrayWriter.java  |   4 +-
 .../accessor/writer/dummy/DummyScalarWriter.java   |   6 +
 pom.xml                                            |   2 +-
 75 files changed, 2775 insertions(+), 626 deletions(-)
 rename {exec/java-exec/src/main/java/org/apache/drill/exec/expr => common/src/main/java/org/apache/drill/common/types}/BooleanType.java (65%)
 rename exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/{DummyWriterTest.java => TestDummyWriter.java} (99%)
 copy exec/{java-exec/src/main/java/org/apache/drill/exec/store/mock/BooleanGen.java => vector/src/main/java/org/apache/drill/exec/vector/accessor/reader/BitColumnReader.java} (51%)
 create mode 100644 exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BitColumnWriter.java


[drill] 05/06: DRILL-7045 UDF string_binary java.lang.IndexOutOfBoundsException

Posted by ar...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

arina pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit a1986a3fec1634812712e47be0be2565b303ea2d
Author: Jean-Claude <jc...@gmail.com>
AuthorDate: Wed Feb 20 23:52:16 2019 -0500

    DRILL-7045 UDF string_binary java.lang.IndexOutOfBoundsException
    
    UDF string_binary was not reallocating the DrillBuf, so the buffer would
    fill up and throw an out-of-bounds exception.
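
    For reference, a short sketch of the variable-width-output idiom the fix
    adopts (buffer is the UDF's injected DrillBuf; "escaped" stands in for the
    escaped string the function produces):

        byte[] bytes = escaped.getBytes(charset);
        // Grow the scratch DrillBuf if the output exceeds its current
        // capacity; reallocIfNeeded() returns the (possibly replaced)
        // buffer, which must be written back to the output holder.
        out.buffer = buffer.reallocIfNeeded(bytes.length);
        out.buffer.setBytes(0, bytes);
        out.start = 0;
        out.end = bytes.length;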
---
 .../apache/drill/exec/expr/fn/impl/StringFunctions.java  |  6 +++---
 .../drill/exec/vector/complex/writer/TestJsonNanInf.java | 16 ++++++++++++++++
 2 files changed, 19 insertions(+), 3 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
index 6353e55..70db585 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringFunctions.java
@@ -1662,12 +1662,12 @@ public class StringFunctions{
     @Override
     public void eval() {
       byte[] buf = org.apache.drill.common.util.DrillStringUtils.toBinaryString(in.buffer, in.start, in.end).getBytes(charset);
-      buffer.setBytes(0, buf);
-      buffer.setIndex(0, buf.length);
+      out.buffer = buffer.reallocIfNeeded(buf.length);
+      out.buffer.setBytes(0, buf);
+      out.buffer.setIndex(0, buf.length);
 
       out.start = 0;
       out.end = buf.length;
-      out.buffer = buffer;
     }
   }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
index 138d9b2..851ce0e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
@@ -237,6 +237,22 @@ public class TestJsonNanInf extends BaseTestQuery {
 
 
   @Test
+  public void testLargeStringBinary() throws Exception {
+    String chunk = "0123456789";
+    StringBuilder builder = new StringBuilder();
+    for (int i = 0; i < 1000; i++) {
+      builder.append(chunk);
+    }
+    String data = builder.toString();
+    String query = String.format("select string_binary(binary_string('%s')) from (values(1))", data);
+    List<QueryDataBatch> results = testSqlWithResults(query);
+    RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
+    QueryDataBatch batch = results.get(0);
+    batch.release();
+    batchLoader.clear();
+ }
+
+  @Test
   public void testConvertToJsonFunction() throws Exception {
     String table = "nan_test.csv";
     File file = new File(dirTestWatcher.getRootDir(), table);


[drill] 01/06: DRILL-7143: Support default value for empty columns

Posted by ar...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

arina pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit cd44d1eb25127f775f7cbfa95eca8ec078caf6fa
Author: Paul Rogers <pr...@cloudera.com>
AuthorDate: Mon Apr 1 00:10:14 2019 -0700

    DRILL-7143: Support default value for empty columns
    
    Modifies the prior work to add default values for columns. The prior work added defaults
    when the entire column is missing from a reader (the old Nullable Int column). The Row
    Set mechanism will now also "fill empty" slots with the default value.
    
    Added default support for the column writers. The writers automatically obtain the
    default value from the column schema. The default can also be set explicitly on
    the column writer.
    
    Updated the null column mechanism to use this feature rather than the ad hoc
    implementation in the prior commit.
    
    Semantics changed a bit. Only Required columns take a default. The default value
    is ignored for nullable columns, since nullable columns already have a built-in
    default: NULL.
    
    Other changes:
    
    * Updated the CSV-with-schema tests to illustrate the new behavior.
    * Made multiple fixes for Boolean and Decimal columns and added unit tests.
    * Upgraded Freemarker to version 2.3.28 to allow use of the continue statement.
    * Reimplemented the Bit column reader and writer to use the BitVector directly since this vector is rather special.
    * Added get/set Boolean methods for column accessors
    * Moved the BooleanType class to the common package
    * Added more CSV unit tests to explore decimal types, booleans, and defaults
    * Added special handling for blank fields in from-string conversions
    * Added options to the conversion factory to specify blank-handling behavior.
      CSV maps blanks to null (nullable columns) or to the default value
      (non-nullable columns).
    
    closes #1726
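
    A minimal sketch of the new default-value flow, using schema APIs that
    appear in the diffs below (TupleMetadata.metadata() and
    ColumnMetadata.setDefaultValue(); column names are illustrative):

        import org.apache.drill.common.types.TypeProtos.MinorType;
        import org.apache.drill.exec.record.metadata.SchemaBuilder;
        import org.apache.drill.exec.record.metadata.TupleMetadata;

        TupleMetadata schema = new SchemaBuilder()
            .add("flag", MinorType.BIT)              // Required column
            .addNullable("name", MinorType.VARCHAR)  // Nullable column
            .buildSchema();
        // Only the Required column takes a default: empty "flag" slots are
        // filled with true, while empty "name" slots remain NULL.
        schema.metadata("flag").setDefaultValue("true");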
---
 .../apache/drill/common/types}/BooleanType.java    |  49 +-
 .../java/org/apache/drill/common/types/Types.java  |  55 +-
 .../exec/expr/fn/impl/SimpleCastFunctions.java     |   4 +-
 .../impl/scan/columns/ColumnsScanFramework.java    |   5 -
 .../impl/scan/project/NullColumnBuilder.java       |   2 +-
 .../impl/scan/project/NullColumnLoader.java        |  58 +-
 .../impl/scan/project/ResolvedNullColumn.java      |   8 +-
 .../impl/scan/project/ScanSchemaOrchestrator.java  |  21 +-
 .../exec/physical/rowSet/impl/ColumnBuilder.java   |   3 +-
 .../physical/rowSet/impl/ResultSetLoaderImpl.java  |  23 +-
 .../rowSet/impl/SchemaTransformerImpl.java         |  90 ++-
 .../exec/physical/rowSet/impl/TupleState.java      |   4 +-
 .../exec/planner/sql/handlers/SchemaHandler.java   |   5 +
 .../record/metadata/AbstractColumnMetadata.java    |  11 +-
 .../drill/exec/record/metadata/MapBuilder.java     |  12 +
 .../drill/exec/record/metadata/MetadataUtils.java  |  43 +-
 .../record/metadata/PrimitiveColumnMetadata.java   |   9 +-
 .../drill/exec/record/metadata/SchemaBuilder.java  |  10 +-
 .../exec/store/dfs/easy/EasyFormatPlugin.java      |   4 +
 .../exec/store/easy/text/TextFormatPlugin.java     |   7 +
 .../apache/drill/exec/work/foreman/Foreman.java    |   9 +-
 .../exec/fn/impl/TestByteComparisonFunctions.java  |   1 -
 .../impl/scan/project/TestNullColumnLoader.java    |  29 +-
 .../impl/TestResultSetLoaderOmittedValues.java     |  43 ++
 .../impl/TestResultSetLoaderTypeConversion.java    |  61 +-
 .../exec/record/metadata/TestTupleSchema.java      |   3 +-
 .../store/easy/text/compliant/BaseCsvTest.java     |   8 +
 .../easy/text/compliant/TestCsvWithSchema.java     | 819 ++++++++++++++++++---
 .../java/org/apache/drill/test/ClusterTest.java    |   2 +-
 .../apache/drill/test/rowSet/RowSetBuilder.java    |   1 +
 .../apache/drill/test/rowSet/RowSetUtilities.java  |  20 +
 .../test/rowSet/test/TestColumnConverter.java      | 188 ++++-
 .../{DummyWriterTest.java => TestDummyWriter.java} |   2 +-
 .../drill/test/rowSet/test/TestFillEmpties.java    | 233 +++++-
 .../test/rowSet/test/TestScalarAccessors.java      | 111 ++-
 .../drill/test/rowSet/test/TestSchemaBuilder.java  | 108 ++-
 .../org/apache/drill/exec/rpc/RequestIdMap.java    |  25 +-
 .../main/codegen/templates/ColumnAccessors.java    | 256 ++++---
 .../drill/exec/record/metadata/ColumnMetadata.java |  17 +-
 .../drill/exec/vector/accessor/ScalarReader.java   |   7 +
 .../drill/exec/vector/accessor/ScalarWriter.java   |  20 +
 .../drill/exec/vector/accessor/ValueType.java      |   6 +
 .../convert/AbstractConvertFromString.java         | 210 ++++++
 .../accessor/convert/AbstractWriteConverter.java   |  11 +
 .../accessor/convert/ConvertBooleanToString.java   |   6 +-
 .../accessor/convert/ConvertStringToBoolean.java   |  26 +-
 .../accessor/convert/ConvertStringToDate.java      |  25 +-
 .../accessor/convert/ConvertStringToDecimal.java   |  24 +-
 .../accessor/convert/ConvertStringToDouble.java    |  25 +-
 .../accessor/convert/ConvertStringToInt.java       |  25 +-
 .../accessor/convert/ConvertStringToInterval.java  |  24 +-
 .../accessor/convert/ConvertStringToLong.java      |  25 +-
 .../accessor/convert/ConvertStringToTime.java      |  25 +-
 .../accessor/convert/ConvertStringToTimeStamp.java |  25 +-
 .../accessor/convert/StandardConversions.java      |  35 +-
 .../exec/vector/accessor/impl/VectorPrinter.java   |   7 +-
 .../accessor/reader/AbstractScalarReader.java      |  24 +
 .../vector/accessor/reader/BitColumnReader.java    |  49 ++
 .../accessor/writer/AbstractArrayWriter.java       |   9 +-
 .../accessor/writer/AbstractFixedWidthWriter.java  |  75 +-
 .../accessor/writer/AbstractObjectWriter.java      |   5 +-
 .../accessor/writer/AbstractScalarWriter.java      |   6 +-
 .../accessor/writer/AbstractTupleWriter.java       |  16 +-
 .../vector/accessor/writer/BaseScalarWriter.java   |  25 +
 .../vector/accessor/writer/BaseVarWidthWriter.java |  47 +-
 .../vector/accessor/writer/BitColumnWriter.java    | 121 +++
 .../accessor/writer/NullableScalarWriter.java      |  13 +
 .../accessor/writer/OffsetVectorWriterImpl.java    |  84 ++-
 .../vector/accessor/writer/ScalarArrayWriter.java  |   4 +-
 .../accessor/writer/dummy/DummyScalarWriter.java   |   6 +
 pom.xml                                            |   2 +-
 71 files changed, 2765 insertions(+), 606 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/BooleanType.java b/common/src/main/java/org/apache/drill/common/types/BooleanType.java
similarity index 65%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/expr/BooleanType.java
rename to common/src/main/java/org/apache/drill/common/types/BooleanType.java
index 849ca78..f273472 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/BooleanType.java
+++ b/common/src/main/java/org/apache/drill/common/types/BooleanType.java
@@ -15,14 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr;
+package org.apache.drill.common.types;
 
-import org.apache.drill.common.map.CaseInsensitiveMap;
-
-import java.util.Arrays;
-import java.util.List;
 import java.util.Map;
 
+import org.apache.drill.common.map.CaseInsensitiveMap;
+
 /**
  * Enum that contains two boolean types: TRUE and FALSE.
  * Each has numeric representation and list of allowed literals.
@@ -30,30 +28,26 @@ import java.util.Map;
  * {@link <a href="https://www.postgresql.org/docs/9.6/static/datatype-boolean.html">Postgre Documentation</a>}
  */
 public enum BooleanType {
-  TRUE(1, Arrays.asList("t", "true", "y", "yes", "on", "1")),
-  FALSE(0, Arrays.asList("f", "false", "n", "no", "off", "0"));
+  TRUE(1, new String [] {"true", "1", "t", "y", "yes", "on",}),
+  FALSE(0, new String [] {"false", "0", "f", "n", "no", "off"});
 
   private final int numericValue;
-  private final List<String> literals;
+  private final String[] literals;
 
-  BooleanType(int numericValue, List<String> literals) {
+  BooleanType(int numericValue, String[] literals) {
     this.numericValue = numericValue;
     this.literals = literals;
   }
 
-  public int getNumericValue() {
-    return numericValue;
-  }
+  public int getNumericValue() { return numericValue; }
 
-  public List<String> getLiterals() {
-    return literals;
-  }
+  public String[] getLiterals() { return literals; }
 
   /** Contains all literals that are allowed to represent boolean type. */
   private static final Map<String, BooleanType> allLiterals = CaseInsensitiveMap.newHashMap();
   static {
-    for (BooleanType booleanType : BooleanType.values()) {
-      for (String literal : booleanType.getLiterals()) {
+    for (final BooleanType booleanType : BooleanType.values()) {
+      for (final String literal : booleanType.getLiterals()) {
         allLiterals.put(literal, booleanType);
       }
     }
@@ -76,4 +70,23 @@ public enum BooleanType {
     return booleanType;
   }
 
-}
\ No newline at end of file
+  /**
+   * Runtime form of Boolean conversion: allows any of the valid "true" values;
+   * assumes all other values are false. Does case-insensitive comparisons.
+   * If the string must be trimmed, the caller should do it.
+   *
+   * @param value non-null string value
+   * @return true (if one of the TRUE literals), else false
+   */
+
+  public static boolean fromString(final String value) {
+    // Optimized for runtime speed
+    final String lower = value.toLowerCase();
+    for (int i = 0; i < TRUE.literals.length; i++) {
+      if (TRUE.literals[i].equals(lower)) {
+        return true;
+      }
+    }
+    return false;
+  }
+}
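
A quick usage sketch of the conversion added above (behavior as described in
its javadoc):

    import org.apache.drill.common.types.BooleanType;

    BooleanType.fromString("YES");    // true: case-insensitive TRUE literal
    BooleanType.fromString("0");      // false
    BooleanType.fromString("maybe");  // false: unknown values map to false
    BooleanType.get("off");           // FALSE: strict, case-insensitive lookup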
diff --git a/common/src/main/java/org/apache/drill/common/types/Types.java b/common/src/main/java/org/apache/drill/common/types/Types.java
index 177ba50..f5567e5 100644
--- a/common/src/main/java/org/apache/drill/common/types/Types.java
+++ b/common/src/main/java/org/apache/drill/common/types/Types.java
@@ -30,7 +30,6 @@ import org.apache.drill.common.types.TypeProtos.MinorType;
 
 import com.google.protobuf.TextFormat;
 
-@SuppressWarnings("WeakerAccess")
 public class Types {
 
   public static final int MAX_VARCHAR_LENGTH = 65535;
@@ -64,8 +63,11 @@ public class Types {
     if (type.getMode() == REPEATED) {
       return false;
     }
+    return isNumericType(type.getMinorType());
+  }
 
-    switch(type.getMinorType()) {
+  public static boolean isNumericType(final MinorType type) {
+    switch (type) {
     case BIGINT:
     case VARDECIMAL:
     case DECIMAL38SPARSE:
@@ -463,23 +465,29 @@ public class Types {
     default:
       return true;
     }
-
   }
 
   public static boolean isFixedWidthType(final MajorType type) {
-    switch(type.getMinorType()) {
+    return isFixedWidthType(type.getMinorType());
+  }
+
+  public static boolean isFixedWidthType(final MinorType type) {
+    return ! isVarWidthType(type);
+  }
+
+  public static boolean isVarWidthType(final MinorType type) {
+    switch (type) {
     case VARBINARY:
     case VAR16CHAR:
     case VARCHAR:
     case UNION:
     case VARDECIMAL:
-      return false;
-    default:
       return true;
+    default:
+      return false;
     }
   }
 
-
   /**
    * Checks if given major type is string scalar type.
    *
@@ -490,7 +498,7 @@ public class Types {
     if (type.getMode() == REPEATED) {
       return false;
     }
-    switch(type.getMinorType()) {
+    switch (type.getMinorType()) {
     case FIXEDCHAR:
     case FIXED16CHAR:
     case VARCHAR:
@@ -736,8 +744,8 @@ public class Types {
    */
   public static MajorType.Builder calculateTypePrecisionAndScale(MajorType leftType, MajorType rightType, MajorType.Builder typeBuilder) {
     if (leftType.getMinorType().equals(rightType.getMinorType())) {
-      boolean isScalarString = Types.isScalarStringType(leftType) && Types.isScalarStringType(rightType);
-      boolean isDecimal = isDecimalType(leftType);
+      final boolean isScalarString = Types.isScalarStringType(leftType) && Types.isScalarStringType(rightType);
+      final boolean isDecimal = isDecimalType(leftType);
 
       if ((isScalarString || isDecimal) && leftType.hasPrecision() && rightType.hasPrecision()) {
         typeBuilder.setPrecision(Math.max(leftType.getPrecision(), rightType.getPrecision()));
@@ -769,8 +777,8 @@ public class Types {
       return true;
     }
 
-    List<MinorType> subtypes1 = type1.getSubTypeList();
-    List<MinorType> subtypes2 = type2.getSubTypeList();
+    final List<MinorType> subtypes1 = type1.getSubTypeList();
+    final List<MinorType> subtypes2 = type2.getSubTypeList();
     if (subtypes1 == subtypes2) { // Only occurs if both are null
       return true;
     }
@@ -783,8 +791,8 @@ public class Types {
 
     // Now it gets slow because subtype lists are not ordered.
 
-    List<MinorType> copy1 = new ArrayList<>(subtypes1);
-    List<MinorType> copy2 = new ArrayList<>(subtypes2);
+    final List<MinorType> copy1 = new ArrayList<>(subtypes1);
+    final List<MinorType> copy2 = new ArrayList<>(subtypes2);
     Collections.sort(copy1);
     Collections.sort(copy2);
     return copy1.equals(copy2);
@@ -803,4 +811,23 @@ public class Types {
   public static String typeKey(MinorType type) {
     return type.name().toLowerCase();
   }
+
+  public static int maxPrecision(MinorType type) {
+    switch (type) {
+    case DECIMAL18:
+      return 18;
+    case DECIMAL28DENSE:
+    case DECIMAL28SPARSE:
+      return 28;
+    case DECIMAL38DENSE:
+    case DECIMAL38SPARSE:
+      return 38;
+    case DECIMAL9:
+      return 9;
+    case VARDECIMAL:
+      return 38;
+    default:
+      return 0;
+    }
+  }
 }
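
For illustration, the new Types helpers behave as follows (values read off the
switch statements above):

    import org.apache.drill.common.types.TypeProtos.MinorType;
    import org.apache.drill.common.types.Types;

    Types.maxPrecision(MinorType.VARDECIMAL);   // 38
    Types.maxPrecision(MinorType.DECIMAL9);     // 9
    Types.maxPrecision(MinorType.INT);          // 0: not a decimal type
    Types.isVarWidthType(MinorType.VARCHAR);    // true
    Types.isFixedWidthType(MinorType.BIGINT);   // true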
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SimpleCastFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SimpleCastFunctions.java
index 196445e..a820edb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SimpleCastFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/SimpleCastFunctions.java
@@ -47,7 +47,7 @@ public class SimpleCastFunctions {
       byte[] buf = new byte[in.end - in.start];
       in.buffer.getBytes(in.start, buf, 0, in.end - in.start);
       String input = new String(buf, com.google.common.base.Charsets.UTF_8);
-      out.value = org.apache.drill.exec.expr.BooleanType.get(input).getNumericValue();
+      out.value = org.apache.drill.common.types.BooleanType.get(input).getNumericValue();
     }
   }
 
@@ -66,7 +66,7 @@ public class SimpleCastFunctions {
     public void setup() {}
 
     public void eval() {
-      byte[] outB = org.apache.drill.exec.expr.BooleanType.get(String.valueOf(in.value)).name().toLowerCase().getBytes();
+      byte[] outB = org.apache.drill.common.types.BooleanType.get(String.valueOf(in.value)).name().toLowerCase().getBytes();
       buffer.setBytes(0, outB);
       out.buffer = buffer;
       out.start = 0;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/columns/ColumnsScanFramework.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/columns/ColumnsScanFramework.java
index 7efe221..4dc9467 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/columns/ColumnsScanFramework.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/columns/ColumnsScanFramework.java
@@ -87,11 +87,6 @@ public class ColumnsScanFramework extends FileScanFramework {
        ((ColumnsScanBuilder) builder).requireColumnsArray);
     builder.addParser(columnsArrayManager.projectionParser());
     builder.addResolver(columnsArrayManager.resolver());
-
-    // This framework is (at present) used only for the text readers
-    // which use required Varchar columns to represent null columns.
-
-    builder.allowRequiredNullColumns(true);
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/NullColumnBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/NullColumnBuilder.java
index 311f44e..84fce62 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/NullColumnBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/NullColumnBuilder.java
@@ -137,7 +137,7 @@ public class NullColumnBuilder implements VectorSource {
           // use the default value from the output schema.
 
           col = new ResolvedNullColumn(name, type,
-              outputCol.decodeDefaultValue(), this, nullCols.size());
+              outputCol.defaultValue(), this, nullCols.size());
         } else {
 
           // Type and modes matches, just the output column
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/NullColumnLoader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/NullColumnLoader.java
index 9e48d62..e91ce70 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/NullColumnLoader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/NullColumnLoader.java
@@ -24,8 +24,9 @@ import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.ResultVectorCache;
 import org.apache.drill.exec.physical.rowSet.RowSetLoader;
-import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.VectorContainer;
+import org.apache.drill.exec.record.metadata.ColumnMetadata;
+import org.apache.drill.exec.record.metadata.MetadataUtils;
 
 /**
  * Create and populate null columns for the case in which a SELECT statement
@@ -62,7 +63,8 @@ public class NullColumnLoader extends StaticColumnLoader {
     String name();
     MajorType type();
     void setType(MajorType type);
-    Object defaultValue();
+    String defaultValue();
+    ColumnMetadata metadata();
   }
 
   public static final MajorType DEFAULT_NULL_TYPE = MajorType.newBuilder()
@@ -72,14 +74,10 @@ public class NullColumnLoader extends StaticColumnLoader {
 
   private final MajorType nullType;
   private final boolean allowRequired;
-  private final List<? extends NullColumnSpec> colDefns;
-  private final int colsWithDefaultValues[];
-  private final Object defaultValues[];
 
   public NullColumnLoader(ResultVectorCache vectorCache, List<? extends NullColumnSpec> defns,
       MajorType nullType, boolean allowRequired) {
     super(vectorCache);
-    this.colDefns = defns;
 
     // Normally, null columns must be optional or arrays. However
     // we allow required columns either if the client requests it,
@@ -102,32 +100,15 @@ public class NullColumnLoader extends StaticColumnLoader {
     // Populate the loader schema from that provided
 
     RowSetLoader schema = loader.writer();
-    int defaultCount = 0;
     for (int i = 0; i < defns.size(); i++) {
       NullColumnSpec defn = defns.get(i);
-      MaterializedField colSchema = selectType(defn);
-      schema.addColumn(colSchema);
-      if (defn.defaultValue() != null) {
-        defaultCount++;
-      }
-    }
-
-    // Setup default values, if any
-
-    if (defaultCount == 0) {
-      colsWithDefaultValues = null;
-      defaultValues = null;
-      return;
-    }
-    colsWithDefaultValues = new int[defaultCount];
-    defaultValues = new Object[defaultCount];
-    int defIndex = 0;
-    for (int i = 0; i < defns.size(); i++) {
-      NullColumnSpec defn = defns.get(i);
-      if (defn.defaultValue() != null) {
-        colsWithDefaultValues[defIndex] = i;
-        defaultValues[defIndex++] = defn.defaultValue();
+      ColumnMetadata colSchema = selectType(defn);
+      if (defn.metadata() != null) {
+        colSchema.setProperties(defn.metadata().properties());
+      } else if (defn.defaultValue() != null) {
+        colSchema.setDefaultValue(defn.defaultValue());
       }
+      schema.addColumn(colSchema);
     }
   }
 
@@ -138,7 +119,7 @@ public class NullColumnLoader extends StaticColumnLoader {
    * @return type of the empty column that implements the definition
    */
 
-  private MaterializedField selectType(NullColumnSpec defn) {
+  private ColumnMetadata selectType(NullColumnSpec defn) {
 
     // Prefer the type of any previous occurrence of
     // this column.
@@ -178,7 +159,7 @@ public class NullColumnLoader extends StaticColumnLoader {
       type = nullType;
     }
     defn.setType(type);
-    return MaterializedField.create(defn.name(), type);
+    return MetadataUtils.newScalar(defn.name(), type);
   }
 
   public VectorContainer output() {
@@ -188,20 +169,7 @@ public class NullColumnLoader extends StaticColumnLoader {
   @Override
   public VectorContainer load(int rowCount) {
     loader.startBatch();
-    if (colsWithDefaultValues == null) {
-      loader.skipRows(rowCount);
-    } else {
-     // At least one column has a default value. Set values
-      // for all columns. Any null values are a no-op.
-      RowSetLoader writer = loader.writer();
-      for (int i = 0; i < rowCount; i++) {
-        writer.start();
-        for (int j = 0; j < colsWithDefaultValues.length; j++) {
-          writer.scalar(colsWithDefaultValues[j]).setValue(defaultValues[j]);
-        }
-        writer.save();
-      }
-    }
+    loader.skipRows(rowCount);
     return loader.harvest();
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ResolvedNullColumn.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ResolvedNullColumn.java
index 80f9f0b..0048813 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ResolvedNullColumn.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ResolvedNullColumn.java
@@ -32,9 +32,9 @@ public class ResolvedNullColumn extends ResolvedColumn implements NullColumnSpec
 
   private final String name;
   private MajorType type;
-  private Object defaultValue;
+  private String defaultValue;
 
-  public ResolvedNullColumn(String name, MajorType type, Object defaultValue,
+  public ResolvedNullColumn(String name, MajorType type, String defaultValue,
       VectorSource source, int sourceIndex) {
     super(source, sourceIndex);
     this.name = name;
@@ -47,7 +47,7 @@ public class ResolvedNullColumn extends ResolvedColumn implements NullColumnSpec
     super(colDefn, source, sourceIndex);
     this.name = colDefn.name();
     this.type = colDefn.majorType();
-    this.defaultValue = colDefn.decodeDefaultValue();
+    this.defaultValue = colDefn.defaultValue();
   }
 
   @Override
@@ -77,5 +77,5 @@ public class ResolvedNullColumn extends ResolvedColumn implements NullColumnSpec
   }
 
   @Override
-  public Object defaultValue() { return defaultValue; }
+  public String defaultValue() { return defaultValue; }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ScanSchemaOrchestrator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ScanSchemaOrchestrator.java
index a1bebb8..23b4b1b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ScanSchemaOrchestrator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ScanSchemaOrchestrator.java
@@ -18,7 +18,9 @@
 package org.apache.drill.exec.physical.impl.scan.project;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MajorType;
@@ -165,6 +167,7 @@ public class ScanSchemaOrchestrator {
     private List<SchemaPath> projection;
     private TupleMetadata outputSchema;
     private SchemaTransformer schemaTransformer;
+    private Map<String, String> conversionProps;
 
     /**
      * Specify an optional metadata manager. Metadata is a set of constant
@@ -245,6 +248,16 @@ public class ScanSchemaOrchestrator {
     public void setSchemaTransformer(SchemaTransformer transformer) {
       this.schemaTransformer = transformer;
     }
+
+    public void setConversionProperty(String key, String value) {
+      if (key == null || value == null) {
+        return;
+      }
+      if (conversionProps == null) {
+        conversionProps = new HashMap<>();
+      }
+      conversionProps.put(key, value);
+    }
   }
 
   public static class ScanSchemaOptions {
@@ -281,16 +294,20 @@ public class ScanSchemaOrchestrator {
       schemaResolvers = builder.schemaResolvers;
       projection = builder.projection;
       useSchemaSmoothing = builder.useSchemaSmoothing;
-      allowRequiredNullColumns = builder.allowRequiredNullColumns;
+      boolean allowRequiredNulls = builder.allowRequiredNullColumns;
       if (builder.schemaTransformer != null) {
         // Use client-provided conversions
         schemaTransformer = builder.schemaTransformer;
       } else if (builder.outputSchema != null) {
         // Use only implicit conversions
-        schemaTransformer = new SchemaTransformerImpl(builder.outputSchema);
+        schemaTransformer = new SchemaTransformerImpl(builder.outputSchema, builder.conversionProps);
+        if (builder.outputSchema.getBooleanProperty(TupleMetadata.IS_STRICT_SCHEMA_PROP)) {
+          allowRequiredNulls = true;
+        }
       } else {
         schemaTransformer = null;
       }
+      allowRequiredNullColumns = allowRequiredNulls;
     }
   }
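
The conversion-property hook above is how a format plugin tunes the
from-string converters; the TextFormatPlugin change later in this commit uses
it to control blank-field handling (a sketch; constants as in that diff):

    builder.setConversionProperty(
        AbstractConvertFromString.BLANK_ACTION_PROP,
        AbstractConvertFromString.BLANK_AS_NULL);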
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnBuilder.java
index aae8fad..adf2362 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnBuilder.java
@@ -238,8 +238,7 @@ public class ColumnBuilder {
       vectorState = new RepeatedVectorState(colWriter.array(), (RepeatedValueVector) vector);
     } else if (columnSchema.isNullable()) {
       vectorState = new NullableVectorState(
-          colWriter,
-          (NullableVector) vector);
+          colWriter, (NullableVector) vector);
     } else {
       vectorState = SimpleVectorState.vectorState(columnSchema,
             colWriter.events(), vector);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ResultSetLoaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ResultSetLoaderImpl.java
index 201bd7c..1ad9304 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ResultSetLoaderImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ResultSetLoaderImpl.java
@@ -45,13 +45,13 @@ public class ResultSetLoaderImpl implements ResultSetLoader, LoaderInternals {
    */
 
   public static class ResultSetOptions {
-    public final int vectorSizeLimit;
-    public final int rowCountLimit;
-    public final ResultVectorCache vectorCache;
-    public final RequestedTuple projectionSet;
-    public final TupleMetadata schema;
-    public final long maxBatchSize;
-    public final SchemaTransformer schemaTransformer;
+    protected final int vectorSizeLimit;
+    protected final int rowCountLimit;
+    protected final ResultVectorCache vectorCache;
+    protected final RequestedTuple projectionSet;
+    protected final TupleMetadata schema;
+    protected final long maxBatchSize;
+    protected final SchemaTransformer schemaTransformer;
 
     public ResultSetOptions() {
       vectorSizeLimit = ValueVector.MAX_BUFFER_SIZE;
@@ -87,7 +87,6 @@ public class ResultSetLoaderImpl implements ResultSetLoader, LoaderInternals {
         .startObject(this)
         .attribute("vectorSizeLimit", vectorSizeLimit)
         .attribute("rowCountLimit", rowCountLimit)
-//        .attribute("projection", projection)
         .endObject();
     }
   }
@@ -167,7 +166,7 @@ public class ResultSetLoaderImpl implements ResultSetLoader, LoaderInternals {
     CLOSED
   }
 
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ResultSetLoaderImpl.class);
+  protected static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ResultSetLoaderImpl.class);
 
   /**
    * Options provided to this loader.
@@ -179,20 +178,20 @@ public class ResultSetLoaderImpl implements ResultSetLoader, LoaderInternals {
    * Allocator for vectors created by this loader.
    */
 
-  final BufferAllocator allocator;
+  private final BufferAllocator allocator;
 
   /**
    * Builds columns (vector, writer, state).
    */
 
-  final ColumnBuilder columnBuilder;
+  private final ColumnBuilder columnBuilder;
 
   /**
    * Internal structure used to work with the vectors (real or dummy) used
    * by this loader.
    */
 
-  final RowState rootState;
+  private final RowState rootState;
 
   /**
    * Top-level writer index that steps through the rows as they are written.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/SchemaTransformerImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/SchemaTransformerImpl.java
index 8c92701..1812fa3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/SchemaTransformerImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/SchemaTransformerImpl.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec.physical.rowSet.impl;
 
+import java.util.Map;
+
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
 import org.apache.drill.exec.record.metadata.ProjectionType;
@@ -33,6 +35,9 @@ import org.apache.drill.exec.vector.accessor.convert.StandardConversions.Convers
  * column is defined and has a type or mode different than the input.
  * Else, assumes no transform is needed. Subclases can change or enhance
  * this policy. The subclass provides the actual per-column transform logic.
+ * <p>
+ * This class also handles setting default values for required vectors
+ * when a default value is available from the column schema.
  */
 
 public class SchemaTransformerImpl implements SchemaTransformer {
@@ -40,33 +45,44 @@ public class SchemaTransformerImpl implements SchemaTransformer {
   private static final org.slf4j.Logger logger =
       org.slf4j.LoggerFactory.getLogger(SchemaTransformerImpl.class);
 
-  /**
-   * A no-op transform that simply keeps the input column schema and
-   * writer without any changes.
-   */
-  public static class PassThroughColumnTransform implements ColumnTransform {
+  public static abstract class AbstractColumnTransform implements ColumnTransform {
 
-    private final ColumnMetadata colDefn;
+    private final ColumnMetadata inputSchema;
+    private final ColumnMetadata outputSchema;
     private final ProjectionType projType;
 
-    public PassThroughColumnTransform(ColumnMetadata colDefn, ProjectionType projType) {
-      this.colDefn = colDefn;
+    public AbstractColumnTransform(ColumnMetadata colDefn, ProjectionType projType,
+        ColumnMetadata outputDefn) {
+      inputSchema = colDefn;
+      outputSchema = outputDefn;
       this.projType = projType;
     }
 
     @Override
-    public AbstractWriteConverter newWriter(ScalarWriter baseWriter) {
-      return null;
-    }
+    public ProjectionType projectionType() { return projType; }
 
     @Override
-    public ProjectionType projectionType() { return projType; }
+    public ColumnMetadata inputSchema() { return inputSchema; }
 
     @Override
-    public ColumnMetadata inputSchema() { return colDefn; }
+    public ColumnMetadata outputSchema() { return outputSchema; }
+  }
+
+  /**
+   * A no-op transform that simply keeps the input column schema and
+   * writer without any changes.
+   */
+  public static class PassThroughColumnTransform extends AbstractColumnTransform {
+
+    public PassThroughColumnTransform(ColumnMetadata colDefn, ProjectionType projType,
+        ColumnMetadata outputDefn) {
+      super(colDefn, projType, outputDefn);
+    }
 
     @Override
-    public ColumnMetadata outputSchema() { return colDefn; }
+    public AbstractWriteConverter newWriter(ScalarWriter baseWriter) {
+      return null;
+    }
   }
 
   /**
@@ -75,18 +91,13 @@ public class SchemaTransformerImpl implements SchemaTransformer {
    * two. The conversion writer factory is provided via composition,
    * not by subclassing this class.
    */
-  public static class ColumnTransformImpl implements ColumnTransform {
+  public static class ColumnSchemaTransform extends AbstractColumnTransform {
 
-    private final ColumnMetadata inputSchema;
-    private final ColumnMetadata outputSchema;
-    private final ProjectionType projType;
     private final ColumnConversionFactory conversionFactory;
 
-    public ColumnTransformImpl(ColumnMetadata inputSchema, ColumnMetadata outputSchema,
+    public ColumnSchemaTransform(ColumnMetadata inputSchema, ColumnMetadata outputSchema,
         ProjectionType projType, ColumnConversionFactory conversionFactory) {
-      this.inputSchema = inputSchema;
-      this.outputSchema = outputSchema;
-      this.projType = projType;
+      super(inputSchema, projType, outputSchema);
       this.conversionFactory = conversionFactory;
     }
 
@@ -97,34 +108,15 @@ public class SchemaTransformerImpl implements SchemaTransformer {
       }
       return conversionFactory.newWriter(baseWriter);
     }
-
-    @Override
-    public ProjectionType projectionType() { return projType; }
-
-    @Override
-    public ColumnMetadata inputSchema() { return inputSchema; }
-
-    @Override
-    public ColumnMetadata outputSchema() { return outputSchema; }
   }
 
   protected final TupleMetadata outputSchema;
+  protected final Map<String, String> properties;
 
-  public SchemaTransformerImpl(TupleMetadata outputSchema) {
+  public SchemaTransformerImpl(TupleMetadata outputSchema,
+      Map<String, String> properties) {
     this.outputSchema = outputSchema;
-  }
-
-  /**
-   * Creates a "null" or "no-op" transform: just uses the input schema
-   * as the output schema.
-   *
-   * @param inputSchema the input schema from the reader
-   * @param projType projection type
-   * @return a no-op transform
-   */
-  protected ColumnTransform noOpTransform(ColumnMetadata inputSchema,
-      ProjectionType projType) {
-    return new PassThroughColumnTransform(inputSchema, projType);
+    this.properties = properties;
   }
 
   /**
@@ -149,7 +141,7 @@ public class SchemaTransformerImpl implements SchemaTransformer {
 
     ColumnMetadata outputCol = outputSchema.metadata(inputSchema.name());
     if (outputCol == null) {
-      return noOpTransform(inputSchema, projType);
+      return new PassThroughColumnTransform(inputSchema, projType, inputSchema);
     }
 
     ConversionDefn defn = StandardConversions.analyze(inputSchema, outputCol);
@@ -158,7 +150,7 @@ public class SchemaTransformerImpl implements SchemaTransformer {
       switch (defn.type) {
       case NONE:
       case IMPLICIT:
-        return noOpTransform(inputSchema, projType);
+        return new PassThroughColumnTransform(inputSchema, projType, outputCol);
       case EXPLICIT:
         if (defn.conversionClass == null) {
           throw UserException.validationError()
@@ -167,13 +159,13 @@ public class SchemaTransformerImpl implements SchemaTransformer {
             .addContext("Output type", outputCol.typeString())
             .build(logger);
         }
-        factory = StandardConversions.factory(defn.conversionClass);
+        factory = StandardConversions.factory(defn.conversionClass, properties);
         break;
       default:
         throw new IllegalStateException("Unexpected conversion type: " + defn.type);
       }
     }
-    return new ColumnTransformImpl(inputSchema, outputCol, projType, factory);
+    return new ColumnSchemaTransform(inputSchema, outputCol, projType, factory);
   }
 
   /**
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/TupleState.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/TupleState.java
index 7bc419f..1eb441d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/TupleState.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/TupleState.java
@@ -231,7 +231,7 @@ public abstract class TupleState extends ContainerState
 
     @Override
     public void dump(HierarchicalFormatter format) {
-      // TODO Auto-generated method stub
+      // TODO
     }
   }
 
@@ -390,7 +390,7 @@ public abstract class TupleState extends ContainerState
         ResultVectorCache vectorCache,
         RequestedTuple projectionSet) {
       super(events, vectorCache, projectionSet);
-     }
+    }
 
     /**
      * Return the tuple writer for the map. If this is a single
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SchemaHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SchemaHandler.java
index 9df0eca..f1f7353 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SchemaHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SchemaHandler.java
@@ -139,6 +139,11 @@ public abstract class SchemaHandler extends DefaultSqlHandler {
           .message(e.getMessage())
           .addContext("Error while preparing / creating schema for [%s]", schemaSource)
           .build(logger);
+      } catch (IllegalArgumentException e) {
+        throw UserException.validationError(e)
+          .message(e.getMessage())
+          .addContext("Error while preparing / creating schema for [%s]", schemaSource)
+          .build(logger);
       }
     }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
index 32be4f8..afab274 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
@@ -25,6 +25,7 @@ import com.fasterxml.jackson.annotation.JsonPropertyOrder;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.metadata.schema.parser.SchemaExprParser;
 import org.joda.time.format.DateTimeFormatter;
@@ -75,8 +76,11 @@ public abstract class AbstractColumnMetadata extends AbstractPropertied implemen
   }
 
   public AbstractColumnMetadata(MaterializedField schema) {
-    name = schema.getName();
-    final MajorType majorType = schema.getType();
+    this(schema.getName(), schema.getType());
+  }
+
+  public AbstractColumnMetadata(String name, MajorType majorType) {
+    this.name = name;
     type = majorType.getMinorType();
     mode = majorType.getMode();
     precision = majorType.getPrecision();
@@ -151,8 +155,7 @@ public abstract class AbstractColumnMetadata extends AbstractPropertied implemen
 
   @Override
   public boolean isVariableWidth() {
-    final MinorType type = type();
-    return type == MinorType.VARCHAR || type == MinorType.VAR16CHAR || type == MinorType.VARBINARY;
+    return Types.isVarWidthType(type());
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapBuilder.java
index 408dcc4..6c8a2fe 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapBuilder.java
@@ -78,6 +78,10 @@ public class MapBuilder implements SchemaContainer {
     return this;
   }
 
+  public MapBuilder add(String name, MinorType type, int precision, int scale) {
+    return addDecimal(name, type, DataMode.REQUIRED, precision, scale);
+  }
+
   public MapBuilder addNullable(String name, MinorType type) {
     tupleBuilder.addNullable(name,  type);
     return this;
@@ -88,6 +92,10 @@ public class MapBuilder implements SchemaContainer {
     return this;
   }
 
+  public MapBuilder addNullable(String name, MinorType type, int precision, int scale) {
+    return addDecimal(name, type, DataMode.OPTIONAL, precision, scale);
+  }
+
   public MapBuilder addArray(String name, MinorType type) {
     tupleBuilder.addArray(name, type);
     return this;
@@ -98,6 +106,10 @@ public class MapBuilder implements SchemaContainer {
     return this;
   }
 
+  public MapBuilder addArray(String name, MinorType type, int precision, int scale) {
+    return addDecimal(name, type, DataMode.REPEATED, precision, scale);
+  }
+
   public MapBuilder addDecimal(String name, MinorType type,
       DataMode mode, int precision, int scale) {
     tupleBuilder.addDecimal(name, type, mode, precision, scale);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java
index 469c433..21577c0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java
@@ -20,7 +20,9 @@ package org.apache.drill.exec.record.metadata;
 import java.util.List;
 
 import org.apache.drill.common.types.TypeProtos.DataMode;
+import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.MaterializedField;
 
@@ -45,7 +47,8 @@ public class MetadataUtils {
    */
 
   public static ColumnMetadata fromField(MaterializedField field) {
-    MinorType type = field.getType().getMinorType();
+    MajorType majorType = field.getType();
+    MinorType type = majorType.getMinorType();
     switch (type) {
     case MAP:
       return MetadataUtils.newMap(field);
@@ -54,6 +57,10 @@ public class MetadataUtils {
         throw new UnsupportedOperationException(type.name() + " type must be nullable");
       }
       return new VariantColumnMetadata(field);
+    case VARDECIMAL:
+      int precision = majorType.hasPrecision() ? majorType.getPrecision() : Types.maxPrecision(type);
+      int scale = majorType.hasScale() ? majorType.getScale() : 0;
+      return MetadataUtils.newDecimal(field.getName(), type, majorType.getMode(), precision, scale);
     case LIST:
       switch (field.getType().getMode()) {
       case OPTIONAL:
@@ -162,4 +169,38 @@ public class MetadataUtils {
     assert type != MinorType.MAP && type != MinorType.UNION && type != MinorType.LIST;
     return new PrimitiveColumnMetadata(name, type, mode);
   }
+
+  public static PrimitiveColumnMetadata newScalar(String name, MajorType type) {
+    MinorType minorType = type.getMinorType();
+    assert minorType != MinorType.MAP && minorType != MinorType.UNION && minorType != MinorType.LIST;
+    return new PrimitiveColumnMetadata(name, type);
+  }
+
+  private static ColumnMetadata newDecimal(String name, MinorType type, DataMode mode,
+      int precision, int scale) {
+    if (precision < 0 ) {
+      throw new IllegalArgumentException("Precision cannot be negative : " +
+          precision);
+    }
+    if (scale < 0 ) {
+      throw new IllegalArgumentException("Scale cannot be negative : " +
+          scale);
+    }
+    int maxPrecision = Types.maxPrecision(type);
+    if (precision > maxPrecision) {
+      throw new IllegalArgumentException(String.format(
+          "%s(%d, %d) exceeds maximum suppored precision of %d",
+          type.toString(), precision, scale, maxPrecision));
+    }
+    if (scale > precision) {
+      throw new IllegalArgumentException(String.format(
+          "%s(%d, %d) scale exceeds precision",
+          type.toString(), precision, scale));
+    }
+    MaterializedField field = new ColumnBuilder(name, type)
+        .setMode(mode)
+        .setPrecisionAndScale(precision, scale)
+        .build();
+    return new PrimitiveColumnMetadata(field);
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
index 2187ffd..1e0b5b7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill.exec.record.metadata;
 
+import org.apache.drill.common.types.BooleanType;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -55,6 +56,10 @@ public class PrimitiveColumnMetadata extends AbstractColumnMetadata {
     super(schema);
   }
 
+  public PrimitiveColumnMetadata(String name, MajorType type) {
+    super(name, type);
+  }
+
   public PrimitiveColumnMetadata(String name, MinorType type, DataMode mode) {
     super(name, type, mode);
   }
@@ -241,13 +246,13 @@ public class PrimitiveColumnMetadata extends AbstractColumnMetadata {
         case BIGINT:
           return Long.parseLong(value);
         case FLOAT4:
-          return Float.parseFloat(value);
+          return (double) Float.parseFloat(value);
         case FLOAT8:
           return Double.parseDouble(value);
         case VARDECIMAL:
           return new BigDecimal(value);
         case BIT:
-          return Boolean.parseBoolean(value);
+          return BooleanType.fromString(value);
         case VARCHAR:
         case VARBINARY:
           return value;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/SchemaBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/SchemaBuilder.java
index 8b7f904..ff68dee 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/SchemaBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/SchemaBuilder.java
@@ -134,8 +134,7 @@ public class SchemaBuilder implements SchemaContainer {
   }
 
   public SchemaBuilder add(String name, MinorType type, int precision, int scale) {
-    tupleBuilder.addDecimal(name, type, DataMode.REQUIRED, precision, scale);
-    return this;
+    return addDecimal(name, type, DataMode.REQUIRED, precision, scale);
   }
 
   public SchemaBuilder addNullable(String name, MinorType type) {
@@ -149,8 +148,7 @@ public class SchemaBuilder implements SchemaContainer {
   }
 
   public SchemaBuilder addNullable(String name, MinorType type, int precision, int scale) {
-    tupleBuilder.addDecimal(name, type, DataMode.OPTIONAL, precision, scale);
-    return this;
+    return addDecimal(name, type, DataMode.OPTIONAL, precision, scale);
   }
 
   public SchemaBuilder addArray(String name, MinorType type) {
@@ -158,6 +156,10 @@ public class SchemaBuilder implements SchemaContainer {
     return this;
   }
 
+  public SchemaBuilder addArray(String name, MinorType type, int precision, int scale) {
+    return addDecimal(name, type, DataMode.REPEATED, precision, scale);
+  }
+
   public SchemaBuilder addDecimal(String name, MinorType type, DataMode mode, int precision, int scale) {
     tupleBuilder.addDecimal(name, type, mode, precision, scale);
     return this;
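
The new addArray() overload completes decimal support for all three data
modes. A minimal usage sketch (not part of the patch), assuming only the
builder methods shown in this file:

    import org.apache.drill.common.types.TypeProtos.MinorType;
    import org.apache.drill.exec.record.metadata.SchemaBuilder;
    import org.apache.drill.exec.record.metadata.TupleMetadata;

    public class DecimalModesSketch {
      public static void main(String[] args) {
        TupleMetadata schema = new SchemaBuilder()
            .add("req", MinorType.VARDECIMAL, 10, 2)          // REQUIRED decimal
            .addNullable("opt", MinorType.VARDECIMAL, 10, 2)  // OPTIONAL decimal
            .addArray("rep", MinorType.VARDECIMAL, 10, 2)     // REPEATED decimal
            .buildSchema();
        System.out.println(schema.size());  // 3
      }
    }
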
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
index a16edaf..d5abdb2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
@@ -248,6 +248,10 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
         builder.setProjection(scan.getColumns());
         builder.setFiles(scan.getWorkUnits());
         builder.setConfig(plugin.easyConfig().fsConf);
+
+        // The text readers use required Varchar columns to represent null columns.
+
+        builder.allowRequiredNullColumns(true);
         final Path selectionRoot = scan.getSelectionRoot();
         if (selectionRoot != null) {
           builder.metadataOptions().setSelectionRoot(selectionRoot);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
index fe0cbf5..59d0697 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
@@ -62,6 +62,7 @@ import org.apache.drill.exec.store.easy.text.compliant.v3.TextParsingSettingsV3;
 import org.apache.drill.exec.store.schedule.CompleteFileWork;
 import org.apache.drill.exec.store.text.DrillTextRecordReader;
 import org.apache.drill.exec.store.text.DrillTextRecordWriter;
+import org.apache.drill.exec.vector.accessor.convert.AbstractConvertFromString;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -237,6 +238,12 @@ public class TextFormatPlugin extends EasyFormatPlugin<TextFormatPlugin.TextForm
 
       builder.setOutputSchema(scan.getSchema());
 
+      // CSV maps blank columns to nulls (for nullable non-string columns)
+      // or to the default value (for non-nullable non-string columns).
+
+      builder.setConversionProperty(AbstractConvertFromString.BLANK_ACTION_PROP,
+          AbstractConvertFromString.BLANK_AS_NULL);
+
       return builder;
     }
   }
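
The blank-handling behavior is keyed by a conversion property. A minimal
sketch (not part of the patch) of building such a property map; the map-based
StandardConversions.newInstance() overload appears in the TestColumnConverter
hunk at the end of this mail:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.drill.exec.vector.accessor.convert.AbstractConvertFromString;

    public class BlankActionSketch {
      public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        // Blank fields become NULL for nullable columns, or the declared
        // default for required columns, rather than failing to parse.
        props.put(AbstractConvertFromString.BLANK_ACTION_PROP,
            AbstractConvertFromString.BLANK_AS_NULL);
        System.out.println(props);
      }
    }
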
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
index 276bae9..ce03303 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
@@ -298,7 +298,8 @@ public class Foreman implements Runnable {
          */
         FailureUtils.unrecoverableFailure(e, "Unable to handle out of memory condition in Foreman.", EXIT_CODE_HEAP_OOM);
       }
-
+    } catch (UserException e) {
+      queryStateProcessor.moveToState(QueryState.FAILED, e);
     } catch (AssertionError | Exception ex) {
       queryStateProcessor.moveToState(QueryState.FAILED,
           new ForemanException("Unexpected exception during fragment initialization: " + ex.getMessage(), ex));
@@ -782,7 +783,11 @@ public class Foreman implements Runnable {
       final UserException uex;
       if (resultException != null) {
         final boolean verbose = queryContext.getOptions().getOption(ExecConstants.ENABLE_VERBOSE_ERRORS_KEY).bool_val;
-        uex = UserException.systemError(resultException).addIdentity(queryContext.getCurrentEndpoint()).build(logger);
+        if (resultException instanceof UserException) {
+          uex = (UserException) resultException;
+        } else {
+          uex = UserException.systemError(resultException).addIdentity(queryContext.getCurrentEndpoint()).build(logger);
+        }
         resultBuilder.addError(uex.getOrCreatePBError(verbose));
       } else {
         uex = null;
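
The same unwrap-or-wrap pattern appears in both hunks above. As a sketch
(hypothetical helper, not part of the patch):

    import org.apache.drill.common.exceptions.UserException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ErrorWrapSketch {
      private static final Logger logger = LoggerFactory.getLogger(ErrorWrapSketch.class);

      // Pass an existing UserException through unchanged so its error type and
      // message survive; wrap anything else as a SYSTEM error so the client
      // still receives a UserException.
      static UserException asUserException(Throwable t) {
        if (t instanceof UserException) {
          return (UserException) t;
        }
        return UserException.systemError(t).build(logger);
      }
    }
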
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestByteComparisonFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestByteComparisonFunctions.java
index 0318bda..346455c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestByteComparisonFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestByteComparisonFunctions.java
@@ -36,7 +36,6 @@ import org.junit.experimental.categories.Category;
 
 @Category({UnlikelyTest.class, VectorTest.class})
 public class TestByteComparisonFunctions extends ExecTest {
-  //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestByteComparisonFunctions.class);
 
   private static BufferAllocator allocator;
   private static VarCharHolder hello;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java
index 2fb6fda..bf86f9d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java
@@ -63,7 +63,7 @@ public class TestNullColumnLoader extends SubOperatorTest {
     return makeNullCol(name, null, null);
   }
 
-  private ResolvedNullColumn makeNullCol(String name, MajorType nullType, Object defaultValue) {
+  private ResolvedNullColumn makeNullCol(String name, MajorType nullType, String defaultValue) {
 
     // For this test, we don't need the projection, so just
     // set it to null.
@@ -159,16 +159,18 @@ public class TestNullColumnLoader extends SubOperatorTest {
   }
 
   /**
-   * Test the ability to provide a default value for a null column
+   * Test the ability to provide a default value for a "null" column.
+   * Default values are only allowed for required "null" columns. For
+   * nullable columns, NULL is already the default.
    */
 
   @Test
   public void testDefaultValue() {
 
     final List<ResolvedNullColumn> defns = new ArrayList<>();
-    defns.add(makeNullCol("int", Types.optional(MinorType.INT), 10));
-    defns.add(makeNullCol("str", Types.optional(MinorType.VARCHAR), "foo"));
-    defns.add(makeNullCol("dub", Types.optional(MinorType.FLOAT8), 20.0D));
+    defns.add(makeNullCol("int", Types.required(MinorType.INT), "10"));
+    defns.add(makeNullCol("str", Types.required(MinorType.VARCHAR), "foo"));
+    defns.add(makeNullCol("dub", Types.required(MinorType.FLOAT8), "20.0"));
 
     final ResultVectorCache cache = new NullResultVectorCacheImpl(fixture.allocator());
     final MajorType nullType = Types.optional(MinorType.VARCHAR);
@@ -181,9 +183,9 @@ public class TestNullColumnLoader extends SubOperatorTest {
     // Verify values and types
 
     final TupleMetadata expectedSchema = new SchemaBuilder()
-        .addNullable("int", MinorType.INT)
-        .addNullable("str", MinorType.VARCHAR)
-        .addNullable("dub", MinorType.FLOAT8)
+        .add("int", MinorType.INT)
+        .add("str", MinorType.VARCHAR)
+        .add("dub", MinorType.FLOAT8)
         .buildSchema();
     final SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(10, "foo", 20.0D)
@@ -402,8 +404,8 @@ public class TestNullColumnLoader extends SubOperatorTest {
         .addNullable("extra", MinorType.VARCHAR)
         .buildSchema();
     final SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
-        .addRow("foo", "bar", null, 10, 20, null)
-        .addRow("foo", "bar", null, 10, 20, null)
+        .addRow("foo", null, null, 10, null, null)
+        .addRow("foo", null, null, 10, null, null)
         .build();
 
     RowSetUtilities.verify(expected, fixture.wrap(builder.output()));
@@ -421,6 +423,9 @@ public class TestNullColumnLoader extends SubOperatorTest {
    * The type and mode provided to the builder is that which would result from
    * schema smoothing. The types and modes should usually match, but verify
    * the rules when they don't.
+   * <p>
+   * Defaults for null columns are ignored: null columns use NULL as the
+   * null value.
    */
   @Test
   public void testSchemaWithConflicts() {
@@ -464,8 +469,8 @@ public class TestNullColumnLoader extends SubOperatorTest {
         .add("intOpt", MinorType.INT)
         .buildSchema();
     final SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
-        .addRow(null, "bar", null, 20)
-        .addRow(null, "bar", null, 20)
+        .addRow(null, null, null, 20)
+        .addRow(null, null, null, 20)
         .build();
 
     RowSetUtilities.verify(expected, fixture.wrap(builder.output()));
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderOmittedValues.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderOmittedValues.java
index ec37a6d..9ede6a5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderOmittedValues.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderOmittedValues.java
@@ -379,4 +379,47 @@ public class TestResultSetLoaderOmittedValues extends SubOperatorTest {
 
     rsLoader.close();
   }
+
+  /**
+   * Verify that a default value set on the schema is used to fill missing
+   * required columns.
+   */
+  @Test
+  public void testDefaultValues() {
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.INT)
+        .add("b", MinorType.VARCHAR)
+        .buildSchema();
+    schema.metadata("b").setDefaultValue("Foo");
+    ResultSetLoaderImpl.ResultSetOptions options = new OptionBuilder()
+        .setRowCountLimit(ValueVector.MAX_ROW_COUNT)
+        .setSchema(schema)
+        .build();
+    ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options);
+    RowSetLoader rootWriter = rsLoader.writer();
+
+    rsLoader.startBatch();
+    for (int i = 0; i < 7; i++) {
+      rootWriter.start();
+      rootWriter.scalar(0).setInt(i + 1);
+      if (i % 3 != 0) {
+        rootWriter.scalar(1).setString("b-" + (i + 1));
+      }
+      rootWriter.save();
+    }
+
+    RowSet result = fixture.wrap(rsLoader.harvest());
+    SingleRowSet expected = fixture.rowSetBuilder(result.batchSchema())
+        .addRow( 1, "Foo")
+        .addRow( 2, "b-2")
+        .addRow( 3, "b-3")
+        .addRow( 4, "Foo")
+        .addRow( 5, "b-5")
+        .addRow( 6, "b-6")
+        .addRow( 7, "Foo")
+        .build();
+    RowSetUtilities.verify(expected, result);
+
+    rsLoader.close();
+  }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderTypeConversion.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderTypeConversion.java
index a46ef9e..b9bb531 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderTypeConversion.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderTypeConversion.java
@@ -26,6 +26,7 @@ import org.apache.drill.exec.physical.rowSet.RowSetLoader;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.apache.drill.test.SubOperatorTest;
 import org.apache.drill.test.rowSet.RowSet;
 import org.apache.drill.test.rowSet.RowSetUtilities;
@@ -118,7 +119,7 @@ public class TestResultSetLoaderTypeConversion extends SubOperatorTest {
     ResultSetLoaderImpl.ResultSetOptions options = new OptionBuilder()
         .setSchema(inputSchema)
         .setRowCountLimit(ValueVector.MAX_ROW_COUNT)
-        .setSchemaTransform(new SchemaTransformerImpl(outputSchema))
+        .setSchemaTransform(new SchemaTransformerImpl(outputSchema, null))
         .build();
     ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options);
     rsLoader.startBatch();
@@ -146,4 +147,62 @@ public class TestResultSetLoaderTypeConversion extends SubOperatorTest {
 
     RowSetUtilities.verify(expected, actual);
   }
+
+  /**
+   * Test using a type converter with a default value. The default value
+   * must be valid for the output type.
+   */
+  @Test
+  public void testTypeConversionWithDefault() {
+    TupleMetadata outputSchema = new SchemaBuilder()
+        .add("n1", MinorType.INT)
+        .add("n2", MinorType.INT)
+        .buildSchema();
+    outputSchema.metadata("n1").setDefaultValue("888");
+    outputSchema.metadata("n2").setDefaultValue("999");
+
+    TupleMetadata inputSchema = new SchemaBuilder()
+        .add("n1", MinorType.VARCHAR)
+        .add("n2", MinorType.VARCHAR)
+        .buildSchema();
+
+    ResultSetLoaderImpl.ResultSetOptions options = new OptionBuilder()
+        .setSchema(inputSchema)
+        .setRowCountLimit(ValueVector.MAX_ROW_COUNT)
+        .setSchemaTransform(new SchemaTransformerImpl(outputSchema, null))
+        .build();
+    ResultSetLoader rsLoader = new ResultSetLoaderImpl(fixture.allocator(), options);
+    rsLoader.startBatch();
+
+    // Write the data as strings; the type converters store them as integers.
+
+    RowSetLoader rootWriter = rsLoader.writer();
+    ScalarWriter n1 = rootWriter.scalar("n1");
+    ScalarWriter n2 = rootWriter.scalar("n2");
+    rootWriter.start();
+    n1.setString("1");
+    rootWriter.save();
+    rootWriter.start();
+    n2.setString("22");
+    rootWriter.save();
+    rootWriter.start();
+    n1.setString("31");
+    n2.setString("32");
+    rootWriter.save();
+    RowSet actual = fixture.wrap(rsLoader.harvest());
+
+    // Build the expected vector without a type converter or defaults.
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+        .add("n1", MinorType.INT)
+        .add("n2", MinorType.INT)
+        .buildSchema();
+    final SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
+        .addRow(1, 999)
+        .addRow(888, 22)
+        .addRow(31, 32)
+        .build();
+
+    RowSetUtilities.verify(expected, actual);
+  }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
index a33d8d4..11f60cb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
@@ -326,6 +326,7 @@ public class TestTupleSchema extends SubOperatorTest {
     MaterializedField field = SchemaBuilder.columnSchema("u", MinorType.UNION, DataMode.OPTIONAL);
     ColumnMetadata col = MetadataUtils.fromField(field);
     assertFalse(col.isArray());
+    assertTrue(col.isVariableWidth());
     doVariantTest(col);
   }
 
@@ -339,6 +340,7 @@ public class TestTupleSchema extends SubOperatorTest {
     // List modeled as a repeated element. Implementation is a bit
     // more complex, but does not affect this abstract description.
 
+    assertFalse(col.isVariableWidth());
     doVariantTest(col);
   }
 
@@ -347,7 +349,6 @@ public class TestTupleSchema extends SubOperatorTest {
     assertTrue(col instanceof VariantColumnMetadata);
 
     assertTrue(col.isNullable());
-    assertFalse(col.isVariableWidth());
     assertFalse(col.isMap());
     assertTrue(col.isVariant());
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/BaseCsvTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/BaseCsvTest.java
index f028e0e..c2aeac6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/BaseCsvTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/BaseCsvTest.java
@@ -99,6 +99,14 @@ public class BaseCsvTest extends ClusterTest {
     client.resetSession(ExecConstants.MIN_READER_WIDTH_KEY);
   }
 
+  protected void enableSchema(boolean enable) {
+    client.alterSession(ExecConstants.STORE_TABLE_USE_SCHEMA_FILE, enable);
+  }
+
+  protected void resetSchema() {
+    client.resetSession(ExecConstants.STORE_TABLE_USE_SCHEMA_FILE);
+  }
+
   protected static void buildFile(String fileName, String[] data) throws IOException {
     buildFile(new File(testDir, fileName), data);
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestCsvWithSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestCsvWithSchema.java
index 2f6448e..63e0988 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestCsvWithSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestCsvWithSchema.java
@@ -19,6 +19,8 @@ package org.apache.drill.exec.store.easy.text.compliant;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.apache.drill.test.rowSet.RowSetUtilities.dec;
 
 import java.io.File;
 import java.io.IOException;
@@ -27,7 +29,6 @@ import java.util.Iterator;
 import org.apache.drill.categories.RowSetTests;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.rpc.RpcException;
@@ -37,7 +38,10 @@ import org.apache.drill.test.rowSet.RowSetBuilder;
 import org.apache.drill.test.rowSet.RowSetComparison;
 import org.apache.drill.test.rowSet.RowSetReader;
 import org.apache.drill.test.rowSet.RowSetUtilities;
+import org.joda.time.Instant;
 import org.joda.time.LocalDate;
+import org.joda.time.LocalTime;
+import org.joda.time.Period;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -54,56 +58,51 @@ public class TestCsvWithSchema extends BaseCsvTest {
   protected static final String FILE1_NAME = "file1.csv";
   protected static final String FILE_N_NAME = "file%d.csv";
 
-  protected static String basicFileContents[] = {
-      "intcol,datecol,str,dub",
-      "10,2019-03-20,it works!,1234.5"
+  private static String basicFileContents[] = {
+    "intcol,datecol,str,dub",
+    "10,2019-03-20,it works!,1234.5"
   };
 
-  public static final String raggedMulti1Contents[] = {
-      "id,name,date,gender",
-      "1,wilma,2019-01-18,female",
-      "2,fred,2019-01-19,male",
-      "4,betty,2019-05-04"
+  private static final String raggedMulti1Contents[] = {
+    "id,name,date,gender",
+    "1,wilma,2019-01-18,female",
+    "2,fred,2019-01-19,male",
+    "4,betty,2019-05-04"
   };
 
-  public static final String multi1Contents[] = {
-      "id,name,date,gender",
-      "1,wilma,2019-01-18,female",
-      "2,fred,2019-01-19,male",
-      "4,betty,2019-05-04,NA"
+  private static final String multi1Contents[] = {
+    "id,name,date,gender",
+    "1,wilma,2019-01-18,female",
+    "2,fred,2019-01-19,male",
+    "4,betty,2019-05-04,NA"
   };
 
-  public static final String multi2Contents[] = {
-      "id,name,date",
-      "3,barney,2001-01-16"
+  private static final String multi2FullContents[] = {
+    "id,name,date",
+    "3,barney,2001-01-16,NA"
   };
 
-  public static final String multi2FullContents[] = {
-      "id,name,date",
-      "3,barney,2001-01-16,NA"
+  private static final String reordered2Contents[] = {
+    "name,id,date",
+    "barney,3,2001-01-16"
   };
 
-  public static final String reordered2Contents[] = {
-      "name,id,date",
-      "barney,3,2001-01-16"
+  private static final String multi3Contents[] = {
+    "name,date",
+    "dino,2018-09-01"
   };
 
-  public static final String multi3Contents[] = {
-      "name,date",
-      "dino,2018-09-01"
+  private static final String nameOnlyContents[] = {
+    "name",
+    "dino"
   };
 
-  public static final String nameOnlyContents[] = {
-      "name",
-      "dino"
-  };
-
-  public static final String SCHEMA_SQL = "create or replace schema (" +
-      "id int not null, " +
-      "`date` date format 'yyyy-MM-dd', " +
-      "gender varchar not null default 'NA', " +
-      "comment varchar not null default 'ABC') " +
-      "for table %s";
+  private static final String SCHEMA_SQL = "create or replace schema (" +
+    "id int not null, " +
+    "`date` date format 'yyyy-MM-dd', " +
+    "gender varchar not null default 'NA', " +
+    "comment varchar not null default 'ABC') " +
+    "for table %s";
 
   @BeforeClass
   public static void setup() throws Exception {
@@ -120,12 +119,14 @@ public class TestCsvWithSchema extends BaseCsvTest {
     return "`dfs.data`.`" + tableName + "`";
   }
 
-  private void enableSchema(boolean enable) {
-    client.alterSession(ExecConstants.STORE_TABLE_USE_SCHEMA_FILE, enable);
+  private void enableSchemaSupport() {
+    enableV3(true);
+    enableSchema(true);
   }
 
-  private void resetSchema() {
-    client.resetSession(ExecConstants.STORE_TABLE_USE_SCHEMA_FILE);
+  private void resetSchemaSupport() {
+    resetV3();
+    resetSchema();
   }
 
   /**
@@ -142,8 +143,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
     String tablePath = buildTable("basic", basicFileContents);
 
     try {
-      enableV3(true);
-      enableSchema(true);
+      enableSchemaSupport();
       String schemaSql = "create schema (intcol int not null, datecol date not null, " +
           "`dub` double not null, `extra` bigint not null default '20') " +
           "for table " + tablePath;
@@ -164,8 +164,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
           .build();
       RowSetUtilities.verify(expected, actual);
     } finally {
-      resetV3();
-      resetSchema();
+      resetSchemaSupport();
     }
   }
 
@@ -181,8 +180,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
     String tablePath = buildTable(tableName, multi3Contents);
 
     try {
-      enableV3(true);
-      enableSchema(true);
+      enableSchemaSupport();
       run(SCHEMA_SQL, tablePath);
       String sql = "SELECT id, `name` FROM " + tablePath;
       RowSet actual = client.queryBuilder().sql(sql).rowSet();
@@ -196,8 +194,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
           .build();
       RowSetUtilities.verify(expected, actual);
     } finally {
-      resetV3();
-      resetSchema();
+      resetSchemaSupport();
     }
   }
 
@@ -210,8 +207,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
     String tablePath = buildTable(tableName, multi3Contents);
 
     try {
-      enableV3(true);
-      enableSchema(true);
+      enableSchemaSupport();
       String schemaSql = SCHEMA_SQL.replace("id int not null", "id int not null default '-1'");
       run(schemaSql, tablePath);
       String sql = "SELECT id, `name`, `date` FROM " + tablePath;
@@ -227,8 +223,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
           .build();
       RowSetUtilities.verify(expected, actual);
     } finally {
-      resetV3();
-      resetSchema();
+      resetSchemaSupport();
     }
   }
 
@@ -241,10 +236,9 @@ public class TestCsvWithSchema extends BaseCsvTest {
     String tablePath = buildTable(tableName, nameOnlyContents);
 
     try {
-      enableV3(true);
-      enableSchema(true);
+      enableSchemaSupport();
       String schemaSql = SCHEMA_SQL.replace("`date` date format 'yyyy-MM-dd'",
-          "`date` date format 'yyyy-MM-dd' default '2001-02-03'");
+          "`date` date not null format 'yyyy-MM-dd' default '2001-02-03'");
       run(schemaSql, tablePath);
       String sql = "SELECT id, `name`, `date` FROM " + tablePath;
       RowSet actual = client.queryBuilder().sql(sql).rowSet();
@@ -252,15 +246,14 @@ public class TestCsvWithSchema extends BaseCsvTest {
       TupleMetadata expectedSchema = new SchemaBuilder()
           .add("id", MinorType.INT)
           .add("name", MinorType.VARCHAR)
-          .addNullable("date", MinorType.DATE)
+          .add("date", MinorType.DATE)
           .buildSchema();
       RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
           .addRow(0, "dino", new LocalDate(2001, 2, 3))
           .build();
       RowSetUtilities.verify(expected, actual);
     } finally {
-      resetV3();
-      resetSchema();
+      resetSchemaSupport();
     }
   }
 
@@ -277,8 +270,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
     RowSet expected1 = null;
     RowSet expected2 = null;
     try {
-      enableV3(true);
-      enableSchema(true);
+      enableSchemaSupport();
       enableMultiScan();
       String tablePath = buildTable("multiFileSchema", raggedMulti1Contents, reordered2Contents);
       run(SCHEMA_SQL, tablePath);
@@ -298,7 +290,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
       expected1 = new RowSetBuilder(client.allocator(), expectedSchema)
           .addRow(1, "wilma", new LocalDate(2019, 1, 18), "female", "ABC")
           .addRow(2, "fred", new LocalDate(2019, 1, 19), "male", "ABC")
-          .addRow(4, "betty", new LocalDate(2019, 5, 4), "", "ABC")
+          .addRow(4, "betty", new LocalDate(2019, 5, 4), "NA", "ABC")
           .build();
       expected2 = new RowSetBuilder(client.allocator(), expectedSchema)
           .addRow(3, "barney", new LocalDate(2001, 1, 16), "NA", "ABC")
@@ -333,11 +325,11 @@ public class TestCsvWithSchema extends BaseCsvTest {
     } finally {
       expected1.clear();
       expected2.clear();
-      client.resetSession(ExecConstants.ENABLE_V3_TEXT_READER_KEY);
-      client.resetSession(ExecConstants.STORE_TABLE_USE_SCHEMA_FILE);
-      client.resetSession(ExecConstants.MIN_READER_WIDTH_KEY);
+      resetSchemaSupport();
+      resetMultiScan();
     }
   }
+
   /**
    * Test the schema we get in V2 when the table read order is random.
    * Worst-case: the two files have different column counts and
@@ -575,9 +567,8 @@ public class TestCsvWithSchema extends BaseCsvTest {
   @Test
   public void testSchemaExplicitSort() throws Exception {
     try {
-      enableSchema(true);
+      enableSchemaSupport();
       enableMultiScan();
-      enableV3(true);
       // V3 handles ragged columns
       String tablePath = buildTable("v3ExplictSort", raggedMulti1Contents, reordered2Contents);
       run(SCHEMA_SQL, tablePath);
@@ -591,7 +582,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
           .addRow(1, "wilma", "female")
           .addRow(2, "fred", "male")
           .addRow(3, "barney", "NA")
-          .addRow(4, "betty", "")
+          .addRow(4, "betty", "NA")
           .build();
       for (int i = 0; i < 10; i++) {
         RowSet result = client.queryBuilder().sql(sql).rowSet();
@@ -599,8 +590,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
       }
       expected.clear();
     } finally {
-      resetV3();
-      resetSchema();
+      resetSchemaSupport();
       resetMultiScan();
     }
   }
@@ -624,8 +614,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
   public void testMultiFileSchemaMissingCol() throws Exception {
     RowSet expected = null;
     try {
-      enableV3(true);
-      enableSchema(true);
+      enableSchemaSupport();
       enableMultiScan();
       String tablePath = buildTable("schemaMissingCols", raggedMulti1Contents,
           reordered2Contents, multi3Contents);
@@ -649,7 +638,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
           .addRow(1, "wilma", new LocalDate(2019, 1, 18), "female", "ABC")
           .addRow(2, "fred", new LocalDate(2019, 1, 19), "male", "ABC")
           .addRow(3, "barney", new LocalDate(2001, 1, 16), "NA", "ABC")
-          .addRow(4, "betty", new LocalDate(2019, 5, 4), "", "ABC")
+          .addRow(4, "betty", new LocalDate(2019, 5, 4), "NA", "ABC")
           .build();
 
       // Loop 10 times so that, as the two reader fragments read the two
@@ -662,9 +651,8 @@ public class TestCsvWithSchema extends BaseCsvTest {
       }
     } finally {
       expected.clear();
-      resetV3();
+      resetSchemaSupport();
       resetMultiScan();
-      resetSchema();
     }
   }
 
@@ -682,8 +670,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
         reordered2Contents, nameOnlyContents);
 
     try {
-      enableV3(true);
-      enableSchema(true);
+      enableSchemaSupport();
       run(SCHEMA_SQL, tablePath);
       String sql = "SELECT * FROM " + tablePath + "ORDER BY id";
       RowSet actual = client.queryBuilder().sql(sql).rowSet();
@@ -704,8 +691,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
           .build();
       RowSetUtilities.verify(expected, actual);
     } finally {
-      resetV3();
-      resetSchema();
+      resetSchemaSupport();
     }
   }
 
@@ -720,8 +706,7 @@ public class TestCsvWithSchema extends BaseCsvTest {
         reordered2Contents, nameOnlyContents);
 
     try {
-      enableV3(true);
-      enableSchema(true);
+      enableSchemaSupport();
       String sql = SCHEMA_SQL +
           " PROPERTIES ('" + TupleMetadata.IS_STRICT_SCHEMA_PROP + "'='true')";
       run(sql, tablePath);
@@ -743,14 +728,13 @@ public class TestCsvWithSchema extends BaseCsvTest {
           .build();
       RowSetUtilities.verify(expected, actual);
     } finally {
-      resetV3();
-      resetSchema();
+      resetSchemaSupport();
     }
   }
 
   /**
    * Test a strict schema where it is needed most: in a scan with multiple
-   * fragments, each of which sees a diffrent reader schema. The output schema
+   * fragments, each of which sees a different reader schema. The output schema
    * ensures that each scan independently reports the same schema, so that the
    * downstream sort operator gets a single consistent batch schema.
    */
@@ -761,9 +745,8 @@ public class TestCsvWithSchema extends BaseCsvTest {
         reordered2Contents, nameOnlyContents);
 
     try {
-      enableV3(true);
+      enableSchemaSupport();
       enableMultiScan();
-      enableSchema(true);
       String sql = SCHEMA_SQL +
           " PROPERTIES ('" + TupleMetadata.IS_STRICT_SCHEMA_PROP + "'='true')";
       run(sql, tablePath);
@@ -785,9 +768,671 @@ public class TestCsvWithSchema extends BaseCsvTest {
           .build();
       RowSetUtilities.verify(expected, actual);
     } finally {
-      resetV3();
+      resetSchemaSupport();
       resetMultiScan();
+    }
+  }
+
+  private static final String boolContents[] = {
+    "id,bool_col",
+    "1,true",
+    "2,false",
+    "3,TRUE",
+    "4,FALSE",
+    "5,t",
+    "6,T",
+    "7,1",
+    "8,0",
+    "9",
+    "10,y",
+    "11,Y",
+    "12,yes",
+    "13,yEs",
+    "14,on",
+    "15,ON",
+    "16,foo"
+  };
+
+  /**
+   * Test the many ways to specify "true" for boolean columns. Anything
+   * that is not recognized as true is treated as false.
+   */
+  @Test
+  public void testBool() throws Exception {
+    String tableName = "bool";
+    String tablePath = buildTable(tableName, boolContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, " +
+          "bool_col boolean not null default `true` " +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("id", MinorType.INT)
+          .add("bool_col", MinorType.BIT)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow( 1, true)
+          .addRow( 2, false)
+          .addRow( 3, true)
+          .addRow( 4, false)
+          .addRow( 5, true)
+          .addRow( 6, true)
+          .addRow( 7, true)
+          .addRow( 8, false)
+          .addRow( 9, true)
+          .addRow(10, true)
+          .addRow(11, true)
+          .addRow(12, true)
+          .addRow(13, true)
+          .addRow(14, true)
+          .addRow(15, true)
+          .addRow(16, false)
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  private static final String decimalContents[] = {
+    "id,decimal_col",
+    "1,12.34",
+    "2,-56.789",
+    "3,0",
+    "4,8",
+    "5",
+    "6,0.00",
+    "7,-0.00",
+  };
+
+  /**
+   * Basic decimal sanity test showing rounding, using default values,
+   * and so on.
+   */
+  @Test
+  public void testDecimal() throws Exception {
+    String tableName = "decimal";
+    String tablePath = buildTable(tableName, decimalContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, " +
+          "decimal_col decimal(5,2) not null default `100.00` " +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("id", MinorType.INT)
+          .add("decimal_col", MinorType.VARDECIMAL, 5, 2)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow(1, dec("12.34"))
+          .addRow(2, dec("-56.79"))
+          .addRow(3, dec("0"))
+          .addRow(4, dec("8"))
+          .addRow(5, dec("100.00"))
+          .addRow(6, dec("0.00"))
+          .addRow(7, dec("0.00"))
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  /**
+   * Verify that a decimal type without precision or scale defaults
+   * to precision of 38, scale of 0.
+   */
+  @Test
+  public void testDecimalNoPrecOrScale() throws Exception {
+    String tableName = "noPrecOrScale";
+    String tablePath = buildTable(tableName, decimalContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, " +
+          "decimal_col decimal not null default `100.00` " +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("id", MinorType.INT)
+          .add("decimal_col", MinorType.VARDECIMAL, 38, 0)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow(1, dec("12"))
+          .addRow(2, dec("-57"))
+          .addRow(3, dec("0"))
+          .addRow(4, dec("8"))
+          .addRow(5, dec("100"))
+          .addRow(6, dec("0"))
+          .addRow(7, dec("0"))
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  /**
+   * Verify that a decimal type with no scale defaults to a scale of 0.
+   */
+  @Test
+  public void testDecimalNoScale() throws Exception {
+    String tableName = "noScale";
+    String tablePath = buildTable(tableName, decimalContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, " +
+          "decimal_col decimal(5) not null default `100.00` " +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("id", MinorType.INT)
+          .add("decimal_col", MinorType.VARDECIMAL, 5, 0)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow(1, dec("12"))
+          .addRow(2, dec("-57"))
+          .addRow(3, dec("0"))
+          .addRow(4, dec("8"))
+          .addRow(5, dec("100"))
+          .addRow(6, dec("0"))
+          .addRow(7, dec("0"))
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  private static final String raggedDecimalContents[] = {
+    "id,decimal_col",
+    "1,1234.5678",
+    "2",
+    "3,-12.345"
+  };
+
+  /**
+   * Verify that the decimal default value is rounded according
+   * to the scale specified in the decimal type.
+   */
+  @Test
+  public void testDecimalDefaultRound() throws Exception {
+    String tableName = "defaultRound";
+    String tablePath = buildTable(tableName, raggedDecimalContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, " +
+          "decimal_col decimal(5) not null default `1111.56789` " +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("id", MinorType.INT)
+          .add("decimal_col", MinorType.VARDECIMAL, 5, 0)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow(1, dec("1235"))
+          .addRow(2, dec("1112"))
+          .addRow(3, dec("-12"))
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  private static final String decimalOverflowContents[] = {
+    "id,decimal_col",
+    "1,99999.9",
+  };
+
+  /**
+   * Test decimal overflow during data reads. The overflow occurs after
+   * rounding the data value of 99999.9 to 100000.
+   */
+  @Test
+  public void testDecimalOverflow() throws Exception {
+    String tableName = "decimalOverflow";
+    String tablePath = buildTable(tableName, decimalOverflowContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, " +
+          "decimal_col decimal(5) not null" +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      try {
+        client.queryBuilder().sql(sql).run();
+        fail();
+      } catch (UserRemoteException e) {
+        assertTrue(e.getMessage().contains("VALIDATION ERROR"));
+        assertTrue(e.getMessage().contains("Value 100000 overflows specified precision 5 with scale 0"));
+      }
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  /**
+   * Test proper handling of overflow for a default value. In this case,
+   * the overflow occurs after rounding.
+   */
+  @Test
+  public void testDecimalDefaultOverflow() throws Exception {
+    String tableName = "decimalDefaultOverflow";
+    String tablePath = buildTable(tableName, raggedDecimalContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, " +
+          "decimal_col decimal(5) not null default `99999.9` " +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      try {
+        client.queryBuilder().sql(sql).run();
+        fail();
+      } catch (UserRemoteException e) {
+        assertTrue(e.getMessage().contains("VALIDATION ERROR"));
+        assertTrue(e.getMessage().contains("Value 100000 overflows specified precision 5 with scale 0"));
+      }
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  @Test
+  public void testInvalidDecimalSchema() throws Exception {
+    String tableName = "invalidDecimal";
+    String tablePath = buildTable(tableName, raggedDecimalContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, " +
+          "decimal_col decimal(39) not null" +
+          ") for table %s";
+      try {
+        run(sql, tablePath);
+        fail();
+      } catch (UserRemoteException e) {
+        assertTrue(e.getMessage().contains("VALIDATION ERROR"));
+        assertTrue(e.getMessage().contains("VARDECIMAL(39, 0) exceeds maximum suppored precision of 38"));
+      }
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  private static final String trivialContents[] = {
+    "id",
+    "1"
+  };
+
+  @Test
+  public void testMissingCols() throws Exception {
+    String tableName = "missingCols";
+    String tablePath = buildTable(tableName, trivialContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "col_int integer, " +
+          "col_bigint bigint, " +
+          "col_double double, " +
+          "col_float float, " +
+          "col_var varchar, " +
+          "col_boolean boolean, " +
+          "col_interval interval, " +
+          "col_time time, " +
+          "col_date date, " +
+          "col_timestamp timestamp" +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .addNullable("col_int", MinorType.INT)
+          .addNullable("col_bigint", MinorType.BIGINT)
+          .addNullable("col_double", MinorType.FLOAT8)
+          .addNullable("col_float", MinorType.FLOAT4)
+          .addNullable("col_var", MinorType.VARCHAR)
+          .addNullable("col_boolean", MinorType.BIT)
+          .addNullable("col_interval", MinorType.INTERVAL)
+          .addNullable("col_time", MinorType.TIME)
+          .addNullable("col_date", MinorType.DATE)
+          .addNullable("col_timestamp", MinorType.TIMESTAMP)
+          .add("id", MinorType.VARCHAR)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow(null, null, null, null, null, null, null, null, null, null, "1")
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetV3();
       resetSchema();
     }
   }
+
+  /**
+   * Verify that, if a schema is provided, a column is missing,
+   * and there is no default, the mode is left at required and
+   * the column is filled with zeros. Note that this behavior is
+   * specific to the text readers: without a schema, even a missing
+   * VARCHAR column will be REQUIRED and set to an empty string
+   * (reason: if the column does appear, it will be a required VARCHAR,
+   * so, for consistency, missing columns are also required).
+   */
+  @Test
+  public void testMissingColsReq() throws Exception {
+    String tableName = "missingColsStrict";
+    String tablePath = buildTable(tableName, trivialContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "col_int integer not null, " +
+          "col_bigint bigint not null, " +
+          "col_double double not null, " +
+          "col_float float not null, " +
+          "col_var varchar not null, " +
+          "col_boolean boolean not null, " +
+          "col_interval interval not null, " +
+          "col_time time not null, " +
+          "col_date date not null, " +
+          "col_timestamp timestamp not null" +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("col_int", MinorType.INT)
+          .add("col_bigint", MinorType.BIGINT)
+          .add("col_double", MinorType.FLOAT8)
+          .add("col_float", MinorType.FLOAT4)
+          .add("col_var", MinorType.VARCHAR)
+          .add("col_boolean", MinorType.BIT)
+          .add("col_interval", MinorType.INTERVAL)
+          .add("col_time", MinorType.TIME)
+          .add("col_date", MinorType.DATE)
+          .add("col_timestamp", MinorType.TIMESTAMP)
+          .add("id", MinorType.VARCHAR)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow(0, 0L, 0.0, 0D, "", false, new Period(0), 0, 0L, 0L, "1")
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  /**
+   * Verify the behavior of missing columns, not null mode, with
+   * a default value.
+   */
+  @Test
+  public void testMissingColsReqDefault() throws Exception {
+    String tableName = "missingColsDefault";
+    String tablePath = buildTable(tableName, trivialContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "col_int integer not null default '10', " +
+          "col_bigint bigint not null default '10', " +
+          "col_double double not null default '10.5', " +
+          "col_float float not null default '10.5', " +
+          "col_var varchar not null default 'foo', " +
+          "col_boolean boolean not null default '1', " +
+          "col_interval interval not null default 'P10D', " +
+          "col_time time not null default '12:34:56', " +
+          "col_date date not null default '2019-03-28', " +
+          "col_timestamp timestamp not null format 'yyyy-MM-dd HH:mm:ss' default '2019-03-28 12:34:56'" +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("col_int", MinorType.INT)
+          .add("col_bigint", MinorType.BIGINT)
+          .add("col_double", MinorType.FLOAT8)
+          .add("col_float", MinorType.FLOAT4)
+          .add("col_var", MinorType.VARCHAR)
+          .add("col_boolean", MinorType.BIT)
+          .add("col_interval", MinorType.INTERVAL)
+          .add("col_time", MinorType.TIME)
+          .add("col_date", MinorType.DATE)
+          .add("col_timestamp", MinorType.TIMESTAMP)
+          .add("id", MinorType.VARCHAR)
+          .buildSchema();
+      LocalTime lt = new LocalTime(12, 34, 56);
+      LocalDate ld = new LocalDate(2019, 3, 28);
+      Instant ts = ld.toDateTime(lt).toInstant();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow(10, 10L, 10.5, 10.5D, "foo", true, new Period(0).plusDays(10),
+              lt, ld, ts, "1")
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  private static final String missingColContents[] = {
+    "id,amount,start_date",
+    "1,20,2019-01-01",
+    "2",
+    "3,30"
+  };
+
+  /**
+   * Demonstrate that CSV works for a schema with nullable types when columns
+   * are missing (there is no comma to introduce an empty field in the data).
+   */
+  @Test
+  public void testMissingColsNullable() throws Exception {
+    String tableName = "missingColsNullable";
+    String tablePath = buildTable(tableName, missingColContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, amount int, start_date date" +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("id", MinorType.INT)
+          .addNullable("amount", MinorType.INT)
+          .addNullable("start_date", MinorType.DATE)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow(1, 20, new LocalDate(2019, 1, 1))
+          .addRow(2, null, null)
+          .addRow(3, 30, null)
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  private static final String blankColContents[] = {
+    "id,amount,start_date",
+    "1,20,2019-01-01",
+    "2, ,",    // Spaces intentional
+    "3, 30 ,"  // Spaces intentional
+  };
+
+  /**
+   * Demonstrate that CSV uses a comma to introduce a column,
+   * even if that column has no data. In this case, CSV assumes the
+   * value of the column is a blank string.
+   * <p>
+   * Without a schema, such a blank value cannot be converted to a number
+   * or date column, even a nullable one, because a blank string is neither
+   * a valid number nor a valid date.
+   */
+  @Test
+  public void testBlankCols() throws Exception {
+    String tableName = "blankCols";
+    String tablePath = buildTable(tableName, blankColContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("id", MinorType.VARCHAR)
+          .add("amount", MinorType.VARCHAR)
+          .add("start_date", MinorType.VARCHAR)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow("1", "20", "2019-01-01")
+          .addRow("2", " ", "")
+          .addRow("3", " 30 ", "")
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  /**
+   * Use the same data set as the tests above, but apply a schema to do type
+   * conversion. Blank columns become 0 for non-nullable numeric columns and
+   * NULL for nullable columns.
+   */
+  @Test
+  public void testBlankColsWithSchema() throws Exception {
+    String tableName = "blankColsSchema";
+    String tablePath = buildTable(tableName, blankColContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, amount int not null, start_date date" +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("id", MinorType.INT)
+          .add("amount", MinorType.INT)
+          .addNullable("start_date", MinorType.DATE)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow(1, 20, new LocalDate(2019, 1, 1))
+          .addRow(2, 0, null)
+          .addRow(3, 30, null)
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  /**
+   * As above, but with a nullable numeric column. Here, by default,
+   * blank values become nulls.
+   */
+  @Test
+  public void testBlankColsWithNullableSchema() throws Exception {
+    String tableName = "blankColsNullableSchema";
+    String tablePath = buildTable(tableName, blankColContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, amount int, start_date date" +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("id", MinorType.INT)
+          .addNullable("amount", MinorType.INT)
+          .addNullable("start_date", MinorType.DATE)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow(1, 20, new LocalDate(2019, 1, 1))
+          .addRow(2, null, null)
+          .addRow(3, 30, null)
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
+
+  /**
+   * As above, but with a non-nullable numeric column with a default
+   * value.
+   */
+  @Test
+  public void testBlankColsWithDefaultValue() throws Exception {
+    String tableName = "blankColsDefaultValue";
+    String tablePath = buildTable(tableName, blankColContents);
+
+    try {
+      enableSchemaSupport();
+      String sql = "create or replace schema (" +
+          "id int not null, amount int not null default '-1', start_date date" +
+          ") for table %s";
+      run(sql, tablePath);
+      sql = "SELECT * FROM " + tablePath + "ORDER BY id";
+      RowSet actual = client.queryBuilder().sql(sql).rowSet();
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .add("id", MinorType.INT)
+          .add("amount", MinorType.INT)
+          .addNullable("start_date", MinorType.DATE)
+          .buildSchema();
+      RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
+          .addRow(1, 20, new LocalDate(2019, 1, 1))
+          .addRow(2, -1, null)
+          .addRow(3, 30, null)
+          .build();
+      RowSetUtilities.verify(expected, actual);
+    } finally {
+      resetSchemaSupport();
+    }
+  }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
index 57ba711..542ad73 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
@@ -118,7 +118,7 @@ public class ClusterTest extends DrillTest {
   }
 
   public static void run(String query, Object... args) throws Exception {
-    client.queryBuilder().sql(query, args).run( );
+    client.queryBuilder().sql(query, args).run();
   }
 
   public QueryBuilder queryBuilder( ) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetBuilder.java
index ae120d8..91eb7d0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetBuilder.java
@@ -83,6 +83,7 @@ public final class RowSetBuilder {
    * set as <br><tt>add(10, "foo");</tt><br> Values of arrays can be expressed as a Java
    * array. A schema of (a:int, b:int[]) can be set as<br>
    * <tt>add(10, new int[] {100, 200});</tt><br>
+   *
    * @param values column values in column index order
    * @return this builder
    * @throws IllegalStateException if the batch, or any vector in the batch,
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetUtilities.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetUtilities.java
index 7c332a1..7fe606f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetUtilities.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/RowSetUtilities.java
@@ -30,6 +30,9 @@ import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.apache.drill.exec.vector.accessor.ValueType;
 import org.bouncycastle.util.Arrays;
 import org.joda.time.Duration;
+import org.joda.time.Instant;
+import org.joda.time.LocalDate;
+import org.joda.time.LocalTime;
 import org.joda.time.Period;
 
 /**
@@ -73,6 +76,10 @@ public class RowSetUtilities {
     writer.setObject(testDataFromInt(writer.valueType(), field.getType(), value));
   }
 
+  /**
+   * Create a test value that can be passed to setObject(). The value matches
+   * the value type of the target writer.
+   */
   public static Object testDataFromInt(ValueType valueType, MajorType dataType, int value) {
     switch (valueType) {
     case BYTES:
@@ -102,6 +109,12 @@ public class RowSetUtilities {
       return BigDecimal.valueOf(value, dataType.getScale());
     case PERIOD:
       return periodFromInt(dataType.getMinorType(), value);
+    case DATE:
+      return new LocalDate(value);
+    case TIME:
+      return new LocalTime(value);
+    case TIMESTAMP:
+      return new Instant(value);
     default:
       throw new IllegalStateException("Unknown writer type: " + valueType);
     }
@@ -158,6 +171,9 @@ public class RowSetUtilities {
      case LONG:
      case STRING:
      case DECIMAL:
+     case DATE:
+     case TIME:
+     case TIMESTAMP:
        assertEquals(msg, expectedObj, actualObj);
        break;
      case PERIOD: {
@@ -245,4 +261,8 @@ public class RowSetUtilities {
     new RowSetComparison(expected).verifyAndClearAll(actual);
   }
 
+  public static BigDecimal dec(String value) {
+    return new BigDecimal(value);
+  }
+
 }
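
For context, a minimal sketch (not part of this patch) of the new helpers in
use; ValueType, Types, and MinorType are the classes already imported above:

    // dec() is shorthand for new BigDecimal(String).
    BigDecimal d = RowSetUtilities.dec("12.34");
    // testDataFromInt() now also yields date/time objects keyed off an int:
    Object date = RowSetUtilities.testDataFromInt(
        ValueType.DATE, Types.required(MinorType.DATE), 100);
    // date is new org.joda.time.LocalDate(100), and assertEqualValues()
    // now compares DATE/TIME/TIMESTAMP values with plain equals().
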
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestColumnConverter.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestColumnConverter.java
index 22ede4d..b5635fe 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestColumnConverter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestColumnConverter.java
@@ -24,14 +24,19 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.fail;
 
 import java.math.BigDecimal;
+import java.util.HashMap;
+import java.util.Map;
 
 import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.accessor.InvalidConversionError;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
+import org.apache.drill.exec.vector.accessor.convert.AbstractConvertFromString;
 import org.apache.drill.exec.vector.accessor.convert.AbstractWriteConverter;
 import org.apache.drill.exec.vector.accessor.convert.ColumnConversionFactory;
 import org.apache.drill.exec.vector.accessor.convert.StandardConversions;
@@ -197,10 +202,16 @@ public class TestColumnConverter extends SubOperatorTest {
    */
   private static class ConversionTestFixture implements ColumnConversionFactory {
 
-    private TupleMetadata inputSchema;
+    private final TupleMetadata inputSchema;
+    private final Map<String, String> properties;
 
     public ConversionTestFixture(TupleMetadata inputSchema) {
+      this(inputSchema, null);
+    }
+
+    public ConversionTestFixture(TupleMetadata inputSchema, Map<String, String> props) {
       this.inputSchema = inputSchema;
+      this.properties = props;
     }
 
     @Override
@@ -208,8 +219,11 @@ public class TestColumnConverter extends SubOperatorTest {
       ColumnMetadata inputCol = inputSchema.metadata(baseWriter.schema().name());
       assertNotNull(inputCol);
       ConversionDefn defn = StandardConversions.analyze(inputCol, baseWriter.schema());
+      if (defn.type == ConversionType.NONE) {
+        return null;
+      }
       assertNotNull(defn.conversionClass);
-      return StandardConversions.newInstance(defn.conversionClass, baseWriter);
+      return StandardConversions.newInstance(defn.conversionClass, baseWriter, properties);
     }
   }
 
@@ -230,7 +244,7 @@ public class TestColumnConverter extends SubOperatorTest {
         .add("bi", MinorType.BIGINT)
         .add("fl", MinorType.FLOAT4)
         .add("db", MinorType.FLOAT8)
-         .buildSchema();
+        .buildSchema();
     TupleMetadata inputSchema = new SchemaBuilder()
         .add("ti", MinorType.VARCHAR)
         .add("si", MinorType.VARCHAR)
@@ -643,8 +657,9 @@ public class TestColumnConverter extends SubOperatorTest {
   }
 
   /**
-   * Test conversion two-from Java-style booleans.
+   * Test conversion to/from Java-style Booleans.
    */
+
   @Test
   public void testBooleanToFromString() {
 
@@ -662,16 +677,86 @@ public class TestColumnConverter extends SubOperatorTest {
         new ConversionTestFixture(inputSchema))
         .addRow("true", false)
         .addRow("false", true)
+        .addRow("TRUE", false)
+        .addRow("FALSE", true)
         .build();
 
     final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
         .addRow(true, "false")
         .addRow(false, "true")
+        .addRow(true, "false")
+        .addRow(false, "true")
         .build();
 
     RowSetUtilities.verify(expected, actual);
   }
 
+  private static BigDecimal dec(String value) {
+    return new BigDecimal(value);
+  }
+
+  @Test
+  public void testDecimalFromString() {
+
+    TupleMetadata outputSchema = new SchemaBuilder()
+        .add("id", MinorType.INT)
+        .add("dec", MinorType.VARDECIMAL, 4, 2)
+        .buildSchema();
+
+    TupleMetadata inputSchema = new SchemaBuilder()
+        .add("id", MinorType.INT)
+        .add("dec", MinorType.VARCHAR)
+        .buildSchema();
+
+    RowSet actual = new RowSetBuilder(fixture.allocator(), outputSchema,
+        new ConversionTestFixture(inputSchema))
+        .addRow(1, "0")
+        .addRow(2, "-0")
+        .addRow(3, "0.12")
+        .addRow(4, "1.23")
+        .addRow(5, "12.34")
+        // Rounding occurs for VARDECIMAL
+        .addRow(6, "23.456")
+        .addRow(7, "-99.99")
+        .build();
+
+    final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
+        .addRow(1, dec("0"))
+        .addRow(2, dec("-0"))
+        .addRow(3, dec("0.12"))
+        .addRow(4, dec("1.23"))
+        .addRow(5, dec("12.34"))
+        .addRow(6, dec("23.46"))
+        .addRow(7, dec("-99.99"))
+        .build();
+
+    RowSetUtilities.verify(expected, actual);
+  }
+
+  @Test
+  public void testDecimalOverflow() {
+
+    TupleMetadata outputSchema = new SchemaBuilder()
+        .add("id", MinorType.INT)
+        .add("dec", MinorType.VARDECIMAL, 4, 2)
+        .buildSchema();
+
+    TupleMetadata inputSchema = new SchemaBuilder()
+        .add("id", MinorType.INT)
+        .add("dec", MinorType.VARCHAR)
+        .buildSchema();
+
+    RowSetBuilder rsBuilder = new RowSetBuilder(fixture.allocator(), outputSchema,
+        new ConversionTestFixture(inputSchema));
+    try {
+      rsBuilder.addRow(1, "1234567.89");
+      fail();
+    } catch (UserException e) {
+      // Expected
+    }
+    rsBuilder.build().clear();
+  }
+
   private static void expect(ConversionType type, ConversionDefn defn) {
     assertEquals(type, defn.type);
   }
@@ -849,4 +934,99 @@ public class TestColumnConverter extends SubOperatorTest {
     expect(ConversionType.EXPLICIT, StandardConversions.analyze(dayCol, stringCol));
     expect(ConversionType.EXPLICIT, StandardConversions.analyze(stringCol, dayCol));
   }
+
+  /**
+   * Test the properties for how to handle blanks on string-to-number
+   * conversions.
+   */
+
+  @Test
+  public void testBlankOptions() {
+
+    // Nullable
+
+    try {
+      doTestBlanks(DataMode.OPTIONAL, null, null, null);
+    } catch (InvalidConversionError e) {
+      // Expected
+    }
+    doTestBlanks(DataMode.OPTIONAL, AbstractConvertFromString.BLANK_AS_NULL,
+        null, null);
+    doTestBlanks(DataMode.OPTIONAL, AbstractConvertFromString.BLANK_AS_ZERO,
+        null, 0);
+    doTestBlanks(DataMode.OPTIONAL, AbstractConvertFromString.BLANK_AS_SKIP,
+        null, null);
+
+    // Non-nullable
+
+    try {
+      doTestBlanks(DataMode.REQUIRED, null, null, 99);
+    } catch (InvalidConversionError e) {
+      // Expected
+    }
+    doTestBlanks(DataMode.REQUIRED, AbstractConvertFromString.BLANK_AS_NULL,
+        null, 20);
+    doTestBlanks(DataMode.REQUIRED, AbstractConvertFromString.BLANK_AS_ZERO,
+        null, 0);
+    doTestBlanks(DataMode.REQUIRED, AbstractConvertFromString.BLANK_AS_SKIP,
+        null, 0);
+
+    // Property on column
+
+    doTestBlanks(DataMode.REQUIRED, null,
+        AbstractConvertFromString.BLANK_AS_NULL, 20);
+    doTestBlanks(DataMode.REQUIRED, null,
+        AbstractConvertFromString.BLANK_AS_ZERO, 0);
+
+    // Properties on both: column takes precedence
+
+    doTestBlanks(DataMode.REQUIRED, AbstractConvertFromString.BLANK_AS_ZERO,
+        AbstractConvertFromString.BLANK_AS_NULL, 20);
+    doTestBlanks(DataMode.REQUIRED, AbstractConvertFromString.BLANK_AS_NULL,
+        AbstractConvertFromString.BLANK_AS_ZERO, 0);
+  }
+
+  private void doTestBlanks(DataMode mode, String frameworkOption, String colOption, Integer value) {
+    TupleMetadata outputSchema = new SchemaBuilder()
+        .add("col", MinorType.INT, mode)
+        .buildSchema();
+    ColumnMetadata colSchema = outputSchema.metadata("col");
+    if (colOption != null) {
+      colSchema.setProperty(ColumnMetadata.BLANK_AS_PROP, colOption);
+    }
+    colSchema.setProperty(ColumnMetadata.DEFAULT_VALUE_PROP, "20");
+
+    TupleMetadata inputSchema = new SchemaBuilder()
+        .addNullable("col", MinorType.VARCHAR)
+        .buildSchema();
+
+    Map<String, String> props = null;
+    if (frameworkOption != null) {
+      props = new HashMap<>();
+      props.put(AbstractConvertFromString.BLANK_ACTION_PROP, frameworkOption);
+    }
+    RowSetBuilder builder = new RowSetBuilder(fixture.allocator(), outputSchema,
+        new ConversionTestFixture(inputSchema, props));
+    try {
+      builder
+        .addSingleCol("")
+        .addSingleCol("  ")
+        .addSingleCol("10")
+        .addSingleCol(" 11  ");
+    }
+    catch (Exception e) {
+      builder.build().clear();
+      throw e;
+    }
+    SingleRowSet actual = builder.build();
+
+    final SingleRowSet expected = fixture.rowSetBuilder(outputSchema)
+        .addSingleCol(value)
+        .addSingleCol(value)
+        .addSingleCol(10)
+        .addSingleCol(11)
+        .build();
+
+    RowSetUtilities.verify(expected, actual);
+  }
 }
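
For context, a minimal sketch (not part of this patch) of the two places the
blank-handling option can be set, as exercised by doTestBlanks() above;
colSchema and the fixture are assumed:

    // Column-level property on the output schema:
    colSchema.setProperty(ColumnMetadata.BLANK_AS_PROP,
        AbstractConvertFromString.BLANK_AS_ZERO);
    // Framework-level property handed to the conversion factory:
    Map<String, String> props = new HashMap<>();
    props.put(AbstractConvertFromString.BLANK_ACTION_PROP,
        AbstractConvertFromString.BLANK_AS_NULL);
    // With both set, the column property wins: a blank "" converts to 0.
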
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/DummyWriterTest.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestDummyWriter.java
similarity index 99%
rename from exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/DummyWriterTest.java
rename to exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestDummyWriter.java
index 1329a86..5f8dd16 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/DummyWriterTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestDummyWriter.java
@@ -34,7 +34,7 @@ import org.apache.drill.exec.vector.accessor.writer.MapWriter;
 import org.apache.drill.test.SubOperatorTest;
 import org.junit.Test;
 
-public class DummyWriterTest extends SubOperatorTest {
+public class TestDummyWriter extends SubOperatorTest {
 
   /**
    * Test only, bare-bones tuple writer used to gather the dummy
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestFillEmpties.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestFillEmpties.java
index ac18c62..1ecb1dc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestFillEmpties.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestFillEmpties.java
@@ -18,12 +18,15 @@
 package org.apache.drill.test.rowSet.test;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import org.apache.drill.categories.RowSetTests;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.accessor.ArrayReader;
@@ -89,30 +92,42 @@ public class TestFillEmpties extends SubOperatorTest {
     testFillEmpties(DataMode.REPEATED);
   }
 
+  private boolean isSupported(MinorType type) {
+    switch (type) {
+    case DECIMAL28DENSE:
+    case DECIMAL38DENSE:
+      // Not yet supported, now deprecated
+      return false;
+    case GENERIC_OBJECT:
+    case LATE:
+    case LIST:
+    case MAP:
+    case NULL:
+    case UNION:
+      // Writer N/A
+      return false;
+    case FIXEDBINARY:
+    case FIXEDCHAR:
+    case FIXED16CHAR:
+    case MONEY:
+    case TIMESTAMPTZ:
+    case TIMETZ:
+      // Not supported in Drill
+      return false;
+    case BIT:
+      // Requires special test
+      return false;
+    default:
+      return true;
+    }
+  }
+
   private void testFillEmpties(DataMode mode) {
     for (MinorType type : MinorType.values()) {
+      if (! isSupported(type)) {
+        continue;
+      }
       switch (type) {
-      case DECIMAL28DENSE:
-      case DECIMAL38DENSE:
-        // Not yet supported
-        break;
-      case GENERIC_OBJECT:
-      case LATE:
-      case LIST:
-      case MAP:
-      case NULL:
-      case UNION:
-        // Writer N/A
-        break;
-      case BIT:
-      case FIXEDBINARY:
-      case FIXEDCHAR:
-      case FIXED16CHAR:
-      case MONEY:
-      case TIMESTAMPTZ:
-      case TIMETZ:
-        // Not supported in Drill
-        break;
       case DECIMAL18:
       case DECIMAL28SPARSE:
       case DECIMAL9:
@@ -171,9 +186,7 @@ public class TestFillEmpties extends SubOperatorTest {
     RowSetReader reader = result.reader();
     ScalarReader colReader = reader.scalar(0);
     MinorType type = majorType.getMinorType();
-    boolean isVariable = (type == MinorType.VARCHAR ||
-                          type == MinorType.VAR16CHAR ||
-                          type == MinorType.VARBINARY);
+    boolean isVariable = Types.isVarWidthType(type);
     for (int i = 0; i < ROW_COUNT; i++) {
       assertTrue(reader.next());
       if (i % 5 != 0) {
@@ -244,4 +257,176 @@ public class TestFillEmpties extends SubOperatorTest {
     }
     result.clear();
   }
+
+  /**
+   * Test each vector type to ensure it supports setting a default value.
+   * Sets the default directly on the writer to avoid the need to serialize
+   * the default value to string, which is awkward for some types when
+   * using the generic "test value from int" tool.
+   */
+
+  @Test
+  public void testDefaultValue() {
+    doTestDefaultValue(Types.required(MinorType.VARCHAR));
+    for (MinorType type : MinorType.values()) {
+      if (! isSupported(type)) {
+        continue;
+      }
+      switch (type) {
+      case DECIMAL18:
+      case DECIMAL28SPARSE:
+      case DECIMAL9:
+      case DECIMAL38SPARSE:
+      case VARDECIMAL:
+        MajorType majorType = MajorType.newBuilder()
+          .setMinorType(type)
+          .setMode(DataMode.REQUIRED)
+          .setPrecision(9)
+          .setScale(2)
+          .build();
+        doTestDefaultValue(majorType);
+        break;
+      default:
+        doTestDefaultValue(Types.required(type));
+      }
+    }
+  }
+
+  private void doTestDefaultValue(MajorType majorType) {
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", majorType)
+        .buildSchema();
+    ExtendableRowSet rs = fixture.rowSet(schema);
+    RowSetWriter writer = rs.writer();
+    ScalarWriter colWriter = writer.scalar(0);
+    ValueType valueType = colWriter.extendedType();
+    Object defaultValue = RowSetUtilities.testDataFromInt(valueType, majorType, 100);
+    colWriter.setDefaultValue(defaultValue);
+    for (int i = 0; i < ROW_COUNT; i++) {
+      if (i % 5 == 0) {
+        colWriter.setObject(RowSetUtilities.testDataFromInt(valueType, majorType, i));
+      }
+      writer.save();
+    }
+    SingleRowSet result = writer.done();
+    RowSetReader reader = result.reader();
+    ScalarReader colReader = reader.scalar(0);
+    for (int i = 0; i < ROW_COUNT; i++) {
+      assertTrue(reader.next());
+      Object actual = colReader.getValue();
+      Object expected = i % 5 == 0 ? RowSetUtilities.testDataFromInt(valueType, majorType, i) : defaultValue;
+      RowSetUtilities.assertEqualValues(
+          majorType.toString().replace('\n', ' ') + "[" + i + "]",
+          valueType, expected, actual);
+    }
+    result.clear();
+  }
+
+  /**
+   * Test the more typical case in which the default value is set in the
+   * column metadata. The reader mechanism will automatically set the default
+   * for the column writer from the (properly formed) default value in the
+   * column metadata.
+   */
+
+  @Test
+  public void testDefaultInSchema() {
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.INT)
+        .buildSchema();
+    schema.metadata("a").setDefaultValue("11");
+    ExtendableRowSet rs = fixture.rowSet(schema);
+    RowSetWriter writer = rs.writer();
+    ScalarWriter colWriter = writer.scalar(0);
+    ValueType valueType = colWriter.extendedType();
+    for (int i = 0; i < ROW_COUNT; i++) {
+      if (i % 5 == 0) {
+        colWriter.setInt(i);
+      }
+      writer.save();
+    }
+    SingleRowSet result = writer.done();
+    RowSetReader reader = result.reader();
+    ScalarReader colReader = reader.scalar(0);
+    Object defaultValue = schema.metadata("a").decodeDefaultValue();
+    assertNotNull(defaultValue);
+    for (int i = 0; i < ROW_COUNT; i++) {
+      assertTrue(reader.next());
+      Object actual = colReader.getValue();
+      Object expected = i % 5 == 0 ? i : defaultValue;
+      RowSetUtilities.assertEqualValues(
+          MinorType.INT.toString() + "[" + i + "]",
+          valueType, expected, actual);
+    }
+    result.clear();
+  }
+
+  @Test
+  public void testInvalidDefault() {
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.INT)
+        .buildSchema();
+    schema.metadata("a").setDefaultValue("bogus");
+    ExtendableRowSet rs = fixture.rowSet(schema);
+    try {
+      rs.writer();
+      fail();
+    } catch (IllegalArgumentException e) {
+      // Expected
+    }
+    rs.clear();
+  }
+
+  /**
+   * Bit vector is special; it packs 8 values per byte. Use custom
+   * logic so that runs of empty values span entire bytes.
+   */
+  @Test
+  public void testBitFillEmpties() {
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.BIT)
+        .buildSchema();
+    ExtendableRowSet rs = fixture.rowSet(schema);
+    RowSetWriter writer = rs.writer();
+    ScalarWriter colWriter = writer.scalar(0);
+    for (int i = 0; i < ROW_COUNT; i++) {
+      if (i % 43 == 0) {
+        colWriter.setInt(1);
+      }
+      writer.save();
+    }
+    SingleRowSet result = writer.done();
+    RowSetReader reader = result.reader();
+    ScalarReader colReader = reader.scalar(0);
+    for (int i = 0; i < ROW_COUNT; i++) {
+      assertTrue(reader.next());
+      assertEquals(i % 43 == 0 ? 1 : 0, colReader.getInt());
+    }
+    result.clear();
+  }
+
+  @Test
+  public void testBitDefaultValue() {
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.BIT)
+        .buildSchema();
+    ExtendableRowSet rs = fixture.rowSet(schema);
+    RowSetWriter writer = rs.writer();
+    ScalarWriter colWriter = writer.scalar(0);
+    colWriter.setDefaultValue(true);
+    for (int i = 0; i < ROW_COUNT; i++) {
+      if (i % 43 == 0) {
+        colWriter.setInt(0);
+      }
+      writer.save();
+    }
+    SingleRowSet result = writer.done();
+    RowSetReader reader = result.reader();
+    ScalarReader colReader = reader.scalar(0);
+    for (int i = 0; i < ROW_COUNT; i++) {
+      assertTrue(reader.next());
+      assertEquals(i % 43 == 0 ? 0 : 1, colReader.getInt());
+    }
+    result.clear();
+  }
 }
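
For context, a minimal sketch (not part of this patch) of the writer-level
contract these tests exercise; the RowSetWriter is assumed:

    ScalarWriter colWriter = writer.scalar(0);  // required INT column
    colWriter.setDefaultValue(100);             // type must match setValue()
    writer.save();                              // nothing set: filled with 100
    colWriter.setInt(42);
    writer.save();                              // set normally: 42
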
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestScalarAccessors.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestScalarAccessors.java
index fe0720d..56c6af2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestScalarAccessors.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestScalarAccessors.java
@@ -21,19 +21,24 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.apache.drill.test.rowSet.RowSetUtilities.dec;
 
 import java.math.BigDecimal;
 import java.util.Arrays;
 
 import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.DateUtilities;
+import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.accessor.ArrayReader;
 import org.apache.drill.exec.vector.accessor.ScalarReader;
+import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.apache.drill.exec.vector.accessor.ValueType;
 import org.apache.drill.test.SubOperatorTest;
 import org.apache.drill.test.rowSet.RowSetReader;
@@ -43,6 +48,7 @@ import org.joda.time.LocalDate;
 import org.joda.time.LocalTime;
 import org.joda.time.Period;
 import org.apache.drill.test.rowSet.RowSet.SingleRowSet;
+import org.apache.drill.test.rowSet.RowSetBuilder;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
@@ -1639,27 +1645,128 @@ public class TestScalarAccessors extends SubOperatorTest {
     SingleRowSet rs = fixture.rowSetBuilder(schema)
         .addSingleCol(true)
         .addSingleCol(false)
+        .addSingleCol(0)
         .addSingleCol(1)
         .addSingleCol(2)
         .addSingleCol(3)
         .build();
-    assertEquals(5, rs.rowCount());
+    assertEquals(6, rs.rowCount());
 
     RowSetReader reader = rs.reader();
     ScalarReader colReader = reader.scalar(0);
 
     assertTrue(reader.next());
+    assertEquals(true, colReader.getBoolean());
     assertEquals(1, colReader.getInt());
     assertTrue(reader.next());
+    assertEquals(false, colReader.getBoolean());
     assertEquals(0, colReader.getInt());
     assertTrue(reader.next());
+    assertEquals(false, colReader.getBoolean());
+    assertEquals(0, colReader.getInt());
+    assertTrue(reader.next());
+    assertEquals(true, colReader.getBoolean());
     assertEquals(1, colReader.getInt());
     assertTrue(reader.next());
-    assertEquals(0, colReader.getInt());
+    assertEquals(true, colReader.getBoolean());
+    assertEquals(1, colReader.getInt());
     assertTrue(reader.next());
+    assertEquals(true, colReader.getBoolean());
     assertEquals(1, colReader.getInt());
 
     assertFalse(reader.next());
     rs.clear();
   }
+
+  /**
+   * The bit reader/writer are special and use the BitVector directly.
+   * Ensure that resize works in this special case.
+   */
+
+  @Test
+  public void testBitResize() {
+    TupleMetadata schema = new SchemaBuilder()
+        .add("col", MinorType.BIT)
+        .buildSchema();
+
+    RowSetBuilder rsb = new RowSetBuilder(fixture.allocator(), schema, 100);
+    ScalarWriter bitWriter = rsb.writer().scalar(0);
+    for (int i = 0; i < ValueVector.MAX_ROW_COUNT; i++) {
+      bitWriter.setBoolean((i % 5) == 0);
+      rsb.writer().save();
+    }
+
+    SingleRowSet rs = rsb.build();
+    RowSetReader reader = rs.reader();
+    ScalarReader bitReader = reader.scalar(0);
+    for (int i = 0; i < ValueVector.MAX_ROW_COUNT; i++) {
+      reader.next();
+      assertEquals((i % 5) == 0, bitReader.getBoolean());
+    }
+    rs.clear();
+  }
+
+  private static String repeat(String str, int n) {
+    StringBuilder buf = new StringBuilder();
+    for (int i = 0; i < n; i++) {
+      buf.append(str);
+    }
+    return buf.toString();
+  }
+
+  @Test
+  public void testVarDecimalRange() {
+    TupleMetadata schema = new SchemaBuilder()
+        .add("col", MinorType.VARDECIMAL, 38, 4)
+        .buildSchema();
+
+    String big = repeat("9", 34) + ".9999";
+    SingleRowSet rs = new RowSetBuilder(fixture.allocator(), schema)
+        .addSingleCol(dec("0"))
+        .addSingleCol(dec("-0.0000"))
+        .addSingleCol(dec("0.0001"))
+        .addSingleCol(dec("-0.0001"))
+        .addSingleCol(dec(big))
+        .addSingleCol(dec("-" + big))
+        .addSingleCol(dec("1234.56789"))
+        .build();
+
+    RowSetReader reader = rs.reader();
+    ScalarReader decReader = reader.scalar(0);
+    assertTrue(reader.next());
+    assertEquals(dec("0.0000"), decReader.getDecimal());
+    assertTrue(reader.next());
+    assertEquals(dec("0.0000"), decReader.getDecimal());
+    assertTrue(reader.next());
+    assertEquals(dec("0.0001"), decReader.getDecimal());
+    assertTrue(reader.next());
+    assertEquals(dec("-0.0001"), decReader.getDecimal());
+    assertTrue(reader.next());
+    assertEquals(dec(big), decReader.getDecimal());
+    assertTrue(reader.next());
+    assertEquals(dec("-" + big), decReader.getDecimal());
+    assertTrue(reader.next());
+    assertEquals(dec("1234.5679"), decReader.getDecimal());
+    assertFalse(reader.next());
+    rs.clear();
+  }
+
+  @Test
+  public void testVarDecimalOverflow() {
+    TupleMetadata schema = new SchemaBuilder()
+        .add("col", MinorType.VARDECIMAL, 8, 4)
+        .buildSchema();
+
+    RowSetBuilder rsb = new RowSetBuilder(fixture.allocator(), schema, 100);
+    try {
+
+      // With rounding due to scale, value exceeds allowed precision.
+
+      rsb.addSingleCol(dec("9999.99999"));
+      fail();
+    } catch (UserException e) {
+      // Expected
+    }
+    rsb.build().clear();
+  }
 }
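
For context, a minimal sketch (not part of this patch) of the rounding and
overflow behavior the two VARDECIMAL tests pin down, in plain BigDecimal terms:

    // The writer rounds to the declared scale with HALF_UP, then checks precision.
    BigDecimal in = new BigDecimal("1234.56789");
    BigDecimal stored = in.setScale(4, RoundingMode.HALF_UP);  // 1234.5679
    // "9999.99999" rounds to 10000.0000 (nine digits), so it overflows a
    // VARDECIMAL(8, 4) column and the writer raises an error.
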
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestSchemaBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestSchemaBuilder.java
index 9950197..724bec4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestSchemaBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestSchemaBuilder.java
@@ -21,6 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -317,11 +318,11 @@ public class TestSchemaBuilder extends DrillTest {
   @Test
   public void testDecimal() {
     TupleMetadata schema = new SchemaBuilder()
-        .addDecimal("a", MinorType.DECIMAL18, DataMode.OPTIONAL, 5, 2)
-        .addDecimal("b", MinorType.DECIMAL18, DataMode.REQUIRED, 6, 3)
-        .addDecimal("c", MinorType.DECIMAL18, DataMode.REPEATED, 7, 4)
+        .addNullable("a", MinorType.DECIMAL18, 5, 2)
+        .add("b", MinorType.DECIMAL18, 6, 3)
+        .addArray("c", MinorType.DECIMAL18, 7, 4)
         .addMap("m")
-          .addDecimal("d", MinorType.DECIMAL18, DataMode.OPTIONAL, 8, 1)
+          .addNullable("d", MinorType.DECIMAL18, 8, 1)
           .resumeSchema()
         .buildSchema();
 
@@ -348,6 +349,105 @@ public class TestSchemaBuilder extends DrillTest {
     assertEquals(1, d.scale());
   }
 
+  @Test
+  public void testVarDecimal() {
+    TupleMetadata schema = new SchemaBuilder()
+        .addNullable("a", MinorType.VARDECIMAL, 5, 2)
+        .add("b", MinorType.VARDECIMAL, 6, 3)
+        .addArray("c", MinorType.VARDECIMAL, 7, 4)
+        .add("e", MinorType.VARDECIMAL)
+        .add("g", MinorType.VARDECIMAL, 38, 4)
+        .addMap("m")
+          .addNullable("d", MinorType.VARDECIMAL, 8, 1)
+          .add("f", MinorType.VARDECIMAL)
+          .resumeSchema()
+        .buildSchema();
+
+    // Use name methods, just for variety
+
+    ColumnMetadata a = schema.metadata("a");
+    assertEquals(MinorType.VARDECIMAL, a.type());
+    assertEquals(DataMode.OPTIONAL, a.mode());
+    assertEquals(5, a.precision());
+    assertEquals(2, a.scale());
+
+    ColumnMetadata b = schema.metadata("b");
+    assertEquals(MinorType.VARDECIMAL, b.type());
+    assertEquals(DataMode.REQUIRED, b.mode());
+    assertEquals(6, b.precision());
+    assertEquals(3, b.scale());
+
+    ColumnMetadata c = schema.metadata("c");
+    assertEquals(MinorType.VARDECIMAL, c.type());
+    assertEquals(DataMode.REPEATED, c.mode());
+    assertEquals(7, c.precision());
+    assertEquals(4, c.scale());
+
+    ColumnMetadata e = schema.metadata("e");
+    assertEquals(MinorType.VARDECIMAL, e.type());
+    assertEquals(DataMode.REQUIRED, e.mode());
+    assertEquals(38, e.precision());
+    assertEquals(0, e.scale());
+
+    ColumnMetadata g = schema.metadata("g");
+    assertEquals(MinorType.VARDECIMAL, g.type());
+    assertEquals(DataMode.REQUIRED, g.mode());
+    assertEquals(38, g.precision());
+    assertEquals(4, g.scale());
+
+    ColumnMetadata d = schema.metadata("m").mapSchema().metadata("d");
+    assertEquals(MinorType.VARDECIMAL, d.type());
+    assertEquals(DataMode.OPTIONAL, d.mode());
+    assertEquals(8, d.precision());
+    assertEquals(1, d.scale());
+
+    ColumnMetadata f = schema.metadata("m").mapSchema().metadata("f");
+    assertEquals(MinorType.VARDECIMAL, f.type());
+    assertEquals(DataMode.REQUIRED, f.mode());
+    assertEquals(38, f.precision());
+    assertEquals(0, f.scale());
+  }
+
+  @Test
+  public void testVarDecimalOverflow() {
+
+    try {
+      new SchemaBuilder()
+        .add("a", MinorType.VARDECIMAL, 39, 0)
+        .buildSchema();
+      fail();
+    } catch (IllegalArgumentException e) {
+      // Expected
+    }
+
+    try {
+      new SchemaBuilder()
+        .add("a", MinorType.VARDECIMAL, -1, 0)
+        .buildSchema();
+      fail();
+    } catch (IllegalArgumentException e) {
+      // Expected
+    }
+
+    try {
+      new SchemaBuilder()
+        .add("a", MinorType.VARDECIMAL, 38, -1)
+        .buildSchema();
+      fail();
+    } catch (IllegalArgumentException e) {
+      // Expected
+    }
+
+    try {
+      new SchemaBuilder()
+        .add("a", MinorType.VARDECIMAL, 5, 6)
+        .buildSchema();
+      fail();
+    } catch (IllegalArgumentException e) {
+      // Expected
+    }
+  }
+
   /**
    * Verify that the map-in-map plumbing works.
    */
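
For context, a minimal sketch (not part of this patch) of the VARDECIMAL
defaults that testVarDecimal asserts:

    TupleMetadata schema = new SchemaBuilder()
        .add("e", MinorType.VARDECIMAL)   // no precision or scale given
        .buildSchema();
    ColumnMetadata e = schema.metadata("e");
    // Precision defaults to the type's maximum (38); scale defaults to 0.
    assertEquals(38, e.precision());
    assertEquals(0, e.scale());
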
diff --git a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RequestIdMap.java b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RequestIdMap.java
index 2d28159..83380e2 100644
--- a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RequestIdMap.java
+++ b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/RequestIdMap.java
@@ -22,10 +22,10 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.exec.proto.UserBitShared.DrillPBError;
+import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
 
 import com.carrotsearch.hppc.IntObjectHashMap;
 import com.carrotsearch.hppc.procedures.IntObjectProcedure;
-import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
 
 import io.netty.buffer.ByteBuf;
 import io.netty.channel.ChannelFuture;
@@ -74,11 +74,10 @@ class RequestIdMap {
     public void apply(int key, RpcOutcome<?> value) {
       try{
         value.setException(exception);
-      }catch(Exception e){
+      }catch (final Exception e){
         logger.warn("Failure while attempting to fail rpc response.", e);
       }
     }
-
   }
 
   public <V> ChannelListenerWithCoordinationId createNewRpcListener(RpcOutcomeListener<V> handler, Class<V> clazz,
@@ -101,6 +100,7 @@ class RequestIdMap {
     final RpcOutcomeListener<T> handler;
     final Class<T> clazz;
     final int coordinationId;
+    @SuppressWarnings("unused")
     final RemoteConnection connection;
 
     public RpcListener(RpcOutcomeListener<T> handler, Class<T> clazz, int coordinationId, RemoteConnection connection) {
@@ -140,15 +140,10 @@ class RequestIdMap {
     }
 
     @Override
-    public Class<T> getOutcomeType() {
-      return clazz;
-    }
+    public Class<T> getOutcomeType() { return clazz; }
 
     @Override
-    public int getCoordinationId() {
-      return coordinationId;
-    }
-
+    public int getCoordinationId() { return coordinationId; }
   }
 
   private RpcOutcome<?> removeFromMap(int coordinationId) {
@@ -165,9 +160,9 @@ class RequestIdMap {
 
   public <V> RpcOutcome<V> getAndRemoveRpcOutcome(int rpcType, int coordinationId, Class<V> clazz) {
 
-    RpcOutcome<?> rpc = removeFromMap(coordinationId);
+    final RpcOutcome<?> rpc = removeFromMap(coordinationId);
     // logger.debug("Got rpc from map {}", rpc);
-    Class<?> outcomeClass = rpc.getOutcomeType();
+    final Class<?> outcomeClass = rpc.getOutcomeType();
 
     if (outcomeClass != clazz) {
       throw new IllegalStateException(String.format(
@@ -178,6 +173,7 @@ class RequestIdMap {
     }
 
     @SuppressWarnings("unchecked")
+    final
     RpcOutcome<V> crpc = (RpcOutcome<V>) rpc;
 
     // logger.debug("Returning casted future");
@@ -187,11 +183,10 @@ class RequestIdMap {
   public void recordRemoteFailure(int coordinationId, DrillPBError failure) {
     // logger.debug("Updating failed future.");
     try {
-      RpcOutcome<?> rpc = removeFromMap(coordinationId);
+      final RpcOutcome<?> rpc = removeFromMap(coordinationId);
       rpc.setException(new UserRemoteException(failure));
-    } catch (Exception ex) {
+    } catch (final Exception ex) {
       logger.warn("Failed to remove from map.  Not a problem since we were updating on failed future.", ex);
     }
   }
-
 }
diff --git a/exec/vector/src/main/codegen/templates/ColumnAccessors.java b/exec/vector/src/main/codegen/templates/ColumnAccessors.java
index 1c9654d..0891e13 100644
--- a/exec/vector/src/main/codegen/templates/ColumnAccessors.java
+++ b/exec/vector/src/main/codegen/templates/ColumnAccessors.java
@@ -34,15 +34,14 @@
       return ValueType.${label?upper_case};
   </#if>
     }
-</#macro>
-<#macro extendedType drillType>
-  <#if drillType == "Time" || drillType == "Date" || drillType == "TimeStamp">
+  <#if drillType == "Date" || drillType == "Time" || drillType == "TimeStamp">
+
     @Override
     public ValueType extendedType() {
-    <#if drillType == "Time">
-      return ValueType.TIME;
-    <#elseif drillType == "Date">
+    <#if drillType == "Date">
       return ValueType.DATE;
+    <#elseif drillType == "Time">
+      return ValueType.TIME;
     <#elseif drillType == "TimeStamp">
       return ValueType.TIMESTAMP;
     <#else>
@@ -50,7 +49,6 @@
       return valueType();
     </#if>
     }
-
   </#if>
 </#macro>
 <#macro build types vectorType accessorType>
@@ -80,14 +78,55 @@
   </#list>
   }
 </#macro>
+<#macro writeBuf buffer drillType minor putType doCast >
+  <#if varWidth>
+      ${buffer}.setBytes(offset, value, 0, len);
+  <#elseif drillType == "Decimal9">
+      ${buffer}.setInt(offset,
+        DecimalUtility.getDecimal9FromBigDecimal(value,
+            type.getScale()));
+  <#elseif drillType == "Decimal18">
+      ${buffer}.setLong(offset,
+          DecimalUtility.getDecimal18FromBigDecimal(value,
+              type.getScale()));
+  <#elseif drillType == "Decimal38Sparse">
+      <#-- Hard to optimize this case. Just use the available tools. -->
+      DecimalUtility.getSparseFromBigDecimal(value, ${buffer},
+          offset, type.getScale(), 6);
+  <#elseif drillType == "Decimal28Sparse">
+      <#-- Hard to optimize this case. Just use the available tools. -->
+      DecimalUtility.getSparseFromBigDecimal(value, ${buffer},
+          offset, type.getScale(), 5);
+  <#elseif drillType == "IntervalYear">
+      ${buffer}.setInt(offset,
+          value.getYears() * 12 + value.getMonths());
+  <#elseif drillType == "IntervalDay">
+      ${buffer}.setInt(offset, value.getDays());
+      ${buffer}.setInt(offset + ${minor.millisecondsOffset}, DateUtilities.periodToMillis(value));
+  <#elseif drillType == "Interval">
+      ${buffer}.setInt(offset, DateUtilities.periodToMonths(value));
+      ${buffer}.setInt(offset + ${minor.daysOffset}, value.getDays());
+      ${buffer}.setInt(offset + ${minor.millisecondsOffset}, DateUtilities.periodToMillis(value));
+  <#elseif drillType == "Float4">
+      ${buffer}.setInt(offset, Float.floatToRawIntBits((float) value));
+  <#elseif drillType == "Float8">
+      ${buffer}.setLong(offset, Double.doubleToRawLongBits(value));
+  <#elseif drillType == "Bit">
+      ${buffer}.setByte(offset, (byte)(value & 0x01));
+  <#else>
+      ${buffer}.set${putType?cap_first}(offset, <#if doCast>(${putType}) </#if>value);
+  </#if>
+</#macro>
 <@copyright />
 
 package org.apache.drill.exec.vector.accessor;
 
 import java.math.BigDecimal;
 import java.math.BigInteger;
+import java.math.RoundingMode;
 
 import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.vector.DateUtilities;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
 import org.apache.drill.exec.vector.*;
@@ -96,6 +135,7 @@ import org.apache.drill.exec.vector.accessor.reader.BaseScalarReader.BaseVarWidt
 import org.apache.drill.exec.vector.accessor.reader.BaseScalarReader.BaseFixedWidthReader;
 import org.apache.drill.exec.vector.accessor.reader.VectorAccessor;
 import org.apache.drill.exec.vector.accessor.writer.AbstractFixedWidthWriter.BaseFixedWidthWriter;
+import org.apache.drill.exec.vector.accessor.writer.AbstractFixedWidthWriter.BaseIntWriter;
 import org.apache.drill.exec.vector.accessor.writer.BaseVarWidthWriter;
 
 import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
@@ -127,22 +167,33 @@ public class ColumnAccessors {
 <#list vv.types as type>
   <#list type.minor as minor>
     <#assign drillType=minor.class>
+    <#if drillType == "Bit">
+      <#-- Bit is special, handled outside of codegen. -->
+      <#continue>
+    </#if>
     <#assign javaType=minor.javaType!type.javaType>
     <#assign accessorType=minor.accessorType!type.accessorType!minor.friendlyType!javaType>
     <#assign label=minor.accessorLabel!type.accessorLabel!accessorType?capitalize>
     <#assign notyet=minor.accessorDisabled!type.accessorDisabled!false>
-    <#assign cast=minor.accessorCast!minor.accessorCast!type.accessorCast!"none">
+    <#if notyet>
+      <#continue>
+    </#if>
+    <#assign cast=minor.accessorCast!type.accessorCast!"none">
     <#assign friendlyType=minor.friendlyType!"">
     <#if accessorType=="BigDecimal">
       <#assign label="Decimal">
     </#if>
-    <#assign varWidth = drillType == "VarChar" || drillType == "Var16Char" || drillType == "VarBinary"  || drillType == "VarDecimal"/>
+    <#assign varWidth = drillType == "VarChar" || drillType == "Var16Char" ||
+                        drillType == "VarBinary" || drillType == "VarDecimal" />
     <#assign decimal = drillType == "Decimal9" || drillType == "Decimal18" ||
-                       drillType == "Decimal28Sparse" || drillType == "Decimal38Sparse" || drillType == "VarDecimal"/>
+                       drillType == "Decimal28Sparse" || drillType == "Decimal38Sparse" ||
+                       drillType == "VarDecimal" />
+    <#assign intType = drillType == "TinyInt" || drillType == "SmallInt" || drillType == "Int" ||
+                       drillType == "Bit" || drillType == "UInt1" || drillType = "UInt2" />
     <#if varWidth>
       <#assign accessorType = "byte[]">
       <#assign label = "Bytes">
-      <#assign putArgs = ", int len">
+      <#assign putArgs = ", final int len">
     <#else>
       <#assign putArgs = "">
     </#if>
@@ -153,7 +204,6 @@ public class ColumnAccessors {
       <#assign putType = javaType />
       <#assign doCast = (cast == "set") />
     </#if>
-    <#if ! notyet>
   //------------------------------------------------------------------------
   // ${drillType} readers and writers
 
@@ -179,12 +229,11 @@ public class ColumnAccessors {
 
     </#if>
     <@getType drillType label />
-
-    <@extendedType drillType />
     <#if ! varWidth>
-    @Override public int width() { return VALUE_WIDTH; }
 
+    @Override public int width() { return VALUE_WIDTH; }
     </#if>
+
     @Override
     public ${accessorType} get${label}() {
     <#assign getObject ="getObject"/>
@@ -302,97 +351,70 @@ public class ColumnAccessors {
     <#if varWidth>
   public static class ${drillType}ColumnWriter extends BaseVarWidthWriter {
     <#else>
+      <#if intType>
+  public static class ${drillType}ColumnWriter extends BaseIntWriter {
+      <#else>
   public static class ${drillType}ColumnWriter extends BaseFixedWidthWriter {
+      </#if>
 
     private static final int VALUE_WIDTH = ${drillType}Vector.VALUE_WIDTH;
     </#if>
 
     private final ${drillType}Vector vector;
     <#if drillType == "VarDecimal">
+    private int precision;
     private int scale;
     <#elseif decimal>
     private MajorType type;
     </#if>
 
     public ${drillType}ColumnWriter(final ValueVector vector) {
-      <#if varWidth>
+    <#if varWidth>
       super(((${drillType}Vector) vector).getOffsetVector());
-      <#else>
-      </#if>
-      <#if drillType == "VarDecimal">
+    </#if>
+    <#if drillType == "VarDecimal">
       // VarDecimal requires a scale. If not set, assume 0
       MajorType type = vector.getField().getType();
+      precision = type.hasPrecision() ? type.getPrecision() : Types.maxPrecision(type.getMinorType());
       scale = type.hasScale() ? type.getScale() : 0;
-      <#elseif decimal>
+    <#elseif decimal>
       type = vector.getField().getType();
-      </#if>
+    </#if>
       this.vector = (${drillType}Vector) vector;
     }
 
     @Override public BaseDataValueVector vector() { return vector; }
 
-       <#if ! varWidth>
+     <#if ! varWidth>
     @Override public int width() { return VALUE_WIDTH; }
 
-      </#if>
-      <@getType drillType label />
-      <#if ! varWidth>
     </#if>
+      <@getType drillType label />
 
     @Override
     public final void set${label}(final ${accessorType} value${putArgs}) {
-      <#-- Must compute the write offset first; can't be inline because the
-           writeOffset() function has a side effect of possibly changing the buffer
-           address (bufAddr). -->
-      <#if ! varWidth>
-      final int writeOffset = prepareWrite();
-      <#assign putOffset = "writeOffset * VALUE_WIDTH">
-      </#if>
-      <#if varWidth>
+    <#-- Must compute the write offset first; can't be inline because the
+         writeOffset() function has a side effect of possibly changing the buffer
+         address (bufAddr). -->
+    <#if varWidth>
       final int offset = prepareWrite(len);
-      drillBuf.setBytes(offset, value, 0, len);
+    <#else>
+      final int offset = prepareWrite() * VALUE_WIDTH;
+    </#if>
+      <@writeBuf "drillBuf", drillType minor putType doCast />
+    <#if varWidth>
       offsetsWriter.setNextOffset(offset + len);
-      <#elseif drillType == "Decimal9">
-      drillBuf.setInt(${putOffset},
-          DecimalUtility.getDecimal9FromBigDecimal(value,
-              type.getScale()));
-      <#elseif drillType == "Decimal18">
-      drillBuf.setLong(${putOffset},
-          DecimalUtility.getDecimal18FromBigDecimal(value,
-              type.getScale()));
-      <#elseif drillType == "Decimal38Sparse">
-      <#-- Hard to optimize this case. Just use the available tools. -->
-      DecimalUtility.getSparseFromBigDecimal(value, drillBuf,
-          ${putOffset},
-          type.getScale(), 6);
-      <#elseif drillType == "Decimal28Sparse">
-      <#-- Hard to optimize this case. Just use the available tools. -->
-      DecimalUtility.getSparseFromBigDecimal(value, drillBuf,
-          ${putOffset},
-          type.getScale(), 5);
-      <#elseif drillType == "IntervalYear">
-      drillBuf.setInt(${putOffset},
-          value.getYears() * 12 + value.getMonths());
-      <#elseif drillType == "IntervalDay">
-      final int offset = ${putOffset};
-      drillBuf.setInt(offset, value.getDays());
-      drillBuf.setInt(offset + ${minor.millisecondsOffset}, DateUtilities.periodToMillis(value));
-      <#elseif drillType == "Interval">
-      final int offset = ${putOffset};
-      drillBuf.setInt(offset, DateUtilities.periodToMonths(value));
-      drillBuf.setInt(offset + ${minor.daysOffset}, value.getDays());
-      drillBuf.setInt(offset + ${minor.millisecondsOffset}, DateUtilities.periodToMillis(value));
-      <#elseif drillType == "Float4">
-      drillBuf.setInt(${putOffset}, Float.floatToRawIntBits((float) value));
-      <#elseif drillType == "Float8">
-      drillBuf.setLong(${putOffset}, Double.doubleToRawLongBits(value));
-      <#elseif drillType == "Bit">
-      drillBuf.setByte(writeOffset, (byte)(value & 0x01));
-      <#else>
-      drillBuf.set${putType?cap_first}(${putOffset}, <#if doCast>(${putType}) </#if>value);
-      </#if>
+    </#if>
       vectorIndex.nextElement();
     }
+    <#if ! varWidth>
+
+    public final void write${label}(final DrillBuf buf, final ${accessorType} value) {
+      final int offset = 0;
+      <@writeBuf "buf", drillType minor putType doCast />
+      buf.writerIndex(VALUE_WIDTH);
+    }
+    </#if>
     <#if drillType == "VarChar">
 
     @Override
@@ -407,39 +429,6 @@ public class ColumnAccessors {
       final byte bytes[] = value.getBytes(Charsets.UTF_16);
       setBytes(bytes, bytes.length);
     }
-
-    <#elseif drillType == "TinyInt" || drillType == "SmallInt" || drillType == "Int">
-
-    @Override
-    public final void setLong(final long value) {
-      try {
-        // Catches int overflow. Does not catch overflow for smaller types.
-        setInt(Math.toIntExact(value));
-      } catch (ArithmeticException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
-    }
-
-    @Override
-    public final void setDouble(final double value) {
-      try {
-        // Catches int overflow. Does not catch overflow from
-        // double. See Math.round for details.
-        setInt(Math.toIntExact(Math.round(value)));
-      } catch (ArithmeticException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
-    }
-
-    @Override
-    public final void setDecimal(final BigDecimal value) {
-      try {
-        // Catches int overflow.
-        setInt(value.intValueExact());
-      } catch (ArithmeticException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
-    }
     <#elseif drillType == "BigInt">
 
     @Override
@@ -499,8 +488,14 @@ public class ColumnAccessors {
 
     @Override
     public final void setDecimal(final BigDecimal value) {
-      final byte[] barr = value.setScale(scale).unscaledValue().toByteArray();
-      setBytes(barr,  barr.length);
+      try {
+        final BigDecimal rounded = value.setScale(scale, RoundingMode.HALF_UP);
+        DecimalUtility.checkValueOverflow(rounded, precision, scale);
+        final byte[] barr = rounded.unscaledValue().toByteArray();
+        setBytes(barr,  barr.length);
+      } catch (ArithmeticException e) {
+        throw new InvalidConversionError("Decimal conversion failed for " + value, e);
+      }
     }
       </#if>
     <#elseif drillType == "Date">
@@ -528,6 +523,7 @@ public class ColumnAccessors {
       setLong(value.getMillis());
     }
     </#if>
+    <#if ! intType>
 
     @Override
     public final void setValue(final Object value) {
@@ -537,20 +533,56 @@ public class ColumnAccessors {
       setDate((LocalDate) value);
       <#elseif drillType = "Time">
       setTime((LocalTime) value);
-      <#elseif drillType = "Timestamp">
+      <#elseif drillType = "TimeStamp">
       setTimestamp((Instant) value);
       <#elseif putArgs != "">
       throw new InvalidConversionError("Generic object not supported for type ${drillType}, "
           + "set${label}(${accessorType}${putArgs})");
       <#else>
       if (value != null) {
-        set${label}((${accessorType}${putArgs}) value);
+        set${label}((${accessorType}) value);
       }
       </#if>
     }
-  }
+    </#if>
 
+    <#-- Default value logic is a bit convoluted because we want to reuse the same
+         (complex) template here to generate both the set-value code and the set-default
+         code. This means we need a (temporary) DrillBuf for most cases except those
+         where a byte array value is directly available. -->
+    @Override
+    public final void setDefaultValue(final Object value) {
+    <#if drillType == "VarBinary">
+      emptyValue = (byte[]) value;
+    <#elseif drillType == "VarChar">
+      emptyValue = ((String) value).getBytes(Charsets.UTF_8);
+    <#elseif drillType == "Var16Char">
+      emptyValue = ((String) value).getBytes(Charsets.UTF_16);
+    <#elseif drillType == "VarDecimal">
+      final BigDecimal rounded = ((BigDecimal) value).setScale(scale, RoundingMode.HALF_UP);
+      DecimalUtility.checkValueOverflow(rounded, precision, scale);
+      emptyValue = rounded.unscaledValue().toByteArray();
+    <#else>
+      try (DrillBuf buf = vector.getAllocator().buffer(VALUE_WIDTH)) {
+      <#if drillType = "Date">
+        writeLong(buf, ((LocalDate) value).toDateTimeAtStartOfDay(DateTimeZone.UTC).toInstant().getMillis());
+      <#elseif drillType = "Time">
+        writeInt(buf, ((LocalTime) value).getMillisOfDay());
+      <#elseif drillType = "TimeStamp">
+        writeLong(buf, ((Instant) value).getMillis());
+      <#elseif putArgs != "">
+        throw new InvalidConversionError("Generic object not supported for type ${drillType}, "
+            + "set${label}(${accessorType}${putArgs})");
+      <#else>
+        write${label}(buf, (${accessorType}) value);
+      </#if>
+        emptyValue = new byte[VALUE_WIDTH];
+        buf.getBytes(0, emptyValue);
+      }
     </#if>
+    }
+  }
+
   </#list>
 </#list>
 }
@@ -562,7 +594,9 @@ package org.apache.drill.exec.vector.accessor;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.vector.accessor.ColumnAccessors.*;
 import org.apache.drill.exec.vector.accessor.reader.BaseScalarReader;
+import org.apache.drill.exec.vector.accessor.reader.BitColumnReader;
 import org.apache.drill.exec.vector.accessor.writer.BaseScalarWriter;
+import org.apache.drill.exec.vector.accessor.writer.BitColumnWriter;
 
 public class ColumnAccessorUtils {
 
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/ColumnMetadata.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/ColumnMetadata.java
index 7ebfa24..fda8fe9 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/ColumnMetadata.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/ColumnMetadata.java
@@ -34,28 +34,35 @@ public interface ColumnMetadata extends Propertied {
    * Predicted number of elements per array entry. Default is
    * taken from the often hard-coded value of 10.
    */
-  public static final String EXPECTED_CARDINALITY_PROP = DRILL_PROP_PREFIX + "cardinality";
+  String EXPECTED_CARDINALITY_PROP = DRILL_PROP_PREFIX + "cardinality";
 
   /**
    * Default value represented as a string.
    */
-  public static final String DEFAULT_VALUE_PROP = DRILL_PROP_PREFIX + "default";
+  String DEFAULT_VALUE_PROP = DRILL_PROP_PREFIX + "default";
 
   /**
    * Expected (average) width for variable-width columns.
    */
-  public static final String EXPECTED_WIDTH_PROP = DRILL_PROP_PREFIX + "width";
+  String EXPECTED_WIDTH_PROP = DRILL_PROP_PREFIX + "width";
 
   /**
    * Optional format to use when converting to/from string values.
    */
-  public static final String FORMAT_PROP = DRILL_PROP_PREFIX + "format";
+  String FORMAT_PROP = DRILL_PROP_PREFIX + "format";
 
   /**
    * Indicates if the column is projected. Used only for internal
    * reader-provided schemas.
    */
-  public static final String PROJECTED_PROP = DRILL_PROP_PREFIX + "projected";
+  String PROJECTED_PROP = DRILL_PROP_PREFIX + "projected";
+
+  /**
+   * Indicates how to handle blanks. Must be one of the valid values defined
+   * in AbstractConvertFromString. Normally set on the converter by the plugin
+   * rather than by the user in the schema.
+   */
+  String BLANK_AS_PROP = DRILL_PROP_PREFIX + "blank-as";
 
   /**
    * Rough characterization of Drill types into metadata categories.
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ScalarReader.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ScalarReader.java
index ba91a52..30d0c3f 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ScalarReader.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ScalarReader.java
@@ -76,7 +76,9 @@ public interface ScalarReader extends ColumnReader {
    */
 
   ValueType extendedType();
+
   int getInt();
+  boolean getBoolean();
   long getLong();
   double getDouble();
   String getString();
@@ -86,4 +88,9 @@ public interface ScalarReader extends ColumnReader {
   LocalDate getDate();
   LocalTime getTime();
   Instant getTimestamp();
+
+  /**
+   * Return the value of the object using the extended type.
+   */
+  Object getValue();
 }
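
For context, a minimal sketch (not part of this patch) of how the new reader
methods relate; a DATE column is assumed:

    ScalarReader r = reader.scalar(0);
    r.valueType();            // the storage-oriented type for the column
    r.extendedType();         // ValueType.DATE
    Object v = r.getValue();  // a Joda LocalDate, per the extended type
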
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ScalarWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ScalarWriter.java
index 586d22b..55a645e 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ScalarWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ScalarWriter.java
@@ -56,6 +56,15 @@ public interface ScalarWriter extends ColumnWriter {
    */
 
   ValueType valueType();
+
+  /**
+   * The extended type of the value. Describes the secondary type
+   * for DATE, TIME and TIMESTAMP, for which the value type is
+   * int or long.
+   */
+
+  ValueType extendedType();
+  void setBoolean(boolean value);
   void setInt(int value);
   void setLong(long value);
   void setDouble(double value);
@@ -81,4 +90,15 @@ public interface ScalarWriter extends ColumnWriter {
    */
 
   void setValue(Object value);
+
+  /**
+   * Set the default value to be used to fill empties for this writer.
+   * Only valid for required writers; nullable writers instead fill empties
+   * by setting the is-set bit to 0 and the data value to 0.
+   *
+   * @param value the value to set. Cannot be null. The type of the value
+   * must match that legal for {@link #setValue(Object)}
+   */
+
+  void setDefaultValue(Object value);
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ValueType.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ValueType.java
index 748ece9..0dde3ba 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ValueType.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/ValueType.java
@@ -29,6 +29,12 @@ package org.apache.drill.exec.vector.accessor;
 public enum ValueType {
 
   /**
+   * The value is set from a boolean: BIT.
+   */
+
+  BOOLEAN,
+
+  /**
    * The value is set from an integer: TINYINT,
    * SMALLINT, INT, UINT1, and UINT2.
    */
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/AbstractConvertFromString.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/AbstractConvertFromString.java
index 4390dbb..bc190ac 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/AbstractConvertFromString.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/AbstractConvertFromString.java
@@ -17,6 +17,11 @@
  */
 package org.apache.drill.exec.vector.accessor.convert;
 
+import java.util.Map;
+import java.util.function.Function;
+
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.record.metadata.ColumnMetadata;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
 
@@ -26,9 +31,214 @@ import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
  */
 public abstract class AbstractConvertFromString extends AbstractWriteConverter {
 
+  /**
+   * Property to control how the conversion handles blanks. Blanks are
+   * zero-length text fields (after trimming whitespace).
+   */
+  public static final String BLANK_ACTION_PROP = "blank-as";
+
+  /**
+   * Convert blanks to null values (if the column is nullable), or
+   * fill with the default value (if non-nullable).
+   */
+  public static final String BLANK_AS_NULL = "null";
+
+  /**
+   * Convert blanks for numeric fields to 0. For non-numeric
+   * fields, convert to null (for nullable) or the default value
+   * (for non-nullable). Works best if non-numeric fields are declared
+   * as nullable.
+   */
+  public static final String BLANK_AS_ZERO = "0";
+
+  /**
+   * Skip blank values. This will result in the column being set to null for
+   * nullable columns, and to the default value (or 0 if no default is set)
+   * for non-nullable columns.
+   */
+  public static final String BLANK_AS_SKIP = "skip";
+
+  protected static final org.slf4j.Logger logger =
+      org.slf4j.LoggerFactory.getLogger(AbstractConvertFromString.class);
+
+  protected final Function<String,String> prepare;
 
   public AbstractConvertFromString(ScalarWriter baseWriter) {
+    this(baseWriter, null);
+  }
+
+  public AbstractConvertFromString(ScalarWriter baseWriter, Map<String, String> properties) {
     super(baseWriter);
+    this.prepare = buildPrepare(baseWriter.schema(), properties);
+  }
+
+  /**
+   * Create a function to prepare the string. This turns out to be surprisingly
+   * complex. Behavior depends on where (or whether) the blank-handling
+   * property is set:
+   * <ul>
+   * <li>Property is set in the column schema.</li>
+   * <li>Property is set via the framework to a plugin-specific value.</li>
+   * <li>Properties are provided but the blank property is not set.</li>
+   * <li>No framework properties provided.</li>
+   * </ul>
+   *
+   * In the last two cases, no special handling is done for blank values.
+   * <p>
+   * In all cases, nulls:
+   * <p>
+   * <ul>
+   * <li>Cause the column to be set to null, if the column is nullable. (This step
+   * is actually optional, but is done explicitly to allow a column to be rewritten.)</li>
+   * <li>Cause the column to be skipped, if the column is not nullable.</li>
+   * </ul>
+   *
+   * In all cases, the (non-null) string value is trimmed. Next, there are
+   * multiple ways to handle blanks, depending on context:
+   *
+   * <ul>
+   * <li>Leave blanks unchanged (default if no property is set).</li>
+   * <li>{@link #BLANK_AS_SKIP}: skip the value for the row, letting the
+   * fill-empties logic fill in the default value.</li>
+   * <li>{@link #BLANK_AS_ZERO}: for numeric types, replace the blank value
+   * with "0", which will then be parsed to a numeric zero.</li>
+   * <li>{@link #BLANK_AS_NULL}: for nullable modes, set the column to null
+   * and ignore the value.</li>
+   * </ul>
+   * <p>
+   * The preparation function handles setting the column to null if needed.
+   * The function returns a non-null string if the converter should handle it,
+   * or null if the converter should do nothing.
+   * <p>
+   * The logic is handled as a lambda function for two reasons:
+   * <p>
+   * <ul>
+   * <li>Perform all column-wide conditional checks once on writer creation
+   * rather than on every row.</li>
+   * <li>Allow the function to call this writer's {@link #setNull()} method.</li>
+   * </ul>
+   *
+   * @param schema the output schema, with user properties optionally set
+   * @param properties optional framework-specific properties
+   * @return a function to call to prepare each string value for conversion
+   */
+
+  private Function<String,String> buildPrepare(ColumnMetadata schema,
+      Map<String, String> properties) {
+
+    String blankProp = schema.property(ColumnMetadata.BLANK_AS_PROP);
+    if (blankProp == null && properties != null) {
+      blankProp = properties.get(BLANK_ACTION_PROP);
+    }
+
+    if (blankProp != null) {
+      switch (blankProp.toLowerCase()) {
+      case BLANK_AS_NULL:
+        return skipBlankFn(schema);
+      case BLANK_AS_ZERO:
+        return blankAsZeroFn(schema);
+      case BLANK_AS_SKIP:
+        if (schema.isNullable()) {
+          return blankToNullFn();
+        } else {
+          return blankAsZeroFn(schema);
+        }
+      default:
+        // Ignore invalid values, but warn so the user can correct the property
+        logger.warn("Invalid conversion option '{}', skipping", blankProp);
+        break;
+      }
+    }
+
+    // Otherwise, for a nullable column: if the string is null, set the
+    // column to null; else trim the string.
+
+    if (schema.isNullable()) {
+      return nullableStrFn();
+    }
+
+    // Otherwise, trim the string, but have the converter skip this row if the
+    // string is blank. The column loader will then fill in the default value.
+
+    return skipBlanksFn();
+  }
+
+  private Function<String, String> blankAsZeroFn(ColumnMetadata schema) {
+    if (! Types.isNumericType(schema.type())) {
+      return skipBlankFn(schema);
+    } else if (schema.isNullable()) {
+      return nullableBlankToZeroFn();
+    } else {
+      return blankToZeroFn();
+    }
+  }
+
+  private Function<String, String> skipBlankFn(ColumnMetadata schema) {
+    if (schema.isNullable()) {
+      return blankToNullFn();
+    } else {
+      return skipBlankFn();
+    }
+  }
+
+  private static Function<String, String> skipBlanksFn() {
+    return (String s) -> s == null ? s : s.trim();
+  }
+
+  private Function<String, String> nullableStrFn() {
+    return (String s) -> {
+      if (s == null) {
+        setNull();
+        return null;
+      }
+      return s.trim();
+    };
+  }
+
+  private Function<String, String> blankToNullFn() {
+    return (String s) -> {
+      if (s == null) {
+        setNull();
+        return null;
+      }
+      s = s.trim();
+      if (s.isEmpty()) {
+        setNull();
+        return null;
+      }
+      return s;
+    };
+  }
+
+  private Function<String, String> skipBlankFn() {
+    return (String s) -> {
+      if (s == null) {
+        return null;
+      }
+      s = s.trim();
+      return s.isEmpty() ? null : s;
+    };
+  }
+
+  private Function<String, String> nullableBlankToZeroFn() {
+    return (String s) -> {
+      if (s == null) {
+        setNull();
+        return null;
+      }
+      s = s.trim();
+      return s.isEmpty() ? "0" : s;
+    };
+  }
+
+  private Function<String, String> blankToZeroFn() {
+    return (String s) -> {
+      if (s == null) {
+        return null;
+      }
+      s = s.trim();
+      return s.isEmpty() ? "0" : s;
+    };
   }
 
   @Override
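
A sketch of how a format plugin might supply the framework-level blank-handling mode (the BlankHandlingDemo class and csvConversionProps method are hypothetical; the constants are those defined above):

import java.util.HashMap;
import java.util.Map;

import org.apache.drill.exec.vector.accessor.convert.AbstractConvertFromString;

public class BlankHandlingDemo {
  // Hypothetical sketch: build the properties map consulted when the
  // column schema does not set its own blank-handling property.
  static Map<String, String> csvConversionProps() {
    Map<String, String> props = new HashMap<>();
    props.put(AbstractConvertFromString.BLANK_ACTION_PROP,
        AbstractConvertFromString.BLANK_AS_ZERO); // blanks in numeric columns become 0
    return props;
  }
}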
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/AbstractWriteConverter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/AbstractWriteConverter.java
index 8f99773..b98e8e0 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/AbstractWriteConverter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/AbstractWriteConverter.java
@@ -69,11 +69,22 @@ public abstract class AbstractWriteConverter extends AbstractScalarWriter {
   }
 
   @Override
+  public void setDefaultValue(Object value) {
+    throw new IllegalStateException(
+        "Cannot set a default value through a shim; types conflict: " + value);
+  }
+
+  @Override
   public void setNull() {
     baseWriter.setNull();
   }
 
   @Override
+  public void setBoolean(boolean value) {
+    baseWriter.setBoolean(value);
+  }
+
+  @Override
   public void setInt(int value) {
     baseWriter.setInt(value);
   }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertBooleanToString.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertBooleanToString.java
index 41c0a6e..159d81d 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertBooleanToString.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertBooleanToString.java
@@ -26,8 +26,8 @@ public class ConvertBooleanToString extends AbstractWriteConverter {
   }
 
   @Override
-  public void setInt(int value) {
-    baseWriter.setString(Boolean.toString(value != 0));
+  public void setBoolean(boolean value) {
+    baseWriter.setString(Boolean.toString(value));
   }
 
   @Override
@@ -35,7 +35,7 @@ public class ConvertBooleanToString extends AbstractWriteConverter {
     if (value == null) {
       setNull();
     } else {
-      setInt((int) value);
+      setBoolean((boolean) value);
     }
   }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToBoolean.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToBoolean.java
index bebee51..d7d4937 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToBoolean.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToBoolean.java
@@ -17,6 +17,9 @@
  */
 package org.apache.drill.exec.vector.accessor.convert;
 
+import java.util.Map;
+
+import org.apache.drill.common.types.BooleanType;
 import org.apache.drill.exec.vector.accessor.InvalidConversionError;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 
@@ -27,21 +30,22 @@ import org.apache.drill.exec.vector.accessor.ScalarWriter;
  */
 public class ConvertStringToBoolean extends AbstractConvertFromString {
 
-  public ConvertStringToBoolean(ScalarWriter baseWriter) {
-    super(baseWriter);
+  public ConvertStringToBoolean(ScalarWriter baseWriter,
+      Map<String, String> properties) {
+    super(baseWriter, properties);
   }
 
   @Override
   public void setString(final String value) {
-    if (value == null) {
-      baseWriter.setNull();
-    } else {
-      try {
-        baseWriter.setInt(Boolean.parseBoolean(value) ? 1 : 0);
-      }
-      catch (final NumberFormatException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
+    final String prepared = prepare.apply(value);
+    if (prepared == null) {
+      return;
+    }
+    try {
+      baseWriter.setBoolean(BooleanType.fromString(prepared));
+    }
+    catch (final NumberFormatException e) {
+      throw InvalidConversionError.writeError(schema(), value, e);
     }
   }
 }
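
The converter now writes a true boolean and delegates parsing to BooleanType. A minimal sketch (the BooleanParseDemo class is hypothetical; the exact set of accepted literals is defined by the relocated BooleanType class, not shown here):

import org.apache.drill.common.types.BooleanType;

public class BooleanParseDemo {
  public static void main(String[] args) {
    // fromString(...) centralizes the true/false literal parsing that
    // ConvertStringToBoolean previously did with Boolean.parseBoolean.
    System.out.println(BooleanType.fromString("true"));
  }
}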
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDate.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDate.java
index 042f1b8..a3b0ba8 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDate.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDate.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec.vector.accessor.convert;
 
+import java.util.Map;
+
 import org.apache.drill.exec.vector.accessor.InvalidConversionError;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.joda.time.LocalDate;
@@ -31,22 +33,23 @@ public class ConvertStringToDate extends AbstractConvertFromString {
 
   private final DateTimeFormatter dateTimeFormatter;
 
-  public ConvertStringToDate(ScalarWriter baseWriter) {
-    super(baseWriter);
+  public ConvertStringToDate(ScalarWriter baseWriter,
+      Map<String, String> properties) {
+    super(baseWriter, properties);
     dateTimeFormatter = baseWriter.schema().dateTimeFormatter();
   }
 
   @Override
   public void setString(final String value) {
-    if (value == null) {
-      baseWriter.setNull();
-    } else {
-      try {
-        baseWriter.setDate(LocalDate.parse(value, dateTimeFormatter));
-      }
-      catch (final IllegalStateException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
+    final String prepared = prepare.apply(value);
+    if (prepared == null) {
+      return;
+    }
+    try {
+      baseWriter.setDate(LocalDate.parse(prepared, dateTimeFormatter));
+    }
+    catch (final IllegalStateException e) {
+      throw InvalidConversionError.writeError(schema(), value, e);
     }
   }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDecimal.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDecimal.java
index 892aa43..6c4944c 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDecimal.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDecimal.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.vector.accessor.convert;
 
 import java.math.BigDecimal;
+import java.util.Map;
 
 import org.apache.drill.exec.vector.accessor.InvalidConversionError;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
@@ -30,21 +31,22 @@ import org.apache.drill.exec.vector.accessor.ScalarWriter;
  */
 public class ConvertStringToDecimal extends AbstractConvertFromString {
 
-  public ConvertStringToDecimal(ScalarWriter baseWriter) {
-    super(baseWriter);
+  public ConvertStringToDecimal(ScalarWriter baseWriter,
+      Map<String, String> properties) {
+    super(baseWriter, properties);
   }
 
   @Override
   public void setString(final String value) {
-    if (value == null) {
-      baseWriter.setNull();
-    } else {
-      try {
-        baseWriter.setDecimal(new BigDecimal(value));
-      }
-      catch (final NumberFormatException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
+    final String prepared = prepare.apply(value);
+    if (prepared == null) {
+      return;
+    }
+    try {
+      baseWriter.setDecimal(new BigDecimal(prepared));
+    }
+    catch (final NumberFormatException e) {
+      throw InvalidConversionError.writeError(schema(), value, e);
     }
   }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDouble.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDouble.java
index 3ce65d7..9023b04 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDouble.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToDouble.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec.vector.accessor.convert;
 
+import java.util.Map;
+
 import org.apache.drill.exec.vector.accessor.InvalidConversionError;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 
@@ -26,21 +28,22 @@ import org.apache.drill.exec.vector.accessor.ScalarWriter;
  */
 public class ConvertStringToDouble extends AbstractConvertFromString {
 
-  public ConvertStringToDouble(ScalarWriter baseWriter) {
-    super(baseWriter);
+  public ConvertStringToDouble(ScalarWriter baseWriter,
+      Map<String, String> properties) {
+    super(baseWriter, properties);
   }
 
   @Override
   public void setString(final String value) {
-    if (value == null) {
-      baseWriter.setNull();
-    } else {
-      try {
-        baseWriter.setDouble(Double.parseDouble(value));
-      }
-      catch (final NumberFormatException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
+    final String prepared = prepare.apply(value);
+    if (prepared == null) {
+      return;
+    }
+    try {
+      baseWriter.setDouble(Double.parseDouble(prepared));
+    }
+    catch (final NumberFormatException e) {
+      throw InvalidConversionError.writeError(schema(), value, e);
     }
   }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToInt.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToInt.java
index ece970f..f5f7531 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToInt.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToInt.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec.vector.accessor.convert;
 
+import java.util.Map;
+
 import org.apache.drill.exec.vector.accessor.InvalidConversionError;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 
@@ -27,21 +29,22 @@ import org.apache.drill.exec.vector.accessor.ScalarWriter;
  */
 public class ConvertStringToInt extends AbstractConvertFromString {
 
-  public ConvertStringToInt(ScalarWriter baseWriter) {
-    super(baseWriter);
+  public ConvertStringToInt(ScalarWriter baseWriter,
+      Map<String, String> properties) {
+    super(baseWriter, properties);
   }
 
   @Override
   public void setString(final String value) {
-    if (value == null) {
-      baseWriter.setNull();
-    } else {
-      try {
-        baseWriter.setInt(Integer.parseInt(value));
-      }
-      catch (final NumberFormatException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
+    final String prepared = prepare.apply(value);
+    if (prepared == null) {
+      return;
+    }
+    try {
+      baseWriter.setInt(Integer.parseInt(prepared));
+    }
+    catch (final NumberFormatException e) {
+      throw InvalidConversionError.writeError(schema(), value, e);
     }
   }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToInterval.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToInterval.java
index 74dd685..ae61b9a 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToInterval.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToInterval.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.vector.accessor.convert;
 
 import java.time.format.DateTimeParseException;
+import java.util.Map;
 
 import org.apache.drill.exec.vector.accessor.InvalidConversionError;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
@@ -29,21 +30,22 @@ import org.joda.time.Period;
  */
 public class ConvertStringToInterval extends AbstractConvertFromString {
 
-  public ConvertStringToInterval(ScalarWriter baseWriter) {
-    super(baseWriter);
+  public ConvertStringToInterval(ScalarWriter baseWriter,
+      Map<String, String> properties) {
+    super(baseWriter, properties);
   }
 
   @Override
   public void setString(final String value) {
-    if (value == null) {
-      baseWriter.setNull();
-    } else {
-      try {
-        baseWriter.setPeriod(Period.parse(value));
-      }
-      catch (final DateTimeParseException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
+    final String prepared = prepare.apply(value);
+    if (prepared == null) {
+      return;
+    }
+    try {
+      baseWriter.setPeriod(Period.parse(prepared));
+    }
+    catch (final DateTimeParseException e) {
+      throw InvalidConversionError.writeError(schema(), value, e);
     }
   }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToLong.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToLong.java
index a2a5b17..a2bd704 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToLong.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToLong.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec.vector.accessor.convert;
 
+import java.util.Map;
+
 import org.apache.drill.exec.vector.accessor.InvalidConversionError;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 
@@ -26,21 +28,22 @@ import org.apache.drill.exec.vector.accessor.ScalarWriter;
  */
 public class ConvertStringToLong extends AbstractConvertFromString {
 
-  public ConvertStringToLong(ScalarWriter baseWriter) {
-    super(baseWriter);
+  public ConvertStringToLong(ScalarWriter baseWriter,
+      Map<String, String> properties) {
+    super(baseWriter, properties);
   }
 
   @Override
   public void setString(final String value) {
-    if (value == null) {
-      baseWriter.setNull();
-    } else {
-      try {
-        baseWriter.setLong(Long.parseLong(value));
-      }
-      catch (final NumberFormatException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
+    final String prepared = prepare.apply(value);
+    if (prepared == null) {
+      return;
+    }
+    try {
+      baseWriter.setLong(Long.parseLong(prepared));
+    }
+    catch (final NumberFormatException e) {
+      throw InvalidConversionError.writeError(schema(), value, e);
     }
   }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToTime.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToTime.java
index 094948a..44c6412 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToTime.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToTime.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec.vector.accessor.convert;
 
+import java.util.Map;
+
 import org.apache.drill.exec.vector.accessor.InvalidConversionError;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.joda.time.LocalTime;
@@ -31,22 +33,23 @@ public class ConvertStringToTime extends AbstractConvertFromString {
 
   private final DateTimeFormatter dateTimeFormatter;
 
-  public ConvertStringToTime(ScalarWriter baseWriter) {
-    super(baseWriter);
+  public ConvertStringToTime(ScalarWriter baseWriter,
+      Map<String, String> properties) {
+    super(baseWriter, properties);
     dateTimeFormatter = baseWriter.schema().dateTimeFormatter();
   }
 
   @Override
   public void setString(final String value) {
-    if (value == null) {
-      baseWriter.setNull();
-    } else {
-      try {
-        baseWriter.setTime(LocalTime.parse(value, dateTimeFormatter));
-      }
-      catch (final IllegalStateException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
+    final String prepared = prepare.apply(value);
+    if (prepared == null) {
+      return;
+    }
+    try {
+      baseWriter.setTime(LocalTime.parse(prepared, dateTimeFormatter));
+    }
+    catch (final IllegalStateException e) {
+      throw InvalidConversionError.writeError(schema(), value, e);
     }
   }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToTimeStamp.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToTimeStamp.java
index a029554..21c517e 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToTimeStamp.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/ConvertStringToTimeStamp.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec.vector.accessor.convert;
 
+import java.util.Map;
+
 import org.apache.drill.exec.vector.accessor.InvalidConversionError;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.joda.time.Instant;
@@ -31,22 +33,23 @@ public class ConvertStringToTimeStamp extends AbstractConvertFromString {
 
   private final DateTimeFormatter dateTimeFormatter;
 
-  public ConvertStringToTimeStamp(ScalarWriter baseWriter) {
-    super(baseWriter);
+  public ConvertStringToTimeStamp(ScalarWriter baseWriter,
+      Map<String, String> properties) {
+    super(baseWriter, properties);
     dateTimeFormatter = baseWriter.schema().dateTimeFormatter();
   }
 
   @Override
   public void setString(final String value) {
-    if (value == null) {
-      baseWriter.setNull();
-    } else {
-      try {
-        baseWriter.setTimestamp(Instant.parse(value, dateTimeFormatter));
-      }
-      catch (final IllegalStateException e) {
-        throw InvalidConversionError.writeError(schema(), value, e);
-      }
+    final String prepared = prepare.apply(value);
+    if (prepared == null) {
+      return;
+    }
+    try {
+      baseWriter.setTimestamp(Instant.parse(prepared, dateTimeFormatter));
+    }
+    catch (final IllegalStateException e) {
+      throw InvalidConversionError.writeError(schema(), value, e);
     }
   }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/StandardConversions.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/StandardConversions.java
index 122e7ce..0068686 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/StandardConversions.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/convert/StandardConversions.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.vector.accessor.convert;
 
 import java.lang.reflect.Constructor;
+import java.util.Map;
 
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
@@ -101,22 +102,50 @@ public class StandardConversions {
 
   /**
    * Column conversion factory for the case where a conversion class is provided.
+   * Also holds an optional set of properties to be passed to the converter instance.
    */
   public static class SimpleWriterConverterFactory implements ColumnConversionFactory {
     private final Class<? extends AbstractWriteConverter> conversionClass;
+    private final Map<String, String> properties;
 
-    SimpleWriterConverterFactory(Class<? extends AbstractWriteConverter> conversionClass) {
+    SimpleWriterConverterFactory(Class<? extends AbstractWriteConverter> conversionClass,
+        Map<String, String> properties) {
       this.conversionClass = conversionClass;
+      this.properties = properties;
     }
 
     @Override
     public AbstractWriteConverter newWriter(ScalarWriter baseWriter) {
-      return newInstance(conversionClass, baseWriter);
+      return newInstance(conversionClass, baseWriter, properties);
     }
   }
 
   public static ColumnConversionFactory factory(Class<? extends AbstractWriteConverter> converterClass) {
-    return new SimpleWriterConverterFactory(converterClass);
+    return new SimpleWriterConverterFactory(converterClass, null);
+  }
+
+  public static ColumnConversionFactory factory(Class<? extends AbstractWriteConverter> converterClass,
+      Map<String, String> properties) {
+    return new SimpleWriterConverterFactory(converterClass, properties);
+  }
+
+  public static AbstractWriteConverter newInstance(
+      Class<? extends AbstractWriteConverter> conversionClass, ScalarWriter baseWriter,
+      Map<String,String> properties) {
+
+    // Try the Converter(ScalarWriter writer, Map<String, String> props) constructor first.
+    // This first form is optional.
+
+    try {
+      final Constructor<? extends AbstractWriteConverter> ctor = conversionClass.getDeclaredConstructor(ScalarWriter.class, Map.class);
+      return ctor.newInstance(baseWriter, properties);
+    } catch (final ReflectiveOperationException e) {
+      // Ignore
+    }
+
+    // Then try the Converter(ScalarWriter writer) constructor.
+
+    return newInstance(conversionClass, baseWriter);
   }
 
   public static AbstractWriteConverter newInstance(
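
A sketch of a custom converter that works with either factory path (the UpperCaseConverter class is hypothetical): newInstance() prefers the two-argument constructor when properties are supplied, then falls back to the one-argument form.

import java.util.Map;

import org.apache.drill.exec.vector.accessor.ScalarWriter;
import org.apache.drill.exec.vector.accessor.convert.AbstractWriteConverter;

public class UpperCaseConverter extends AbstractWriteConverter {

  public UpperCaseConverter(ScalarWriter baseWriter) {
    this(baseWriter, null);
  }

  // Preferred by newInstance() when a properties map is provided.
  public UpperCaseConverter(ScalarWriter baseWriter, Map<String, String> properties) {
    super(baseWriter);
    // Plugin-specific options could be read from properties here.
  }

  @Override
  public void setString(String value) {
    if (value == null) {
      setNull();
    } else {
      baseWriter.setString(value.toUpperCase());
    }
  }
}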
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/impl/VectorPrinter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/impl/VectorPrinter.java
index cabfbff..838c749 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/impl/VectorPrinter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/impl/VectorPrinter.java
@@ -20,7 +20,6 @@ package org.apache.drill.exec.vector.accessor.impl;
 import org.apache.drill.exec.vector.UInt4Vector;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.VarCharVector;
-
 import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
 
 /**
@@ -33,7 +32,10 @@ public class VectorPrinter {
   public static void printOffsets(UInt4Vector vector, int start, int length) {
     header(vector, start, length);
     for (int i = start, j = 0; j < length; i++, j++) {
-      if (j > 0) {
+      if (j % 40 == 0) {
+        System.out.print("\n          ");
+      } else if (j > 0) {
         System.out.print(" ");
       }
       System.out.print(vector.getAccessor().get(i));
@@ -68,5 +70,4 @@ public class VectorPrinter {
   public static String stringAt(VarCharVector vector, int i) {
     return new String(vector.getAccessor().get(i), Charsets.UTF_8);
   }
-
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/reader/AbstractScalarReader.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/reader/AbstractScalarReader.java
index ac71487..7d8190d 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/reader/AbstractScalarReader.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/reader/AbstractScalarReader.java
@@ -125,6 +125,11 @@ public abstract class AbstractScalarReader implements ScalarReader, ReaderEvents
   }
 
   @Override
+  public boolean getBoolean() {
+    throw conversionError("boolean");
+  }
+
+  @Override
   public int getInt() {
     throw conversionError("int");
   }
@@ -180,6 +185,8 @@ public abstract class AbstractScalarReader implements ScalarReader, ReaderEvents
       return null;
     }
     switch (valueType()) {
+    case BOOLEAN:
+      return getBoolean();
     case BYTES:
       return getBytes();
     case DECIMAL:
@@ -206,6 +213,23 @@ public abstract class AbstractScalarReader implements ScalarReader, ReaderEvents
   }
 
   @Override
+  public Object getValue() {
+    if (isNull()) {
+      return null;
+    }
+    switch (extendedType()) {
+    case DATE:
+      return getDate();
+    case TIME:
+      return getTime();
+    case TIMESTAMP:
+      return getTimestamp();
+    default:
+      return getObject();
+    }
+  }
+
+  @Override
   public String getAsString() {
     if (isNull()) {
       return "null";
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/reader/BitColumnReader.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/reader/BitColumnReader.java
new file mode 100644
index 0000000..859a377
--- /dev/null
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/reader/BitColumnReader.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.accessor.reader;
+
+import org.apache.drill.exec.vector.BitVector;
+import org.apache.drill.exec.vector.accessor.ValueType;
+import org.apache.drill.exec.vector.accessor.reader.BaseScalarReader.BaseFixedWidthReader;
+
+/**
+ * Specialized reader for bit columns. Bits are packed 8 per byte.
+ * Rather than duplicate that logic here, this reader just delegates
+ * to the vector's own accessor.
+ */
+public class BitColumnReader extends BaseFixedWidthReader {
+
+  @Override
+  public ValueType valueType() {
+    return ValueType.BOOLEAN;
+  }
+
+  @Override public int width() { return BitVector.VALUE_WIDTH; }
+
+  @Override
+  public boolean getBoolean() {
+    final BitVector.Accessor accessor = ((BitVector) vectorAccessor.vector()).getAccessor();
+    final int readOffset = vectorIndex.offset();
+    return accessor.get(readOffset) != 0;
+  }
+
+  @Override
+  public int getInt() {
+    return getBoolean() ? 1 : 0;
+  }
+}
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractArrayWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractArrayWriter.java
index 585696f..1b42169 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractArrayWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractArrayWriter.java
@@ -257,13 +257,14 @@ public abstract class AbstractArrayWriter implements ArrayWriter, WriterEvents {
     }
   }
 
-  protected final ColumnMetadata schema;
+  private final ColumnMetadata schema;
   protected AbstractObjectWriter elementObjWriter;
   protected final OffsetVectorWriter offsetsWriter;
   protected ColumnWriterIndex outerIndex;
   protected ArrayElementWriterIndex elementIndex;
 
-  public AbstractArrayWriter(ColumnMetadata schema, AbstractObjectWriter elementObjWriter, OffsetVectorWriter offsetVectorWriter) {
+  public AbstractArrayWriter(ColumnMetadata schema, AbstractObjectWriter elementObjWriter,
+      OffsetVectorWriter offsetVectorWriter) {
     this.schema = schema;
     this.elementObjWriter = elementObjWriter;
     this.offsetsWriter = offsetVectorWriter;
@@ -337,7 +338,7 @@ public abstract class AbstractArrayWriter implements ArrayWriter, WriterEvents {
 
   @Override
   public void setNull(boolean isNull) {
-    if (isNull == true) {
+    if (isNull) {
       throw new UnsupportedOperationException();
     }
   }
@@ -357,7 +358,7 @@ public abstract class AbstractArrayWriter implements ArrayWriter, WriterEvents {
     format
       .startObject(this)
       .attribute("elementObjWriter");
-      elementObjWriter.dump(format);
+    elementObjWriter.dump(format);
     format.endObject();
   }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractFixedWidthWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractFixedWidthWriter.java
index 921cb00..b07c297 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractFixedWidthWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractFixedWidthWriter.java
@@ -17,14 +17,29 @@
  */
 package org.apache.drill.exec.vector.accessor.writer;
 
+import java.math.BigDecimal;
+
 import org.apache.drill.exec.memory.BaseAllocator;
 import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.accessor.InvalidConversionError;
 import org.apache.drill.exec.vector.accessor.impl.HierarchicalFormatter;
 
 /**
  * Base class for writers for fixed-width vectors. Handles common
  * tasks, leaving the generated code to handle only type-specific
  * operations.
+ * <p>
+ * Fixed-width writers provide default values for empty (unused) slots.
+ * Suppose a client writes to slot 0, skips a few rows, then writes to slot
+ * 5. We could leave the intermediate values unwritten. But, since Drill
+ * reuses buffers, the slots may contain garbage. Instead, we implement
+ * "fill empties" logic. When we write to slot 5, we notice that the last
+ * slot written was 0, and we fill in slots 1, 2, 3 and 4 with values.
+ * <p>
+ * The fill value defaults to 0, and is defined as a block of zero-bytes
+ * at least as long as one data value. Derived classes also
+ * allow setting a default value. In this case, the default value is encoded
+ * into a byte array, and that array is copied to each slot as the fill value.
  */
 
 public abstract class AbstractFixedWidthWriter extends BaseScalarWriter {
@@ -38,6 +53,10 @@ public abstract class AbstractFixedWidthWriter extends BaseScalarWriter {
 
     private static final byte ZERO_BUF[] = new byte[256];
 
+    public BaseFixedWidthWriter() {
+      emptyValue = ZERO_BUF;
+    }
+
     /**
      * Determine the write index, growing, overflowing and back-filling
      * the vector as needed.
@@ -93,18 +112,68 @@ public abstract class AbstractFixedWidthWriter extends BaseScalarWriter {
     @Override
     protected final void fillEmpties(final int writeIndex) {
       final int width = width();
-      final int stride = ZERO_BUF.length / width;
+      final int stride = emptyValue.length / width;
       int dest = lastWriteIndex + 1;
       while (dest < writeIndex) {
         int length = writeIndex - dest;
         length = Math.min(length, stride);
-        drillBuf.setBytes(dest * width, ZERO_BUF, 0, length * width);
+        drillBuf.setBytes(dest * width, emptyValue, 0, length * width);
         dest += length;
       }
     }
   }
 
   /**
+   * Base class for writers that use the Java int type as their native
+   * type. Handles common implicit conversions from other types to int.
+   */
+  public static abstract class BaseIntWriter extends BaseFixedWidthWriter {
+
+    @Override
+    public final void setBoolean(final boolean value) {
+      setInt(value ? 1 : 0);
+    }
+
+    @Override
+    public final void setLong(final long value) {
+      try {
+        // Catches int overflow. Does not catch overflow for smaller types.
+        setInt(Math.toIntExact(value));
+      } catch (final ArithmeticException e) {
+        throw InvalidConversionError.writeError(schema(), value, e);
+      }
+    }
+
+    @Override
+    public final void setDouble(final double value) {
+      try {
+        // Catches int overflow. Does not catch overflow from
+        // double. See Math.round for details.
+        setInt(Math.toIntExact(Math.round(value)));
+      } catch (final ArithmeticException e) {
+        throw InvalidConversionError.writeError(schema(), value, e);
+      }
+    }
+
+    @Override
+    public final void setDecimal(final BigDecimal value) {
+      try {
+        // Catches int overflow.
+        setInt(value.intValueExact());
+      } catch (final ArithmeticException e) {
+        throw InvalidConversionError.writeError(schema(), value, e);
+      }
+    }
+
+    @Override
+    public final void setValue(final Object value) {
+      if (value != null) {
+        setInt((int) value);
+      }
+    }
+  }
+
+  /**
    * The largest position to which the writer has written data. Used to allow
    * "fill-empties" (AKA "back-fill") of missing values one each value write
    * and at the end of a batch. Note that this is the position of the last
@@ -225,7 +294,7 @@ public abstract class AbstractFixedWidthWriter extends BaseScalarWriter {
 
   @Override
   public void postRollover() {
-    int newIndex = Math.max(lastWriteIndex - vectorIndex.rowStartIndex(), -1);
+    final int newIndex = Math.max(lastWriteIndex - vectorIndex.rowStartIndex(), -1);
     startWrite();
     lastWriteIndex = newIndex;
   }
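
The block-copy arithmetic in fillEmpties() can be shown standalone (the FillEmptiesDemo class is a hypothetical sketch): slots between the last write and the new write index are filled with copies of the empty-value pattern, a stride of values at a time.

import java.util.Arrays;

public class FillEmptiesDemo {
  // Fill slots (lastWriteIndex+1 .. writeIndex-1) of a fixed-width buffer
  // with a pattern whose length is a multiple of the value width.
  static void fillEmpties(byte[] buf, byte[] emptyValue, int width,
      int lastWriteIndex, int writeIndex) {
    final int stride = emptyValue.length / width; // values copied per block
    int dest = lastWriteIndex + 1;
    while (dest < writeIndex) {
      final int length = Math.min(writeIndex - dest, stride);
      System.arraycopy(emptyValue, 0, buf, dest * width, length * width);
      dest += length;
    }
  }

  public static void main(String[] args) {
    final byte[] buf = new byte[10 * 4];    // ten 4-byte slots
    fillEmpties(buf, new byte[8], 4, 0, 5); // zero-fill slots 1..4
    System.out.println(Arrays.toString(buf));
  }
}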
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractObjectWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractObjectWriter.java
index 96710d7..c4b5149 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractObjectWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractObjectWriter.java
@@ -88,9 +88,6 @@ public abstract class AbstractObjectWriter implements ObjectWriter {
       return baseWriter;
     }
     final AbstractWriteConverter shim = conversionFactory.newWriter(baseWriter);
-    if (shim == null) {
-      return baseWriter;
-    }
-    return shim;
+    return shim == null ? baseWriter : shim;
   }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractScalarWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractScalarWriter.java
index 0844ec6..7803e21 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractScalarWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractScalarWriter.java
@@ -21,6 +21,7 @@ import java.math.BigDecimal;
 
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.apache.drill.exec.vector.accessor.UnsupportedConversionError;
+import org.apache.drill.exec.vector.accessor.ValueType;
 import org.apache.drill.exec.vector.accessor.writer.WriterEvents.ColumnWriterListener;
 import org.joda.time.Instant;
 import org.joda.time.LocalDate;
@@ -35,6 +36,9 @@ import org.joda.time.Period;
 public abstract class AbstractScalarWriter implements ScalarWriter {
 
   @Override
+  public ValueType extendedType() { return valueType(); }
+
+  @Override
   public void setObject(Object value) {
     if (value == null) {
       setNull();
@@ -66,7 +70,7 @@ public abstract class AbstractScalarWriter implements ScalarWriter {
     } else if (value instanceof Short) {
       setInt((Short) value);
     } else if (value instanceof Boolean) {
-      setInt(((boolean) value) ? 1 : 0);
+      setBoolean((boolean) value);
     } else {
       throw conversionError(value.getClass().getSimpleName());
     }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractTupleWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractTupleWriter.java
index 4a05cfe..0a7c594 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractTupleWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractTupleWriter.java
@@ -248,7 +248,7 @@ public abstract class AbstractTupleWriter implements TupleWriter, WriterEvents {
   public void startWrite() {
     assert state == State.IDLE;
     state = State.IN_WRITE;
-    for (int i = 0; i < writers.size();  i++) {
+    for (int i = 0; i < writers.size(); i++) {
       writers.get(i).events().startWrite();
     }
   }
@@ -260,7 +260,7 @@ public abstract class AbstractTupleWriter implements TupleWriter, WriterEvents {
 
     assert state == State.IN_WRITE;
     state = State.IN_ROW;
-    for (int i = 0; i < writers.size();  i++) {
+    for (int i = 0; i < writers.size(); i++) {
       writers.get(i).events().startRow();
     }
   }
@@ -268,7 +268,7 @@ public abstract class AbstractTupleWriter implements TupleWriter, WriterEvents {
   @Override
   public void endArrayValue() {
     assert state == State.IN_ROW;
-    for (int i = 0; i < writers.size();  i++) {
+    for (int i = 0; i < writers.size(); i++) {
       writers.get(i).events().endArrayValue();
     }
   }
@@ -285,7 +285,7 @@ public abstract class AbstractTupleWriter implements TupleWriter, WriterEvents {
     // the current row.
 
     assert state == State.IN_ROW;
-    for (int i = 0; i < writers.size();  i++) {
+    for (int i = 0; i < writers.size(); i++) {
       writers.get(i).events().restartRow();
     }
   }
@@ -293,7 +293,7 @@ public abstract class AbstractTupleWriter implements TupleWriter, WriterEvents {
   @Override
   public void saveRow() {
     assert state == State.IN_ROW;
-    for (int i = 0; i < writers.size();  i++) {
+    for (int i = 0; i < writers.size(); i++) {
       writers.get(i).events().saveRow();
     }
     state = State.IN_WRITE;
@@ -305,7 +305,7 @@ public abstract class AbstractTupleWriter implements TupleWriter, WriterEvents {
     // Rollover can only happen while a row is in progress.
 
     assert state == State.IN_ROW;
-    for (int i = 0; i < writers.size();  i++) {
+    for (int i = 0; i < writers.size(); i++) {
       writers.get(i).events().preRollover();
     }
   }
@@ -316,7 +316,7 @@ public abstract class AbstractTupleWriter implements TupleWriter, WriterEvents {
     // Rollover can only happen while a row is in progress.
 
     assert state == State.IN_ROW;
-    for (int i = 0; i < writers.size();  i++) {
+    for (int i = 0; i < writers.size(); i++) {
       writers.get(i).events().postRollover();
     }
   }
@@ -324,7 +324,7 @@ public abstract class AbstractTupleWriter implements TupleWriter, WriterEvents {
   @Override
   public void endWrite() {
     assert state != State.IDLE;
-    for (int i = 0; i < writers.size();  i++) {
+    for (int i = 0; i < writers.size(); i++) {
       writers.get(i).events().endWrite();
     }
     state = State.IDLE;
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BaseScalarWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BaseScalarWriter.java
index 5bc38bb..0083ece 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BaseScalarWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BaseScalarWriter.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.vector.accessor.writer;
 
 import java.math.BigDecimal;
 
+import org.apache.drill.exec.record.metadata.ColumnMetadata;
 import org.apache.drill.exec.vector.accessor.UnsupportedConversionError;
 import org.apache.drill.exec.vector.accessor.impl.HierarchicalFormatter;
 import org.joda.time.Instant;
@@ -143,6 +144,13 @@ public abstract class BaseScalarWriter extends AbstractScalarWriterImpl {
 
   protected ColumnWriterListener listener;
 
+  /**
+   * Value to use to fill empties. Must be at least as wide as each
+   * value.
+   */
+
+  protected byte emptyValue[];
+
   protected DrillBuf drillBuf;
 
   /**
@@ -158,6 +166,18 @@ public abstract class BaseScalarWriter extends AbstractScalarWriterImpl {
     this.listener = listener;
   }
 
+  @Override
+  public void bindSchema(ColumnMetadata schema) {
+    super.bindSchema(schema);
+
+    // Set the default value, if any, from the schema.
+
+    final Object defaultValue = schema.decodeDefaultValue();
+    if (defaultValue != null) {
+      setDefaultValue(defaultValue);
+    }
+  }
+
   /**
    * All change of buffer comes through this function to allow capturing
    * the buffer address and capacity. Only two ways to set the buffer:
@@ -214,6 +234,11 @@ public abstract class BaseScalarWriter extends AbstractScalarWriterImpl {
   }
 
   @Override
+  public void setBoolean(boolean value) {
+    throw conversionError("boolean");
+  }
+
+  @Override
   public void setInt(int value) {
     throw conversionError("int");
   }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BaseVarWidthWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BaseVarWidthWriter.java
index 9db767d..70de95a 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BaseVarWidthWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BaseVarWidthWriter.java
@@ -33,6 +33,13 @@ import org.apache.drill.exec.vector.accessor.impl.HierarchicalFormatter;
  * vector.
  * <p>
  * Most events are forwarded to the offset vector.
+ * <p>
+ * This class handles filling empty values with a default value.
+ * Doing so is tricky, as we must coordinate both this vector and
+ * the offset vector, checking for resize and overflow at each step.
+ * Also, when filling empties, we cannot use the normal "set" functions,
+ * as they are what trigger the empty filling. Instead, we have to
+ * write to the "last write" position, not the current row position.
  */
 
 public abstract class BaseVarWidthWriter extends BaseScalarWriter {
@@ -60,14 +67,22 @@ public abstract class BaseVarWidthWriter extends BaseScalarWriter {
   protected final int prepareWrite(final int width) {
 
     // This is performance critical code; every operation counts.
-    // Please be thoughtful when changing the code.
+    // Please be thoughtful when making changes.
+
+    fillEmpties();
+    return writeOffset(width);
+  }
 
-    int writeOffset = offsetsWriter.nextOffset();
+  private final int writeOffset(final int width) {
+    final int writeOffset = offsetsWriter.nextOffset;
     if (writeOffset + width < capacity) {
       return writeOffset;
     }
     resize(writeOffset + width);
-    return offsetsWriter.nextOffset();
+
+    // Offset will change if overflow occurred on resize.
+
+    return offsetsWriter.nextOffset;
   }
 
   @Override
@@ -128,6 +143,31 @@ public abstract class BaseVarWidthWriter extends BaseScalarWriter {
   @Override
   public int lastWriteIndex() { return offsetsWriter.lastWriteIndex(); }
 
+  /**
+   * Fill empty slots with the default value set via a call to
+   * {@link #setDefaultValue(Object)}. Invoked before each new write
+   * and again at the end of the batch, filling each skipped slot
+   * in turn. Note that the value
+   * being filled is <b>earlier</b> in the vector than the current row
+   * position: that is the very nature of empty filling.
+   */
+  private void fillEmpties() {
+    if (emptyValue == null) {
+      return;
+    }
+    final int fillCount = offsetsWriter.prepareFill() - offsetsWriter.lastWriteIndex - 1;
+    if (fillCount == 0) {
+      return;
+    }
+    final int len = emptyValue.length;
+    for (int i = 0; i < fillCount; i++) {
+      final int writeOffset = writeOffset(len);
+      drillBuf.setBytes(writeOffset, emptyValue, 0, len);
+      offsetsWriter.fillOffset(writeOffset + len);
+    }
+  }
+
   @Override
   public final void preRollover() {
     vector().getBuffer().writerIndex(offsetsWriter.rowStartOffset());
@@ -142,6 +182,7 @@ public abstract class BaseVarWidthWriter extends BaseScalarWriter {
 
   @Override
   public final void endWrite() {
+    fillEmpties();
     vector().getBuffer().writerIndex(offsetsWriter.nextOffset());
     offsetsWriter.endWrite();
   }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BitColumnWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BitColumnWriter.java
new file mode 100644
index 0000000..40fc6ac
--- /dev/null
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/BitColumnWriter.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.accessor.writer;
+
+import org.apache.drill.exec.vector.BaseDataValueVector;
+import org.apache.drill.exec.vector.BitVector;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.accessor.ValueType;
+
+/**
+ * Specialized writer for bit columns. Bits are packed 8 per byte.
+ * Rather than duplicate that logic here, this writer just delegates
+ * to the vector's own mutator.
+ */
+
+public class BitColumnWriter extends AbstractFixedWidthWriter {
+
+  private final BitVector vector;
+  private final BitVector.Mutator mutator;
+  private int defaultValue;
+
+  public BitColumnWriter(final ValueVector vector) {
+    this.vector = (BitVector) vector;
+    mutator = this.vector.getMutator();
+  }
+
+  @Override public BaseDataValueVector vector() { return vector; }
+
+  @Override public int width() { return BitVector.VALUE_WIDTH; }
+
+  @Override
+  public ValueType valueType() {
+    return ValueType.BOOLEAN;
+  }
+
+  protected int prepareWrite() {
+
+    // "Fast path" for the normal case of no fills, no overflow.
+    // This is the only bounds check we want to do for the entire
+    // set operation.
+
+    // This is performance critical code; every operation counts.
+    // Please be thoughtful when changing the code.
+
+    final int writeIndex = vectorIndex.vectorIndex();
+    prepareWrite(writeIndex);
+
+    // Track the last write location for zero-fill use next time around.
+
+    lastWriteIndex = writeIndex;
+    return writeIndex;
+  }
+
+  private void prepareWrite(int writeIndex) {
+    final int byteIndex = writeIndex >> 3;
+    if (byteIndex >= capacity) {
+
+      // Bit vector can never overflow
+
+      resize(byteIndex);
+    }
+    if (lastWriteIndex + 1 < writeIndex) {
+      fillEmpties(writeIndex);
+    }
+    lastWriteIndex = writeIndex;
+  }
+
+  @Override
+  public void setValueCount(int valueCount) {
+    prepareWrite(valueCount);
+    mutator.setValueCount(valueCount);
+  }
+
+  /**
+   * Fill empties. This is required because the allocated memory is not
+   * zero-filled.
+   */
+
+  @Override
+  protected final void fillEmpties(final int writeIndex) {
+    for (int dest = lastWriteIndex + 1; dest < writeIndex; dest++) {
+      mutator.set(dest, defaultValue);
+    }
+  }
+
+  @Override
+  public final void setBoolean(final boolean value) {
+    mutator.set(prepareWrite(), value ? 1 : 0);
+    vectorIndex.nextElement();
+  }
+
+  @Override
+  public void setInt(final int value) {
+    setBoolean(value != 0);
+  }
+
+  @Override
+  public void setValue(Object value) {
+    setInt((Integer) value);
+  }
+
+  @Override
+  public final void setDefaultValue(final Object value) {
+    defaultValue = ((Boolean) value) ? 1 : 0;
+  }
+}
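
A small usage sketch for the new writer (the BitDefaultDemo class is hypothetical): a required BIT column can now carry a boolean default, which fillEmpties() writes into skipped slots via the vector's mutator.

import org.apache.drill.exec.vector.accessor.ScalarWriter;

public class BitDefaultDemo {
  // Hypothetical sketch: the writer is assumed to be a bound
  // BitColumnWriter for a required BIT column.
  static void configure(ScalarWriter bitWriter) {
    bitWriter.setDefaultValue(Boolean.TRUE); // stored as 1 in the bit vector
    bitWriter.setBoolean(false);             // write row 0; skipped rows fill as true
  }
}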
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/NullableScalarWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/NullableScalarWriter.java
index aa3508d..b3c2ea5 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/NullableScalarWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/NullableScalarWriter.java
@@ -131,6 +131,13 @@ public class NullableScalarWriter extends AbstractScalarWriterImpl {
   }
 
   @Override
+  public void setBoolean(boolean value) {
+    baseWriter.setBoolean(value);
+    isSetWriter.setInt(1);
+    writerIndex.nextElement();
+  }
+
+  @Override
   public void setInt(int value) {
     baseWriter.setInt(value);
     isSetWriter.setInt(1);
@@ -278,4 +285,10 @@ public class NullableScalarWriter extends AbstractScalarWriterImpl {
     baseWriter.dump(format);
     format.endObject();
   }
+
+  @Override
+  public void setDefaultValue(Object value) {
+    throw new UnsupportedOperationException(
+        "Default values not supported for nullable types:" + value);
+  }
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/OffsetVectorWriterImpl.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/OffsetVectorWriterImpl.java
index e46a7c5..f4ee0ab 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/OffsetVectorWriterImpl.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/OffsetVectorWriterImpl.java
@@ -117,6 +117,24 @@ import org.apache.drill.exec.vector.accessor.impl.HierarchicalFormatter;
  * <p>
  * See {@link ObjectArrayWriter} for information about arrays of
  * maps (arrays of multiple columns.)
+ *
+ * <h4>Empty Slots</h4>
+ *
+ * The offset vector writer handles empty slots in two distinct ways.
+ * First, the writer handles its own empties. Suppose that this is the offset
+ * vector for a VarChar column. Suppose we write "Foo" in the first slot. Now
+ * we have an offset vector with the values <tt>[ 0 3 ]</tt>. Suppose the client
+ * skips several rows and next writes at slot 5. We must copy the latest
+ * offset (3) into all the skipped slots: <tt>[ 0 3 3 3 3 3 ]</tt>. The result
+ * is a set of four empty VarChars in positions 1, 2, 3 and 4. (Here, remember
+ * that the offset vector always has one more value than the number of rows.)
+ * <p>
+ * The second way to fill empties is in the data vector. The data vector may choose
+ * to fill the four "empty" slots with a value, say "X". In this case, it is up to
+ * the data vector to fill in the values, calling into this vector to set each
+ * offset. Note that when doing this, the calls are a bit different than for writing
+ * a regular value because we want to write at the "last write position", not the
+ * current row position. See {@link BaseVarWidthWriter} for an example.
  */
 
 public class OffsetVectorWriterImpl extends AbstractFixedWidthWriter implements OffsetVectorWriter {
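
The offset back-fill described above is easy to see in isolation (the OffsetFillDemo class is a hypothetical sketch): copying the last offset into the skipped slots yields zero-length values for the skipped rows.

import java.util.Arrays;

public class OffsetFillDemo {
  public static void main(String[] args) {
    final int[] offsets = new int[6]; // one more entry than the 5 rows
    offsets[1] = 3;                   // "Foo" written at row 0
    for (int i = 2; i < offsets.length; i++) {
      offsets[i] = offsets[1];        // back-fill rows 1..4 with offset 3
    }
    System.out.println(Arrays.toString(offsets)); // [0, 3, 3, 3, 3, 3]
  }
}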
@@ -140,7 +158,7 @@ public class OffsetVectorWriterImpl extends AbstractFixedWidthWriter implements
    * committed in {@link @endValue()}.
    */
 
-  private int nextOffset;
+  protected int nextOffset;
 
   public OffsetVectorWriterImpl(UInt4Vector vector) {
     this.vector = vector;
@@ -176,7 +194,7 @@ public class OffsetVectorWriterImpl extends AbstractFixedWidthWriter implements
   }
 
   @Override
-  public int nextOffset() { return nextOffset; }
+  public int nextOffset() {return nextOffset; }
 
   @Override
   public int rowStartOffset() { return rowStartOffset; }
@@ -194,48 +212,37 @@ public class OffsetVectorWriterImpl extends AbstractFixedWidthWriter implements
 
   protected final int prepareWrite() {
 
-    // "Fast path" for the normal case of no fills, no overflow.
-    // This is the only bounds check we want to do for the entire
-    // set operation.
-
     // This is performance critical code; every operation counts.
     // Please be thoughtful when changing the code.
 
-    final int valueIndex = vectorIndex.vectorIndex();
-    int writeIndex = valueIndex + 1;
-    if (lastWriteIndex + 1 < valueIndex || writeIndex >= capacity) {
-      writeIndex =  prepareWrite(writeIndex);
+    final int valueIndex = prepareFill();
+    final int fillCount = valueIndex - lastWriteIndex - 1;
+    if (fillCount > 0) {
+      fillEmpties(fillCount);
     }
 
     // Track the last write location for zero-fill use next time around.
 
     lastWriteIndex = valueIndex;
-    return writeIndex;
+    return valueIndex + 1;
   }
 
-  protected int prepareWrite(int writeIndex) {
-
-    // Either empties must be filed or the vector is full.
-
-    resize(writeIndex);
-
-    // Call to resize may cause rollover, so reset write index
-    // afterwards.
-
+  public final int prepareFill() {
     final int valueIndex = vectorIndex.vectorIndex();
+    if (valueIndex + 1 < capacity) {
+      return valueIndex;
+    }
+    resize(valueIndex + 1);
 
-    // Fill empties to the write position.
-    // Fill empties works of the row index, not the write
-    // index. (Yes, this is complex...)
+    // Call to resize may cause rollover, so get new write index afterwards.
 
-    fillEmpties(valueIndex);
-    return valueIndex + 1;
+    return vectorIndex.vectorIndex();
   }
 
   @Override
-  protected final void fillEmpties(final int valueIndex) {
-    while (lastWriteIndex < valueIndex - 1) {
-      drillBuf.setInt((++lastWriteIndex + 1) * VALUE_WIDTH, nextOffset);
+  protected final void fillEmpties(final int fillCount) {
+    for (int i = 0; i < fillCount; i++) {
+      fillOffset(nextOffset);
     }
   }
 
@@ -246,16 +253,21 @@ public class OffsetVectorWriterImpl extends AbstractFixedWidthWriter implements
     nextOffset = newOffset;
   }
 
+  public final void fillOffset(final int newOffset) {
+    drillBuf.setInt((++lastWriteIndex + 1) * VALUE_WIDTH, newOffset);
+    nextOffset = newOffset;
+  }
+
   @Override
   public final void setValue(final Object value) {
-    throw new InvalidConversionError("setValue() not supported for the offset vector writer");
+    throw new InvalidConversionError(
+        "setValue() not supported for the offset vector writer: " + value);
   }
 
   @Override
   public void skipNulls() {
 
-    // Nothing to do. Fill empties logic will fill in missing
-    // offsets.
+    // Nothing to do. Fill empties logic will fill in missing offsets.
   }
 
   @Override
@@ -286,10 +298,9 @@ public class OffsetVectorWriterImpl extends AbstractFixedWidthWriter implements
   public void setValueCount(int valueCount) {
     mandatoryResize(valueCount);
 
-    // Value count are in offset vector positions. Fill empties
-    // works in row positions.
+    // Value count is in row positions.
 
-    fillEmpties(valueCount);
+    fillEmpties(valueCount - lastWriteIndex - 1);
     vector().getBuffer().writerIndex((valueCount + 1) * VALUE_WIDTH);
   }
 
@@ -302,4 +313,9 @@ public class OffsetVectorWriterImpl extends AbstractFixedWidthWriter implements
       .attribute("nextOffset", nextOffset)
       .endObject();
   }
+
+  @Override
+  public void setDefaultValue(Object value) {
+    throw new UnsupportedOperationException("Default values not supported for offset vectors");
+  }
 }
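
The new fill-empties path above implements the behavior described in the
Javadoc: when the client skips rows, the latest offset is copied into every
skipped slot so the skipped VarChars read back as empty (start offset equals
end offset). A self-contained toy model of the resulting offset vector,
using a plain array instead of Drill buffers:

  import java.util.Arrays;

  public class OffsetFillSketch {
    public static void main(String[] args) {
      int rows = 6;
      int[] offsets = new int[rows + 1];  // one more entry than rows
      int lastWriteIndex = -1;
      int nextOffset = 0;

      // Write "Foo" (3 bytes) at row 0.
      nextOffset += 3;
      offsets[1] = nextOffset;
      lastWriteIndex = 0;

      // Client skips to row 5: back-fill rows 1..4 with the latest offset.
      int targetRow = 5;
      for (int fill = lastWriteIndex + 1; fill < targetRow; fill++) {
        offsets[fill + 1] = nextOffset;   // empty VarChar: start == end
      }

      // Write "Bar" (3 bytes) at row 5.
      nextOffset += 3;
      offsets[targetRow + 1] = nextOffset;

      System.out.println(Arrays.toString(offsets)); // [0, 3, 3, 3, 3, 3, 6]
    }
  }
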
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/ScalarArrayWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/ScalarArrayWriter.java
index 16d827f..8bacdf4 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/ScalarArrayWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/ScalarArrayWriter.java
@@ -122,7 +122,7 @@ public class ScalarArrayWriter extends BaseArrayWriter {
     if (! objClass.startsWith("[")) {
       throw new IllegalArgumentException(
           String.format("Argument must be an array. Column `%s`, value = %s",
-              schema.name(), array.toString()));
+              schema().name(), array.toString()));
     }
 
     // Figure out type
@@ -140,7 +140,7 @@ public class ScalarArrayWriter extends BaseArrayWriter {
       default:
         throw new IllegalArgumentException(
             String.format("Unknown Java array type: %s, for column `%s`",
-                objClass, schema.name()));
+                objClass, schema().name()));
       }
       break;
     case  'B':
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/dummy/DummyScalarWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/dummy/DummyScalarWriter.java
index ae0f273..852bd0d 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/dummy/DummyScalarWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/dummy/DummyScalarWriter.java
@@ -54,6 +54,9 @@ public class DummyScalarWriter extends AbstractScalarWriterImpl {
   public void setNull() { }
 
   @Override
+  public void setBoolean(boolean value) { }
+
+  @Override
   public void setInt(int value) { }
 
   @Override
@@ -109,4 +112,7 @@ public class DummyScalarWriter extends AbstractScalarWriterImpl {
 
   @Override
   public void setValue(Object value) { }
+
+  @Override
+  public void setDefaultValue(Object value) { }
 }
diff --git a/pom.xml b/pom.xml
index c2eefd8..222af61 100644
--- a/pom.xml
+++ b/pom.xml
@@ -79,7 +79,7 @@
     <hadoop.version>2.7.4</hadoop.version>
     <hbase.version>2.1.1</hbase.version>
     <fmpp.version>1.0</fmpp.version>
-    <freemarker.version>2.3.26-incubating</freemarker.version>
+    <freemarker.version>2.3.28</freemarker.version>
     <javassist.version>3.24.0-GA</javassist.version>
     <msgpack.version>0.6.6</msgpack.version>
     <reflections.version>0.9.10</reflections.version>


[drill] 02/06: DRILL-7157: Wrap SchemaParsingException into UserException when creating schema

Posted by ar...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

arina pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 9ffd6d933cdd16e10a5818991dd80ab3c36cf88f
Author: Arina Ielchiieva <ar...@gmail.com>
AuthorDate: Mon Apr 8 10:32:11 2019 +0300

    DRILL-7157: Wrap SchemaParsingException into UserException when creating schema
    
    closes #1740
---
 .../org/apache/drill/exec/planner/sql/handlers/SchemaHandler.java  | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SchemaHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SchemaHandler.java
index f1f7353..4e7c8ae 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SchemaHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SchemaHandler.java
@@ -35,6 +35,7 @@ import org.apache.drill.exec.record.metadata.schema.FsMetastoreSchemaProvider;
 import org.apache.drill.exec.record.metadata.schema.PathSchemaProvider;
 import org.apache.drill.exec.record.metadata.schema.SchemaContainer;
 import org.apache.drill.exec.record.metadata.schema.SchemaProvider;
+import org.apache.drill.exec.record.metadata.schema.parser.SchemaParsingException;
 import org.apache.drill.exec.store.AbstractSchema;
 import org.apache.drill.exec.store.StorageStrategy;
 import org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory;
@@ -133,7 +134,11 @@ public abstract class SchemaHandler extends DefaultSqlHandler {
           ExecConstants.PERSISTENT_TABLE_UMASK).string_val, false);
         schemaProvider.store(schemaString, sqlCall.getProperties(), storageStrategy);
         return DirectPlan.createDirectPlan(context, true, String.format("Created schema for [%s]", schemaSource));
-
+      } catch (SchemaParsingException e) {
+        throw UserException.parseError(e)
+          .message(e.getMessage())
+          .addContext("Schema: " + schemaString)
+          .build(logger);
       } catch (IOException e) {
         throw UserException.resourceError(e)
           .message(e.getMessage())
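
The pattern here is a straightforward wrap-and-rethrow: a low-level
SchemaParsingException becomes a categorized, user-facing parse error that
carries the offending schema string as context, instead of surfacing as an
internal exception. A compact, self-contained sketch of the same pattern
with simplified stand-in classes (not Drill's UserException builder):

  class SchemaParsingException extends RuntimeException {
    SchemaParsingException(String message) { super(message); }
  }

  class UserFacingParseError extends RuntimeException {
    UserFacingParseError(String message, Throwable cause) { super(message, cause); }
  }

  class SchemaStoreSketch {
    void store(String schemaString) {
      try {
        parse(schemaString);
      } catch (SchemaParsingException e) {
        // Keep the root cause, add the offending input as context.
        throw new UserFacingParseError(
            e.getMessage() + " [Schema: " + schemaString + "]", e);
      }
    }

    private void parse(String schema) {
      // Stand-in for the real parser: reject anything without a column list.
      if (!schema.startsWith("(")) {
        throw new SchemaParsingException("Cannot parse schema: " + schema);
      }
    }
  }
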


[drill] 04/06: DRILL-7049: REST API returns the toString of byte arrays (VARBINARY types)

Posted by ar...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

arina pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 9cbfaad0a92b4ffb6302ea1cfbcdfaa24cd8786b
Author: Vitalii Diravka <vi...@apache.org>
AuthorDate: Mon Apr 8 02:19:59 2019 +0300

    DRILL-7049: REST API returns the toString of byte arrays (VARBINARY types)
    
    closes #1739
---
 .../exec/util/ValueVectorElementFormatter.java     | 26 +++++-----------------
 .../exec/util/TestValueVectorElementFormatter.java | 11 +++++++++
 2 files changed, 17 insertions(+), 20 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/util/ValueVectorElementFormatter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/util/ValueVectorElementFormatter.java
index 01921df..63c9440 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/util/ValueVectorElementFormatter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/util/ValueVectorElementFormatter.java
@@ -52,47 +52,33 @@ public class ValueVectorElementFormatter {
    * @return the formatted value, null if failed
    */
   public String format(Object value, TypeProtos.MinorType minorType) {
-    boolean handled = false;
-	String str = null;
     switch (minorType) {
       case TIMESTAMP:
         if (value instanceof LocalDateTime) {
-          handled = true;
-          str = format((LocalDateTime) value,
+          return format((LocalDateTime) value,
                         options.getString(ExecConstants.WEB_DISPLAY_FORMAT_TIMESTAMP),
                         (v, p) -> v.format(getTimestampFormatter(p)));
         }
-        break;
       case DATE:
         if (value instanceof LocalDate) {
-          handled = true;
-          str = format((LocalDate) value,
+          return format((LocalDate) value,
                         options.getString(ExecConstants.WEB_DISPLAY_FORMAT_DATE),
                         (v, p) -> v.format(getDateFormatter(p)));
         }
-        break;
       case TIME:
         if (value instanceof LocalTime) {
-          handled = true;
-          str = format((LocalTime) value,
+          return format((LocalTime) value,
                         options.getString(ExecConstants.WEB_DISPLAY_FORMAT_TIME),
                         (v, p) -> v.format(getTimeFormatter(p)));
         }
-        break;
       case VARBINARY:
         if (value instanceof byte[]) {
-          handled = true;
           byte[] bytes = (byte[]) value;
-          str = org.apache.drill.common.util.DrillStringUtils.toBinaryString(bytes);
+          return org.apache.drill.common.util.DrillStringUtils.toBinaryString(bytes);
         }
-        break;
+      default:
+        return value.toString();
     }
-
-    if (!handled) {
-      str = value.toString();
-    }
-
-    return str;
   }
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/util/TestValueVectorElementFormatter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/util/TestValueVectorElementFormatter.java
index 5c6666d..8e7df54 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/util/TestValueVectorElementFormatter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/util/TestValueVectorElementFormatter.java
@@ -25,6 +25,7 @@ import org.junit.Test;
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
 
+import java.nio.charset.StandardCharsets;
 import java.time.LocalDate;
 import java.time.LocalDateTime;
 import java.time.LocalTime;
@@ -142,4 +143,14 @@ public class TestValueVectorElementFormatter {
     assertEquals("Mon, Nov 5, 2012", formattedDate);
     assertEquals("1:00:30 PM", formattedTime);
   }
+
+  @Test // DRILL-7049
+  public void testFormatValueVectorElementBinary() {
+    ValueVectorElementFormatter formatter = new ValueVectorElementFormatter(options);
+    String testString = "Fred";
+    String formattedValue = formatter.format(
+            testString.getBytes(StandardCharsets.UTF_8),
+            TypeProtos.MinorType.VARBINARY);
+    assertEquals("Wrong Varbinary value formatting", testString, formattedValue);
+  }
 }
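
The test above covers only printable bytes; the motivation for the VARBINARY
branch (see commit 03/06 below) is that non-printable bytes should come back
escaped rather than as Java's default byte[] toString() (something like
"[B@1b6d3586"). The real logic lives in DrillStringUtils.toBinaryString; the
sketch below only approximates its behavior, and the exact escape format is
an assumption rather than copied from Drill:

  import java.nio.charset.StandardCharsets;

  public class BinaryEscapeSketch {
    // Printable ASCII passes through; everything else becomes a \xNN escape.
    static String toPrintable(byte[] bytes) {
      StringBuilder sb = new StringBuilder();
      for (byte b : bytes) {
        if (b >= 0x20 && b < 0x7F) {
          sb.append((char) b);
        } else {
          sb.append(String.format("\\x%02X", b));
        }
      }
      return sb.toString();
    }

    public static void main(String[] args) {
      byte[] bytes = "Fred\u0001".getBytes(StandardCharsets.UTF_8);
      System.out.println(toPrintable(bytes)); // Fred\x01
    }
  }
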


[drill] 03/06: DRILL-7049 return VARBINARY as a string with escaped non printable bytes

Posted by ar...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

arina pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 9844b61c520b27f878e7d6fc657177da05d55dad
Author: Jean-Claude <jc...@gmail.com>
AuthorDate: Thu Feb 21 16:55:15 2019 -0500

    DRILL-7049 return VARBINARY as a string with escaped non printable bytes
---
 .../exec/util/ValueVectorElementFormatter.java     | 29 ++++++++++++++++++----
 1 file changed, 24 insertions(+), 5 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/util/ValueVectorElementFormatter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/util/ValueVectorElementFormatter.java
index bbb13a7..01921df 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/util/ValueVectorElementFormatter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/util/ValueVectorElementFormatter.java
@@ -52,28 +52,47 @@ public class ValueVectorElementFormatter {
    * @return the formatted value, null if failed
    */
   public String format(Object value, TypeProtos.MinorType minorType) {
+    boolean handled = false;
+	String str = null;
     switch (minorType) {
       case TIMESTAMP:
         if (value instanceof LocalDateTime) {
-          return format((LocalDateTime) value,
+          handled = true;
+          str = format((LocalDateTime) value,
                         options.getString(ExecConstants.WEB_DISPLAY_FORMAT_TIMESTAMP),
                         (v, p) -> v.format(getTimestampFormatter(p)));
         }
+        break;
       case DATE:
         if (value instanceof LocalDate) {
-          return format((LocalDate) value,
+          handled = true;
+          str = format((LocalDate) value,
                         options.getString(ExecConstants.WEB_DISPLAY_FORMAT_DATE),
                         (v, p) -> v.format(getDateFormatter(p)));
         }
+        break;
       case TIME:
         if (value instanceof LocalTime) {
-          return format((LocalTime) value,
+          handled = true;
+          str = format((LocalTime) value,
                         options.getString(ExecConstants.WEB_DISPLAY_FORMAT_TIME),
                         (v, p) -> v.format(getTimeFormatter(p)));
         }
-      default:
-        return value.toString();
+        break;
+      case VARBINARY:
+        if (value instanceof byte[]) {
+          handled = true;
+          byte[] bytes = (byte[]) value;
+          str = org.apache.drill.common.util.DrillStringUtils.toBinaryString(bytes);
+        }
+        break;
+    }
+
+    if (!handled) {
+      str = value.toString();
     }
+
+    return str;
   }
 
   /**
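
This earlier revision guards each case with a handled flag and explicit
breaks because the original switch fell straight through: a TIMESTAMP value
failing the instanceof check would continue into the DATE branch, and so on.
Commit 04/06 (shown above in this email) later simplifies the same fix to
direct returns. A tiny demonstration of the underlying Java pitfall:

  public class FallThroughDemo {
    public static void main(String[] args) {
      switch (1) {
        case 1:
          System.out.println("case 1");   // prints
        case 2:                           // no break above: control falls through
          System.out.println("case 2");   // prints too
        default:
          System.out.println("default");  // and so does this
      }
    }
  }
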


[drill] 06/06: DRILL-7045: Updates to address review comments

Posted by ar...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

arina pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 771fd270b684bbf388c0e2fa10b359eba3dfdb7c
Author: Sorabh Hamirwasia <so...@apache.org>
AuthorDate: Tue Apr 2 13:24:59 2019 -0700

    DRILL-7045: Updates to address review comments
    
    closes #7134
---
 .../drill/exec/vector/complex/writer/TestJsonNanInf.java      | 11 +++--------
 1 file changed, 3 insertions(+), 8 deletions(-)

diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
index 851ce0e..b7b3744 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
@@ -18,15 +18,15 @@
 package org.apache.drill.exec.vector.complex.writer;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.drill.exec.physical.impl.join.JoinTestBase;
-import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.physical.impl.join.JoinTestBase;
 import org.apache.drill.exec.record.RecordBatchLoader;
 import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.vector.VarCharVector;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Ignore;
 import org.junit.Test;
 
@@ -244,12 +244,7 @@ public class TestJsonNanInf extends BaseTestQuery {
       builder.append(chunk);
     }
     String data = builder.toString();
-    String query = String.format("select string_binary(binary_string('%s')) from (values(1))", data);
-    List<QueryDataBatch> results = testSqlWithResults(query);
-    RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
-    QueryDataBatch batch = results.get(0);
-    batch.release();
-    batchLoader.clear();
+    test("select string_binary(binary_string('%s')) from (values(1))", data);
  }
 
   @Test