Posted to commits@drill.apache.org by vi...@apache.org on 2022/04/27 09:47:27 UTC

[drill] 01/03: DRILL-6953: Merge row set-based JSON reader

This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit b464b9991ac34f3a8ea293e62a819a2b69504ed1
Author: Paul Rogers <pa...@yahoo.com>
AuthorDate: Mon Apr 13 19:45:16 2020 -0700

    DRILL-6953: Merge row set-based JSON reader
    
    * Snapshot: reworked JSON field parser creation
    * Updated JSON loader
    * Redo value listener with tokens
    * Extended long type works
    * More simple extended types
    * Added $date
    * Binary type
    * All extended types except arrays
    * Extended arrays partly working
    * More arrays work
    * Refactor element parser interfaces
    * Rename RowSetTests --> RowSetTest
    * More factory cleanup
    * Revised unknown field creation
    * In middle of factory/parser restructuring
    * Scalars, object, some variants work again
    * JSON loader tests pass
    * File cleanup
    * Old extended types test passes
    * Renamed JSON packages
    * Tested extended provided types
---
 .../{RowSetTests.java => RowSetTest.java}          |   2 +-
 .../exec/store/esri/TestShapefileFormatPlugin.java |   4 +-
 .../drill/exec/store/excel/TestExcelFormat.java    |   4 +-
 .../drill/exec/store/hdf5/TestHDF5Format.java      |   4 +-
 .../drill/exec/store/httpd/TestHTTPDLogReader.java |   4 +-
 .../store/httpd/TestHTTPDLogReaderUserAgent.java   |   4 +-
 .../exec/store/image/TestImageRecordReader.java    |   6 +-
 .../drill/exec/store/pcap/TestPcapEVFReader.java   |   4 +-
 .../exec/store/pcapng/TestPcapngRecordReader.java  |   4 +-
 .../store/pcapng/TestPcapngStatRecordReader.java   |   4 +-
 .../apache/drill/exec/store/pdf/TestPdfFormat.java |   4 +-
 .../apache/drill/exec/store/sas/TestSasReader.java |   4 +-
 .../drill/exec/store/spss/TestSpssReader.java      |   4 +-
 .../drill/exec/store/syslog/TestSyslogFormat.java  |   4 +-
 .../apache/drill/exec/store/xml/TestXMLReader.java |   4 +-
 .../exec/store/phoenix/PhoenixCommandTest.java     |   4 +-
 .../exec/store/phoenix/PhoenixDataTypeTest.java    |   4 +-
 .../drill/exec/store/phoenix/PhoenixSQLTest.java   |   4 +-
 .../phoenix/secured/SecuredPhoenixCommandTest.java |   4 +-
 .../secured/SecuredPhoenixDataTypeTest.java        |   4 +-
 .../phoenix/secured/SecuredPhoenixSQLTest.java     |   4 +-
 .../phoenix/secured/SecuredPhoenixTestSuite.java   |   4 +-
 .../java/org/apache/drill/exec/ExecConstants.java  |   3 +
 .../drill/exec/expr/fn/impl/TypeFunctions.java     |   1 -
 .../org/apache/drill/exec/ops/OperatorStats.java   |   8 +-
 .../impl/protocol/VectorContainerAccessor.java     |  12 +-
 .../scan/project/ExplicitSchemaProjection.java     |   2 +-
 .../physical/impl/validate/BatchValidator.java     |   6 +
 .../physical/resultSet/impl/SingleVectorState.java |  53 +-
 .../model/single/SimpleReaderBuilder.java          |  18 +-
 .../org/apache/drill/exec/record/BatchSchema.java  |  66 ++-
 .../apache/drill/exec/record/VectorContainer.java  |   2 +-
 .../exec/record/selection/SelectionVector2.java    |  28 +-
 .../drill/exec/rpc/user/QueryResultHandler.java    |   6 +-
 .../exec/server/options/SystemOptionManager.java   |   1 +
 .../exec/store/dfs/easy/EasyFormatPlugin.java      |  17 +-
 .../exec/store/easy/json/JSONRecordReader.java     |   7 +
 .../exec/store/easy/json/JsonBatchReader.java      |  85 +++
 .../store/easy/json/loader/BaseFieldFactory.java   |   1 -
 .../drill/exec/store/log/LogFormatPlugin.java      |   4 +-
 .../exec/vector/complex/fn/ExtendedJsonOutput.java |   2 -
 .../drill/exec/vector/complex/fn/ExtendedType.java |   3 -
 .../exec/vector/complex/fn/ExtendedTypeName.java   |  23 +-
 .../exec/vector/complex/fn/FieldSelection.java     |   1 -
 .../drill/exec/vector/complex/fn/JsonWriter.java   |  47 +-
 .../drill/exec/vector/complex/fn/VectorOutput.java |  47 +-
 .../java-exec/src/main/resources/drill-module.conf |   1 +
 .../java/org/apache/drill/TestStarQueries.java     | 211 ++++---
 .../org/apache/drill/exec/TestEmptyInputSql.java   |  19 +
 .../drill/exec/expr/fn/impl/TestTypeFns.java       |  63 +-
 .../impl/protocol/TestOperatorRecordBatch.java     |  69 +--
 .../exec/physical/impl/scan/TestColumnsArray.java  |   4 +-
 .../impl/scan/TestColumnsArrayFramework.java       |   4 +-
 .../physical/impl/scan/TestColumnsArrayParser.java |   4 +-
 .../physical/impl/scan/TestFileScanFramework.java  |   4 +-
 .../impl/scan/TestImplicitColumnParser.java        |   4 +-
 .../impl/scan/TestImplicitColumnProjection.java    |   4 +-
 .../physical/impl/scan/TestScanBatchWriters.java   |   4 +-
 .../physical/impl/scan/TestScanOperExecBasics.java |   4 +-
 .../impl/scan/TestScanOperExecEarlySchema.java     |   4 +-
 .../impl/scan/TestScanOperExecLateSchema.java      |   4 +-
 .../impl/scan/TestScanOperExecOuputSchema.java     |   4 +-
 .../impl/scan/TestScanOperExecOverflow.java        |   4 +-
 .../impl/scan/TestScanOperExecSmoothing.java       |   4 +-
 .../impl/scan/TestScanOrchestratorEarlySchema.java |   4 +-
 .../scan/TestScanOrchestratorImplicitColumns.java  |   4 +-
 .../impl/scan/TestScanOrchestratorLateSchema.java  |   4 +-
 .../impl/scan/convert/TestColumnConverter.java     |   4 +-
 .../scan/project/TestConstantColumnLoader.java     |   4 +-
 .../impl/scan/project/TestNullColumnLoader.java    |   4 +-
 .../scan/project/TestReaderLevelProjection.java    |   4 +-
 .../impl/scan/project/TestRowBatchMerger.java      |   4 +-
 .../impl/scan/project/TestSchemaSmoothing.java     |   4 +-
 .../physical/impl/validate/TestBatchValidator.java |   4 +-
 .../impl/TestResultSetLoaderDictArray.java         |   4 +-
 .../resultSet/impl/TestResultSetLoaderDicts.java   |   4 +-
 .../impl/TestResultSetLoaderEmptyProject.java      |   4 +-
 .../resultSet/impl/TestResultSetLoaderLimits.java  |   4 +-
 .../impl/TestResultSetLoaderMapArray.java          |   4 +-
 .../resultSet/impl/TestResultSetLoaderMaps.java    |   4 +-
 .../impl/TestResultSetLoaderOmittedValues.java     |   9 +-
 .../impl/TestResultSetLoaderOverflow.java          |   4 +-
 .../impl/TestResultSetLoaderProtocol.java          |   7 +-
 .../impl/TestResultSetLoaderRepeatedList.java      |   4 +-
 .../resultSet/impl/TestResultSetLoaderTorture.java |   4 +-
 .../resultSet/impl/TestResultSetLoaderUnions.java  |   4 +-
 .../resultSet/impl/TestResultSetSchemaChange.java  |   4 +-
 .../resultSet/impl/TestResultVectorCache.java      |   4 +-
 .../resultSet/project/TestTupleProjection.java     |   4 +-
 .../exec/physical/rowSet/TestDummyWriter.java      |   4 +-
 .../exec/physical/rowSet/TestFillEmpties.java      |   4 +-
 .../exec/physical/rowSet/TestFixedWidthWriter.java |   4 +-
 .../physical/rowSet/TestHyperVectorReaders.java    |   4 +-
 .../exec/physical/rowSet/TestIndirectReaders.java  |   4 +-
 .../exec/physical/rowSet/TestMapAccessors.java     |   4 +-
 .../physical/rowSet/TestOffsetVectorWriter.java    |   4 +-
 .../physical/rowSet/TestRepeatedListAccessors.java |   4 +-
 .../drill/exec/physical/rowSet/TestRowSet.java     |   4 +-
 .../exec/physical/rowSet/TestScalarAccessors.java  |   4 +-
 .../exec/physical/rowSet/TestSchemaBuilder.java    |   4 +-
 .../physical/rowSet/TestVariableWidthWriter.java   |   4 +-
 .../exec/physical/rowSet/TestVariantAccessors.java |   4 +-
 .../org/apache/drill/exec/sql/TestAnalyze.java     |   9 +-
 .../drill/exec/store/json/BaseTestJsonReader.java  |  60 ++
 .../drill/exec/store/json/TestJsonModes.java       |   4 +-
 .../drill/exec/store/json/TestJsonReaderFns.java   | 269 +++++++++
 .../exec/store/json/TestJsonReaderQueries.java     | 634 +++++++++++++++++++++
 .../exec/store/json/TestJsonReaderWithSchema.java  |  24 +
 .../exec/store/json/TestJsonRecordReader.java      | 196 +++++--
 .../drill/exec/store/json/TestJsonScanOp.java      | 271 +++++++++
 .../apache/drill/exec/store/log/TestLogReader.java |   4 +-
 .../drill/exec/store/mock/TestMockPlugin.java      |   4 +-
 .../drill/exec/store/mock/TestMockRowReader.java   |   4 +-
 .../store/sequencefile/TestSequenceFileReader.java |   4 +-
 .../complex/writer/TestComplexTypeWriter.java      |  27 +-
 .../vector/complex/writer/TestExtendedTypes.java   |  61 +-
 .../complex/writer/TestJsonEscapeAnyChar.java      |  28 +-
 .../exec/vector/complex/writer/TestJsonNanInf.java |  89 ++-
 .../exec/vector/complex/writer/TestJsonReader.java | 405 +++----------
 .../test/rowSet/test/TestRowSetComparison.java     |   4 +-
 .../main/codegen/templates/HolderReaderImpl.java   |   7 +-
 .../vector/complex/impl/SingleMapReaderImpl.java   |   2 -
 .../record/metadata/TestMetadataProperties.java    |   4 +-
 .../exec/record/metadata/TestTupleSchema.java      |   4 +-
 124 files changed, 2373 insertions(+), 847 deletions(-)

diff --git a/common/src/test/java/org/apache/drill/categories/RowSetTests.java b/common/src/test/java/org/apache/drill/categories/RowSetTest.java
similarity index 97%
rename from common/src/test/java/org/apache/drill/categories/RowSetTests.java
rename to common/src/test/java/org/apache/drill/categories/RowSetTest.java
index eb8300610b..44ea322072 100644
--- a/common/src/test/java/org/apache/drill/categories/RowSetTests.java
+++ b/common/src/test/java/org/apache/drill/categories/RowSetTest.java
@@ -21,7 +21,7 @@ package org.apache.drill.categories;
  * Junit category marker. <br>
  * A category for tests that test the RowSet, ResultSetLoader and related mechanisms.
  */
-public interface RowSetTests {
+public interface RowSetTest {
   /**
    * tag for JUnit5
    */
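
The renamed marker doubles as a JUnit 4 category and, through its TAG constant, a JUnit 5 tag; both usages appear throughout the hunks below. A minimal sketch of the two styles (class names invented for illustration):

    import org.apache.drill.categories.RowSetTest;
    import org.junit.experimental.categories.Category;
    import org.junit.jupiter.api.Tag;

    @Category(RowSetTest.class)  // JUnit 4 style, as in the contrib format plugins
    public class ExampleJUnit4RowSetTest { }

    @Tag(RowSetTest.TAG)         // JUnit 5 style, as in the secured Phoenix tests
    class ExampleJUnit5RowSetTest { }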
diff --git a/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java b/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java
index 1df2687cc0..c9ce87c753 100644
--- a/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java
+++ b/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.esri;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -36,7 +36,7 @@ import java.nio.file.Paths;
 
 import static org.junit.Assert.assertEquals;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestShapefileFormatPlugin extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java b/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
index 9faf64e95e..97c8fff753 100644
--- a/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
+++ b/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.excel;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -47,7 +47,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestExcelFormat extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java b/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java
index e427216d07..9e1e04d9a9 100644
--- a/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java
+++ b/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.hdf5;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -43,7 +43,7 @@ import java.util.List;
 import static org.junit.Assert.assertEquals;
 import static org.apache.drill.test.QueryTestUtil.generateCompressedFile;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestHDF5Format extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java b/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
index 4b41562dcb..877cff99cd 100644
--- a/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
+++ b/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.httpd;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -46,7 +46,7 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestHTTPDLogReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReaderUserAgent.java b/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReaderUserAgent.java
index 8b23efbbfc..5edcc2bcaa 100644
--- a/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReaderUserAgent.java
+++ b/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReaderUserAgent.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.store.httpd;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -36,7 +36,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestHTTPDLogReaderUserAgent extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-image/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java b/contrib/format-image/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java
index 10f2e03710..f6894a1601 100644
--- a/contrib/format-image/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java
+++ b/contrib/format-image/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java
@@ -27,7 +27,7 @@ import java.nio.file.Paths;
 import java.time.Instant;
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -42,7 +42,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestImageRecordReader extends ClusterTest {
 
   @BeforeClass
@@ -233,4 +233,4 @@ public class TestImageRecordReader extends ClusterTest {
 
     new RowSetComparison(expected).verifyAndClearAll(sets);
   }
-}
\ No newline at end of file
+}
diff --git a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java
index 9a27276363..ed46540262 100644
--- a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java
+++ b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.pcap;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.exec.store.pcap.plugin.PcapFormatConfig;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterTest;
@@ -29,7 +29,7 @@ import java.time.LocalDateTime;
 import java.time.Month;
 
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestPcapEVFReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngRecordReader.java b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngRecordReader.java
index ceb76bfa51..7c9cd40044 100644
--- a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngRecordReader.java
+++ b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngRecordReader.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertEquals;
 import java.nio.file.Paths;
 import java.time.Instant;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -39,7 +39,7 @@ import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestPcapngRecordReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngStatRecordReader.java b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngStatRecordReader.java
index 9971a886e0..c202fdeabb 100644
--- a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngStatRecordReader.java
+++ b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngStatRecordReader.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 
 import java.nio.file.Paths;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -37,7 +37,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestPcapngStatRecordReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-pdf/src/test/java/org/apache/drill/exec/store/pdf/TestPdfFormat.java b/contrib/format-pdf/src/test/java/org/apache/drill/exec/store/pdf/TestPdfFormat.java
index 1383448fb6..b304629406 100644
--- a/contrib/format-pdf/src/test/java/org/apache/drill/exec/store/pdf/TestPdfFormat.java
+++ b/contrib/format-pdf/src/test/java/org/apache/drill/exec/store/pdf/TestPdfFormat.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.pdf;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -42,7 +42,7 @@ import static org.apache.drill.test.QueryTestUtil.generateCompressedFile;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestPdfFormat extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-sas/src/test/java/org/apache/drill/exec/store/sas/TestSasReader.java b/contrib/format-sas/src/test/java/org/apache/drill/exec/store/sas/TestSasReader.java
index be0965ebea..40696baab6 100644
--- a/contrib/format-sas/src/test/java/org/apache/drill/exec/store/sas/TestSasReader.java
+++ b/contrib/format-sas/src/test/java/org/apache/drill/exec/store/sas/TestSasReader.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.sas;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -39,7 +39,7 @@ import static org.junit.Assert.assertEquals;
 import static org.apache.drill.test.QueryTestUtil.generateCompressedFile;
 
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSasReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-spss/src/test/java/org/apache/drill/exec/store/spss/TestSpssReader.java b/contrib/format-spss/src/test/java/org/apache/drill/exec/store/spss/TestSpssReader.java
index b54c4f88ac..5331b2f772 100644
--- a/contrib/format-spss/src/test/java/org/apache/drill/exec/store/spss/TestSpssReader.java
+++ b/contrib/format-spss/src/test/java/org/apache/drill/exec/store/spss/TestSpssReader.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.spss;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -37,7 +37,7 @@ import java.nio.file.Paths;
 import static org.junit.Assert.assertEquals;
 import static org.apache.drill.test.QueryTestUtil.generateCompressedFile;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSpssReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java b/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java
index c75aadd9e6..a3f5829c58 100644
--- a/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java
+++ b/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java
@@ -22,7 +22,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.DataMode;
@@ -43,7 +43,7 @@ import static org.apache.drill.test.QueryTestUtil.generateCompressedFile;
 import static org.apache.drill.test.rowSet.RowSetUtilities.mapArray;
 import static org.junit.Assert.assertEquals;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSyslogFormat extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-xml/src/test/java/org/apache/drill/exec/store/xml/TestXMLReader.java b/contrib/format-xml/src/test/java/org/apache/drill/exec/store/xml/TestXMLReader.java
index b236416d2c..6a9fc11bf4 100644
--- a/contrib/format-xml/src/test/java/org/apache/drill/exec/store/xml/TestXMLReader.java
+++ b/contrib/format-xml/src/test/java/org/apache/drill/exec/store/xml/TestXMLReader.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.xml;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -39,7 +39,7 @@ import static org.apache.drill.test.rowSet.RowSetUtilities.objArray;
 import static org.apache.drill.test.rowSet.RowSetUtilities.strArray;
 import static org.junit.Assert.assertEquals;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestXMLReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixCommandTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixCommandTest.java
index 43fc645cbc..a7ab2d32e8 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixCommandTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixCommandTest.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.store.phoenix;
 
 import static org.junit.Assert.assertEquals;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -34,7 +34,7 @@ import org.junit.experimental.categories.Category;
 import org.junit.runners.MethodSorters;
 
 @FixMethodOrder(MethodSorters.JVM)
-@Category({ SlowTest.class, RowSetTests.class })
+@Category({ SlowTest.class, RowSetTest.class })
 public class PhoenixCommandTest extends PhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixDataTypeTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixDataTypeTest.java
index 2e6c8d01c9..59b97ab04e 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixDataTypeTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixDataTypeTest.java
@@ -30,7 +30,7 @@ import java.time.Instant;
 import java.time.LocalDate;
 import java.time.LocalTime;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -45,7 +45,7 @@ import org.junit.experimental.categories.Category;
 import org.junit.runners.MethodSorters;
 
 @FixMethodOrder(MethodSorters.JVM)
-@Category({ SlowTest.class, RowSetTests.class })
+@Category({ SlowTest.class, RowSetTest.class })
 public class PhoenixDataTypeTest extends PhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java
index ef083cecec..8e091b9b87 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.store.phoenix;
 
 import static org.junit.Assert.assertEquals;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -36,7 +36,7 @@ import org.junit.experimental.categories.Category;
 import org.junit.runners.MethodSorters;
 
 @FixMethodOrder(MethodSorters.JVM)
-@Category({ SlowTest.class, RowSetTests.class })
+@Category({ SlowTest.class, RowSetTest.class })
 public class PhoenixSQLTest extends PhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java
index e0f1ccee8d..1c54ff4482 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.store.phoenix.secured;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -31,7 +31,7 @@ import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
 
 @Tag(SlowTest.TAG)
-@Tag(RowSetTests.TAG)
+@Tag(RowSetTest.TAG)
 public class SecuredPhoenixCommandTest extends SecuredPhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java
index 5fd09629c1..2938f46102 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.store.phoenix.secured;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -44,7 +44,7 @@ import static org.apache.drill.test.rowSet.RowSetUtilities.shortArray;
 import static org.apache.drill.test.rowSet.RowSetUtilities.strArray;
 
 @Tag(SlowTest.TAG)
-@Tag(RowSetTests.TAG)
+@Tag(RowSetTest.TAG)
 public class SecuredPhoenixDataTypeTest extends SecuredPhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java
index c2209ce1f8..86ecd3f8a2 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.store.phoenix.secured;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -35,7 +35,7 @@ import org.junit.jupiter.api.Test;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 @Tag(SlowTest.TAG)
-@Tag(RowSetTests.TAG)
+@Tag(RowSetTest.TAG)
 public class SecuredPhoenixSQLTest extends SecuredPhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
index 5d0451efde..2c4c6fadeb 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.store.phoenix.secured;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.exec.store.phoenix.QueryServerBasicsIT;
 import org.apache.drill.test.BaseTest;
@@ -41,7 +41,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 })
 @Disabled
 @Tag(SlowTest.TAG)
-@Tag(RowSetTests.TAG)
+@Tag(RowSetTest.TAG)
 public class SecuredPhoenixTestSuite extends BaseTest {
 
   private static final org.slf4j.Logger logger = LoggerFactory.getLogger(SecuredPhoenixTestSuite.class);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index 3351869549..695f8b6b02 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -471,6 +471,9 @@ public final class ExecConstants {
   public static final OptionValidator PARQUET_COMPLEX_BATCH_NUM_RECORDS_VALIDATOR = new RangeLongValidator(PARQUET_COMPLEX_BATCH_NUM_RECORDS, 1, ValueVector.MAX_ROW_COUNT -1,
       new OptionDescription("Complex Parquet Reader maximum number of records per batch."));
 
+  public static final String ENABLE_V2_JSON_READER_KEY = "store.json.enable_v2_reader";
+  public static final BooleanValidator ENABLE_V2_JSON_READER_VALIDATOR = new BooleanValidator(ENABLE_V2_JSON_READER_KEY,
+      new OptionDescription("Enable the experimental \"version 2\" JSON reader."));
   public static final String JSON_ALL_TEXT_MODE = "store.json.all_text_mode";
   public static final BooleanValidator JSON_READER_ALL_TEXT_MODE_VALIDATOR = new BooleanValidator(JSON_ALL_TEXT_MODE,
       new OptionDescription("Drill reads all data from the JSON files as VARCHAR. Prevents schema change errors."));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/TypeFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/TypeFunctions.java
index 3a1df8f299..b820c1bdcd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/TypeFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/TypeFunctions.java
@@ -33,7 +33,6 @@ import io.netty.buffer.DrillBuf;
  * Type functions for all types. See UnionFunctions for type functions
  * specifically for the UNION type.
  */
-
 public class TypeFunctions {
 
   @FunctionTemplate(name = "sqlTypeOf",
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java
index 2f9300db6f..9ed6d8f8db 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java
@@ -47,9 +47,9 @@ public class OperatorStats {
   private final long[] schemaCountByInput;
 
 
-  private boolean inProcessing = false;
-  private boolean inSetup = false;
-  private boolean inWait = false;
+  private boolean inProcessing;
+  private boolean inSetup;
+  private boolean inWait;
 
   protected long processingNanos;
   protected long setupNanos;
@@ -185,7 +185,7 @@ public class OperatorStats {
   public synchronized void batchReceived(int inputIndex, long records, boolean newSchema) {
     recordsReceivedByInput[inputIndex] += records;
     batchesReceivedByInput[inputIndex]++;
-    if(newSchema){
+    if (newSchema) {
       schemaCountByInput[inputIndex]++;
     }
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/protocol/VectorContainerAccessor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/protocol/VectorContainerAccessor.java
index 694c91b387..2ae82824be 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/protocol/VectorContainerAccessor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/protocol/VectorContainerAccessor.java
@@ -34,6 +34,14 @@ import org.apache.drill.exec.record.selection.SelectionVector4;
  * Wraps a vector container and optional selection vector in an interface
  * simpler than the entire {@link RecordBatch}. This implementation hosts
  * a container only.
+ * <p>
+ * Separates the idea of a batch schema and data batch. The accessor
+ * can identify a schema even if it has no batches. This occurs for
+ * readers that can identify the schema, but produce no actual data.
+ * <p>
+ * This version is designed for the scan operator, which will
+ * produce a series of different vector containers (which, oddly, must
+ * all contain the same vectors).
  */
 public class VectorContainerAccessor implements BatchAccessor {
 
@@ -66,7 +74,9 @@ public class VectorContainerAccessor implements BatchAccessor {
    */
   public void addBatch(VectorContainer container) {
     setSchema(container);
-    batchCount++;
+    if (container != null) {
+      batchCount++;
+    }
   }
 
   public int batchCount() { return batchCount; }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java
index 62bd729b19..b287e6c822 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java
@@ -313,7 +313,7 @@ public class ExplicitSchemaProjection extends ReaderLevelProjection {
       if (child.isTuple()) {
         members.add(resolveMapMembers(members, child));
       } else {
-        members.add(outputTuple.nullBuilder.add(child.name()));
+        members.add(members.nullBuilder.add(child.name()));
       }
     }
     return mapCol;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
index b79c089d4e..3bf6e71705 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.physical.impl.validate;
 
 import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.record.SimpleVectorWrapper;
 import org.apache.drill.exec.record.VectorAccessible;
@@ -247,6 +248,11 @@ public class BatchValidator {
     return reporter.errorCount() == 0;
   }
 
+
+  public static void validate(RowSet rowSet) {
+    validate(rowSet.container());
+  }
+
   private static ErrorReporter errorReporter(VectorAccessible batch) {
     String opName = batch.getClass().getSimpleName();
     if (LOG_TO_STDOUT) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java
index 83c71f53ee..1723451413 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java
@@ -21,6 +21,7 @@ import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
+import org.apache.drill.exec.vector.BaseDataValueVector;
 import org.apache.drill.exec.vector.FixedWidthVector;
 import org.apache.drill.exec.vector.NullableVector;
 import org.apache.drill.exec.vector.UInt4Vector;
@@ -38,8 +39,10 @@ import org.slf4j.LoggerFactory;
  * Subclasses are specialized for offset vectors or values vectors.
  * (The "single vector" name contrasts with classes that manage compound
  * vectors, such as a data and offsets vector.)
+ * <p>
+ * During overflow it is critical to update the various stored vector
+ * lengths so that serialization/deserialization works correctly.
  */
-
 public abstract class SingleVectorState implements VectorState {
 
   public abstract static class SimpleVectorState extends SingleVectorState {
@@ -64,7 +67,6 @@ public abstract class SingleVectorState implements VectorState {
       // look-ahead vector. Uses vector-level operations for convenience.
       // These aren't very efficient, but overflow does not happen very
       // often.
-
       for (int src = sourceStartIndex; src <= sourceEndIndex; src++, newIndex++) {
         mainVector.copyEntry(newIndex, backupVector, src);
       }
@@ -76,7 +78,6 @@ public abstract class SingleVectorState implements VectorState {
    * vector, or might be the payload part of a scalar array (repeated scalar)
    * vector.
    */
-
   public static class FixedWidthVectorState extends SimpleVectorState {
 
      public FixedWidthVectorState(WriterEvents writer, ValueVector mainVector) {
@@ -101,7 +102,6 @@ public abstract class SingleVectorState implements VectorState {
       int size = super.allocateVector(vector, cardinality);
 
       // IsSet ("bit") vectors rely on values being initialized to zero (unset.)
-
       ((FixedWidthVector) vector).zeroVector();
       return size;
     }
@@ -112,7 +112,6 @@ public abstract class SingleVectorState implements VectorState {
    * vector, or might be the payload part of a scalar array (repeated scalar)
    * vector.
    */
-
   public static class VariableWidthVectorState extends SimpleVectorState {
 
     private final ColumnMetadata schema;
@@ -126,11 +125,25 @@ public abstract class SingleVectorState implements VectorState {
     public int allocateVector(ValueVector vector, int cardinality) {
 
       // Cap the allocated size to the maximum.
-
       int size = (int) Math.min(ValueVector.MAX_BUFFER_SIZE, (long) cardinality * schema.expectedWidth());
       ((VariableWidthVector) vector).allocateNew(size, cardinality);
       return vector.getAllocatedSize();
     }
+
+    @Override
+    public void rollover(int cardinality) {
+      super.rollover(cardinality);
+
+      // Adjust offset vector length
+      int offsetLength = writer.rowStartIndex() + 1;
+      VariableWidthVector varWidthVector = ((VariableWidthVector) backupVector);
+      UInt4Vector offsetVector = varWidthVector.getOffsetVector();
+      offsetVector.getMutator().setValueCount(offsetLength);
+
+      // Adjust data vector length.
+      ((BaseDataValueVector) backupVector).getBuffer().writerIndex(
+          offsetVector.getAccessor().get(offsetLength - 1));
+    }
   }
 
   /**
@@ -140,7 +153,6 @@ public abstract class SingleVectorState implements VectorState {
    * with the offsets vector (here) or the values vector to allow the needed
    * fine control over overflow operations.
    */
-
   public static class OffsetVectorState extends SingleVectorState {
 
     private static final Logger logger = LoggerFactory.getLogger(OffsetVectorState.class);
@@ -151,7 +163,6 @@ public abstract class SingleVectorState implements VectorState {
     * child type is known so this field cannot be final. It will,
      * however, change value only once: from null to a valid writer.
      */
-
     private WriterPosition childWriter;
 
     public OffsetVectorState(WriterEvents writer, ValueVector mainVector,
@@ -185,7 +196,6 @@ public abstract class SingleVectorState implements VectorState {
 
       // This is an offset vector. The data to copy is one greater
       // than the row index.
-
       sourceStartIndex++;
       sourceEndIndex++;
 
@@ -204,10 +214,10 @@ public abstract class SingleVectorState implements VectorState {
       // offset vector position contains the offset of the start of the data
       // for the current row. We must subtract that offset from each copied
       // value to adjust the offset for the destination.
-
-      UInt4Vector.Accessor sourceAccessor = ((UInt4Vector) backupVector).getAccessor();
-      UInt4Vector.Mutator destMutator = ((UInt4Vector) mainVector).getMutator();
-      int offset = childWriter.rowStartIndex();
+      UInt4Vector sourceVector = ((UInt4Vector) backupVector);
+      final UInt4Vector.Accessor sourceAccessor = sourceVector.getAccessor();
+      final UInt4Vector.Mutator destMutator = ((UInt4Vector) mainVector).getMutator();
+      final int offset = childWriter.rowStartIndex();
       int newIndex = 1;
       logger.trace("Offset vector: copy {} values from {} to {} with offset {}",
         Math.max(0, sourceEndIndex - sourceStartIndex + 1),
@@ -221,13 +231,18 @@ public abstract class SingleVectorState implements VectorState {
         destMutator.set(newIndex, sourceAccessor.get(src) - offset);
       }
 
+      // Adjust offset vector length
+      int offsetLength = writer.rowStartIndex() + 1;
+      sourceVector.getMutator().setValueCount(offsetLength);
+
       // Getting offsets right was a pain. If you modify this code,
       // you'll likely relive that experience. Enabling the next two
       // lines will help reveal some of the mystery around offsets and their
       // confusing off-by-one design.
 
-//      VectorPrinter.printOffsets((UInt4Vector) backupVector, sourceStartIndex - 1, sourceEndIndex - sourceStartIndex + 3);
-//      VectorPrinter.printOffsets((UInt4Vector) mainVector, 0, newIndex);
+      // VectorChecker.verifyOffsets("nested", sourceVector);
+      // VectorPrinter.printOffsets(sourceVector, sourceStartIndex - 1, sourceEndIndex - sourceStartIndex + 3);
+      // VectorPrinter.printOffsets((UInt4Vector) mainVector, 0, newIndex);
     }
   }
 
@@ -265,7 +280,6 @@ public abstract class SingleVectorState implements VectorState {
    *
    * @param cardinality the number of unique columns in the row
    */
-
   @Override
   public void rollover(int cardinality) {
 
@@ -274,13 +288,11 @@ public abstract class SingleVectorState implements VectorState {
     // Remember the last write index for the original vector.
     // This tells us the end of the set of values to move, while the
     // sourceStartIndex above tells us the start.
-
     int sourceEndIndex = writer.lastWriteIndex();
 
     // Switch buffers between the backup vector and the writer's output
     // vector. Done this way because writers are bound to vectors and
     // we wish to keep the binding.
-
     if (backupVector == null) {
       backupVector = TypeHelper.getNewVector(mainVector.getField(),
           parseVectorType(mainVector), mainVector.getAllocator(), null);
@@ -291,7 +303,6 @@ public abstract class SingleVectorState implements VectorState {
 
     // Copy overflow values from the full vector to the new
     // look-ahead vector.
-
     copyOverflow(sourceStartIndex, sourceEndIndex);
 
     // At this point, the writer is positioned to write to the look-ahead
@@ -312,7 +323,6 @@ public abstract class SingleVectorState implements VectorState {
    * metadata declared within that vector
    * @return the actual major type of the vector
    */
-
   protected static MajorType parseVectorType(ValueVector vector) {
     MajorType purportedType = vector.getField().getType();
     if (purportedType.getMode() != DataMode.OPTIONAL) {
@@ -322,7 +332,6 @@ public abstract class SingleVectorState implements VectorState {
     // For nullable vectors, the purported type can be wrong. The "outer"
     // vector is nullable, but the internal "values" vector is required, though
     // it carries a nullable type -- that is, the metadata lies.
-
     if (vector instanceof NullableVector) {
       return purportedType;
     }
@@ -339,7 +348,6 @@ public abstract class SingleVectorState implements VectorState {
     * overflow buffers away in the backup vector.
     * Restore the main vector's last write position.
     */
-
   @Override
   public void harvestWithLookAhead() {
     mainVector.exchange(backupVector);
@@ -350,7 +358,6 @@ public abstract class SingleVectorState implements VectorState {
    * now ready to start writing to the next batch. Initialize that new batch
    * with the look-ahead values saved during overflow of the previous batch.
    */
-
   @Override
   public void startBatchWithLookAhead() {
     mainVector.exchange(backupVector);
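
To make the offset rebasing in copyOverflow concrete: each offset carried into the look-ahead vector is shifted down by the data position at which the overflow row starts, so the new batch's offsets again begin at zero. A tiny sketch with plain int arrays (values invented; not Drill code):

    // Offsets for 4 variable-width values: rows end at positions 5, 9, 14, 20.
    int[] source = {0, 5, 9, 14, 20};
    int sourceStartIndex = 3;   // first offset owned by the overflow row (row index + 1)
    int sourceEndIndex = 4;     // last written offset (also bumped by one)
    int offset = 9;             // childWriter.rowStartIndex(): where the row's data begins
    int[] dest = new int[3];    // dest[0] stays 0 by convention
    int newIndex = 1;
    for (int src = sourceStartIndex; src <= sourceEndIndex; src++, newIndex++) {
      dest[newIndex] = source[src] - offset;  // 14-9=5, 20-9=11
    }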
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/model/single/SimpleReaderBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/model/single/SimpleReaderBuilder.java
index 3220efa57f..4b6fb40158 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/model/single/SimpleReaderBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/model/single/SimpleReaderBuilder.java
@@ -69,7 +69,6 @@ import org.apache.drill.exec.vector.complex.UnionVector;
  * variant (LIST, UNION) and tuple (MAP) columns, the tree grows
  * quite complex.
  */
-
 public class SimpleReaderBuilder extends ReaderBuilder {
 
   private static final SimpleReaderBuilder INSTANCE = new SimpleReaderBuilder();
@@ -133,7 +132,6 @@ public class SimpleReaderBuilder extends ReaderBuilder {
       case LATE:
 
         // Occurs for a list with no type: a list of nulls.
-
         return AbstractScalarReader.nullReader(descrip.metadata);
       default:
         return buildScalarReader(va, descrip.metadata);
@@ -169,22 +167,19 @@ public class SimpleReaderBuilder extends ReaderBuilder {
     final boolean isArray = mode == DataMode.REPEATED;
 
     // Map type
-
     final AbstractObjectReader mapReader = MapReader.build(
         descrip.metadata,
         isArray ? null : va,
         buildMapMembers(vector,
             descrip.parent.childProvider(descrip.metadata)));
 
-    // Single map
-
-    if (! isArray) {
+    if (isArray) {
+      // Repeated map
+      return ArrayReaderImpl.buildTuple(descrip.metadata, va, mapReader);
+    } else {
+      // Single map
       return mapReader;
     }
-
-    // Repeated map
-
-    return ArrayReaderImpl.buildTuple(descrip.metadata, va, mapReader);
   }
 
   protected List<AbstractObjectReader> buildMapMembers(AbstractMapVector mapVector, MetadataProvider provider) {
@@ -208,7 +203,6 @@ public class SimpleReaderBuilder extends ReaderBuilder {
       // Will throw an exception for unsupported types.
       // so call this only if the MajorType reports that the type
       // already exists.
-
       final ValueVector memberVector = vector.getMember(type);
       final VectorDescrip memberDescrip = new VectorDescrip(provider, i++, memberVector.getField());
       variants[type.ordinal()] = buildVectorReader(memberVector, memberDescrip);
@@ -262,7 +256,6 @@ public class SimpleReaderBuilder extends ReaderBuilder {
    * then the union must also be not null. (Experience will show whether
    * existing code does, in fact, follow that convention.)
    */
-
   private AbstractObjectReader build1DList(ListVector vector, VectorAccessor listAccessor,
       VectorDescrip listDescrip) {
     final ValueVector dataVector = vector.getDataVector();
@@ -272,7 +265,6 @@ public class SimpleReaderBuilder extends ReaderBuilder {
       // At the metadata level, a list always holds a union. But, at the
       // implementation layer, a union of a single type is collapsed out
       // to leave just a list of that single type.
-
       dataMetadata = listDescrip;
     } else {
       dataMetadata = new VectorDescrip(listDescrip.childProvider(), 0, dataVector.getField());
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
index 2ee1047e91..d4d385c248 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
@@ -100,27 +100,36 @@ public class BatchSchema implements Iterable<MaterializedField> {
   }
 
   /**
-   * DRILL-5525: the semantics of this method are badly broken.
-   * Caveat emptor.
+   * DRILL-5525: the semantics of this method are badly broken. Caveat emptor.
    *
-   * This check used for detecting actual schema change inside operator record batch will not work for
-   * AbstractContainerVectors (like MapVector). In each record batch a reference to incoming batch schema is
-   * stored (let say S:{a: int}) and then equals is called on that stored reference and current incoming batch schema.
-   * Internally schema object has references to Materialized fields from vectors in container. If there is change in
-   * incoming batch schema, then the upstream will create a new ValueVector in its output container with the new
-   * detected type, which in turn will have new instance for Materialized Field. Then later a new BatchSchema object
-   * is created for this new incoming batch (let say S":{a":varchar}). The operator calling equals will have reference
-   * to old schema object (S) and hence first check will not be satisfied and then it will call equals on each of the
-   * Materialized Field (a.equals(a")). Since new materialized field is created for newly created vector the equals
-   * check on field will return false. And schema change will be detected in this case.
-   * Now consider instead of int vector there is a MapVector such that initial schema was (let say S:{a:{b:int, c:int}}
-   * and then later schema for Map field c changes, then in container Map vector will be found but later the children
-   * vector for field c will be replaced. This new schema object will be created as (S":{a:{b:int, c":varchar}}). Now
-   * when S.equals(S") is called it will eventually call a.equals(a) which will return true even though the schema of
-   * children value vector c has changed. This is because no new vector is created for field (a) and hence it's object
-   * reference to MaterializedField has not changed which will be reflected in both old and new schema instances.
-   * Hence we should make use of {@link BatchSchema#isEquivalent(BatchSchema)} method instead since
-   * {@link MaterializedField#isEquivalent(MaterializedField)} method is updated to remove the reference check.
+   * This check used for detecting actual schema change inside operator record
+   * batch will not work for AbstractContainerVectors (like MapVector). In each
+   * record batch a reference to incoming batch schema is stored (let say S:{a:
+   * int}) and then equals is called on that stored reference and current
+   * incoming batch schema. Internally schema object has references to
+   * Materialized fields from vectors in container. If there is change in
+   * incoming batch schema, then the upstream will create a new ValueVector in
+   * its output container with the new detected type, which in turn will have
+   * new instance for Materialized Field. Then later a new BatchSchema object is
+   * created for this new incoming batch (let say S":{a":varchar}). The operator
+   * calling equals will have reference to old schema object (S) and hence first
+   * check will not be satisfied and then it will call equals on each of the
+   * Materialized Field (a.equals(a")). Since new materialized field is created
+   * for newly created vector the equals check on field will return false. And
+   * schema change will be detected in this case. Now consider instead of int
+   * vector there is a MapVector such that initial schema was (let say
+   * S:{a:{b:int, c:int}} and then later schema for Map field c changes, then in
+   * container Map vector will be found but later the children vector for field
+   * c will be replaced. This new schema object will be created as
+   * (S":{a:{b:int, c":varchar}}). Now when S.equals(S") is called it will
+   * eventually call a.equals(a) which will return true even though the schema
+   * of children value vector c has changed. This is because no new vector is
+   * created for field (a) and hence it's object reference to MaterializedField
+   * has not changed which will be reflected in both old and new schema
+   * instances. Hence we should make use of
+   * {@link BatchSchema#isEquivalent(BatchSchema)} method instead since
+   * {@link MaterializedField#isEquivalent(MaterializedField)} method is updated
+   * to remove the reference check.
    */
   @Override
   public boolean equals(Object obj) {
@@ -201,18 +210,21 @@ public class BatchSchema implements Iterable<MaterializedField> {
    * @param t2
    * @return
    */
-  private boolean majorTypeEqual(MajorType t1, MajorType t2) {
+  private static boolean majorTypeEqual(MajorType t1, MajorType t2) {
     if (t1.equals(t2)) {
       return true;
-    } else if (!t1.getMinorType().equals(t2.getMinorType())) {
-      return false;
-    } else if (!t1.getMode().equals(t2.getMode())) {
+    }
+    // TODO: the next two checks are redundant; equals() already performs them.
+    if (!t1.getMinorType().equals(t2.getMinorType())) {
       return false;
-    } else if (!Sets.newHashSet(t1.getSubTypeList()).equals(Sets.newHashSet(t2.getSubTypeList()))) {
+    }
+    if (!t1.getMode().equals(t2.getMode())) {
       return false;
-    } else {
-      return true;
     }
+
+    // TODO: this check accomplishes nothing: the equals() call above
+    // already compares subtypes, albeit in a different way.
+    return Sets.newHashSet(t1.getSubTypeList()).equals(Sets.newHashSet(t2.getSubTypeList()));
   }
 
   /**
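
For illustration (not part of this patch), a minimal sketch of the comparison
the Javadoc above recommends; oldSchema and newSchema are assumed to be the
stored reference and the newly arrived BatchSchema:

    // equals() can miss a type change nested inside a MapVector because the
    // parent MaterializedField reference is unchanged; isEquivalent()
    // compares the fields structurally instead.
    if (!newSchema.isEquivalent(oldSchema)) {
      // handle the schema change, including changes to map children
    }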
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
index 9d1c6a3d1a..551971b592 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
@@ -167,7 +167,7 @@ public class VectorContainer implements VectorAccessible {
     final ValueVector vector;
     if (id != null) {
       vector = getValueAccessorById(id.getFieldIds()).getValueVector();
-      if (id.getFieldIds().length == 1 && !vector.getField().getType().equals(field.getType())) {
+      if (id.getFieldIds().length == 1 && !vector.getField().isEquivalent(field)) {
         final ValueVector newVector = TypeHelper.getNewVector(field, this.getAllocator(), callBack);
         replace(vector, newVector);
         return (T) newVector;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
index 30791e9474..0a514df0a8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
@@ -38,10 +38,12 @@ public class SelectionVector2 implements AutoCloseable {
   public static final int RECORD_SIZE = 2;
 
   private final BufferAllocator allocator;
-  // Indicates number of indexes stored in the SV2 buffer which may be less than actual number of rows stored in
-  // RecordBatch container owning this SV2 instance
+  // The number of indexes stored in the SV2 buffer, which may be less
+  // than the actual number of rows stored in the RecordBatch container
+  // owning this SV2 instance
   private int recordCount;
-  // Indicates actual number of rows in the RecordBatch container which owns this SV2 instance
+  // The actual number of rows in the RecordBatch
+  // container that owns this SV2 instance
   private int batchActualRecordCount = -1;
   private DrillBuf buffer = DeadBuf.DEAD_BUFFER;
 
@@ -82,12 +84,11 @@ public class SelectionVector2 implements AutoCloseable {
     DrillBuf bufferHandle = buffer;
 
     if (clear) {
-      /* Increment the ref count for this buffer */
+      // Increment the ref count for this buffer
       bufferHandle.retain(1);
 
-      /* We are passing ownership of the buffer to the
-       * caller. clear the buffer from within our selection vector
-       */
+      // We are passing ownership of the buffer to the
+      // caller. clear the buffer from within our selection vector
       clear();
     }
 
@@ -95,7 +96,7 @@ public class SelectionVector2 implements AutoCloseable {
   }
 
   public void setBuffer(DrillBuf bufferHandle) {
-    /* clear the existing buffer */
+    // clear the existing buffer
     clear();
 
     buffer = bufferHandle;
@@ -106,6 +107,10 @@ public class SelectionVector2 implements AutoCloseable {
     return buffer.getChar(index * RECORD_SIZE);
   }
 
+  public void setIndex(int index, char value) {
+    buffer.setChar(index * RECORD_SIZE, value);
+  }
+
   public long getDataAddr() {
     return buffer.memoryAddress();
   }
@@ -135,10 +140,9 @@ public class SelectionVector2 implements AutoCloseable {
     newSV.batchActualRecordCount = batchActualRecordCount;
     newSV.buffer = buffer;
 
-    /* Since buffer and newSV.buffer essentially point to the
-     * same buffer, if we don't do a retain() on the newSV's
-     * buffer, it might get freed.
-     */
+    // Since buffer and newSV.buffer essentially point to the
+    // same buffer, if we don't do a retain() on the newSV's
+    // buffer, it might get freed.
     newSV.buffer.retain(1);
     clear();
     return newSV;
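
A hedged usage sketch for the new setIndex() accessor, assuming SV2's existing
allocateNew(int) and setRecordCount(int) methods: fill an identity selection
over n rows. Each entry occupies RECORD_SIZE (2) bytes, hence the char type.

    SelectionVector2 sv2 = new SelectionVector2(allocator);
    sv2.allocateNew(n);                   // sizes the buffer for n 2-byte entries
    for (int i = 0; i < n; i++) {
      sv2.setIndex(i, (char) i);          // identity mapping: slot i selects row i
    }
    sv2.setRecordCount(n);
    char firstSelected = sv2.getIndex(0); // reads the entry written above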
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
index 460dbb7979..063b840c37 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
@@ -172,7 +172,11 @@ public class QueryResultHandler {
       resultsListener.dataArrived(batch, throttle);
       // That releases batch if successful.
     } catch (Exception e) {
-      batch.release();
+      try {
+        batch.release();
+      } catch (IllegalStateException e2) {
+        // Ignore: the batch has already been released
+      }
       resultsListener.submissionFailed(UserException.systemError(e).build(logger));
     }
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
index 6c89470009..3e1f6a73ec 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
@@ -190,6 +190,7 @@ public class SystemOptionManager extends BaseOptionManager implements AutoClosea
       new OptionDefinition(ExecConstants.PARQUET_FLAT_BATCH_MEMORY_SIZE_VALIDATOR, new OptionMetaData(OptionValue.AccessibleScopes.SYSTEM_AND_SESSION, true, true)),
       new OptionDefinition(ExecConstants.PARQUET_COMPLEX_BATCH_NUM_RECORDS_VALIDATOR, new OptionMetaData(OptionValue.AccessibleScopes.SYSTEM_AND_SESSION, true, true)),
       new OptionDefinition(ExecConstants.PARTITIONER_MEMORY_REDUCTION_THRESHOLD_VALIDATOR),
+      new OptionDefinition(ExecConstants.ENABLE_V2_JSON_READER_VALIDATOR),
       new OptionDefinition(ExecConstants.JSON_READER_ALL_TEXT_MODE_VALIDATOR),
       new OptionDefinition(ExecConstants.JSON_WRITER_NAN_INF_NUMBERS_VALIDATOR),
       new OptionDefinition(ExecConstants.JSON_READER_NAN_INF_NUMBERS_VALIDATOR),
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
index 15ddf6b73e..fad3634d27 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
@@ -48,7 +48,8 @@ import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.record.CloseableRecordBatch;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
+import org.apache.drill.exec.store.ColumnExplorer;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.store.RecordWriter;
 import org.apache.drill.exec.store.StatisticsRecordWriter;
@@ -130,7 +131,7 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
     /**
      *  Choose whether to use the "traditional" or "enhanced" reader
      *  structure. Can also be selected at runtime by overriding
-     *  {@link #useEnhancedScan()}.
+     *  {@link #useEnhancedScan(OptionSet)}.
      */
     private final ScanFrameworkVersion scanVersion;
 
@@ -515,7 +516,7 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
    * @return true to use the enhanced scan framework, false for the
    * traditional scan-batch framework
    */
-  protected ScanFrameworkVersion scanVersion(OptionManager options) {
+  protected ScanFrameworkVersion scanVersion(OptionSet options) {
     return easyConfig.scanVersion;
   }
 
@@ -545,15 +546,15 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
   /**
    * Initialize the scan framework builder with standard options.
    * Call this from the plugin-specific
-   * {@link #frameworkBuilder(OptionManager, EasySubScan)} method.
+   * {@link #frameworkBuilder(OptionSet, EasySubScan)} method.
    * The plugin can then customize/revise options as needed.
    * <p>
    * For EVF V1, to be removed.
    *
    * @param builder the scan framework builder you create in the
-   * {@link #frameworkBuilder(OptionManager, EasySubScan)} method
+   * {@link #frameworkBuilder(OptionSet, EasySubScan)} method
    * @param scan the physical scan operator definition passed to
-   * the {@link #frameworkBuilder(OptionManager, EasySubScan)} method
+   * the {@link #frameworkBuilder(OptionSet, EasySubScan)} method
    */
   protected void initScanBuilder(FileScanBuilder builder, EasySubScan scan) {
     EvfV1ScanBuilder.initScanBuilder(this, builder, scan);
@@ -563,7 +564,7 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
    * For EVF V1, to be removed.
    */
   public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(
-      EasySubScan scan, OptionManager options) throws ExecutionSetupException {
+      EasySubScan scan, OptionSet options) throws ExecutionSetupException {
     throw new ExecutionSetupException("Must implement newBatchReader() if using the enhanced framework.");
   }
 
@@ -583,7 +584,7 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
    * @throws ExecutionSetupException for all setup failures
    */
   protected FileScanBuilder frameworkBuilder(
-      OptionManager options, EasySubScan scan) throws ExecutionSetupException {
+      OptionSet options, EasySubScan scan) throws ExecutionSetupException {
     throw new ExecutionSetupException("Must implement frameworkBuilder() if using the enhanced framework.");
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
index d5f498b5a3..7fe6ffaa55 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
@@ -49,6 +49,13 @@ import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.JsonNode;
 
+/**
+ * Old-style JSON record reader. Not used when reading JSON files,
+ * but still used by some "mini-plan" unit tests and by the VALUES
+ * reader. As a result, this reader cannot be removed; it must be
+ * maintained until those uses are converted to the new-style
+ * JSON reader.
+ */
 public class JSONRecordReader extends AbstractRecordReader {
   private static final Logger logger = LoggerFactory.getLogger(JSONRecordReader.class);
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonBatchReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonBatchReader.java
new file mode 100644
index 0000000000..48d44f42a4
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonBatchReader.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.easy.json;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.drill.common.exceptions.ChildErrorContext;
+import org.apache.drill.common.exceptions.CustomErrorContext;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchemaNegotiator;
+import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
+import org.apache.drill.exec.store.dfs.DrillFileSystem;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoader;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder;
+import org.apache.hadoop.mapred.FileSplit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class JsonBatchReader implements ManagedReader<FileSchemaNegotiator> {
+  private static final Logger logger = LoggerFactory.getLogger(JsonBatchReader.class);
+
+  private JsonLoader jsonLoader;
+
+  @Override
+  public boolean open(FileSchemaNegotiator negotiator) {
+    DrillFileSystem fileSystem = negotiator.fileSystem();
+    FileSplit split = negotiator.split();
+
+    InputStream stream;
+    try {
+      stream = fileSystem.openPossiblyCompressedStream(split.getPath());
+    } catch (IOException e) {
+      throw UserException
+          .dataReadError(e)
+          .addContext("Failure to open JSON file", split.getPath().toString())
+          .build(logger);
+    }
+    CustomErrorContext errorContext = new ChildErrorContext(negotiator.parentErrorContext()) {
+      @Override
+      public void addContext(UserException.Builder builder) {
+        super.addContext(builder);
+        builder.addContext("File name", split.getPath().toString());
+      }
+    };
+    negotiator.setErrorContext(errorContext);
+
+    // Create the JSON loader (high-level parser).
+    jsonLoader = new JsonLoaderBuilder()
+        .resultSetLoader(negotiator.build())
+        .standardOptions(negotiator.queryOptions())
+        .errorContext(errorContext)
+        .fromStream(stream)
+        .build();
+    return true;
+  }
+
+  @Override
+  public boolean next() {
+    return jsonLoader.readBatch();
+  }
+
+  @Override
+  public void close() {
+    if (jsonLoader != null) {
+      jsonLoader.close();
+      jsonLoader = null;
+    }
+  }
+}
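
A sketch of the contract the EVF scan framework enforces on this reader; the
framework itself issues these calls, so this is illustration only:

    JsonBatchReader reader = new JsonBatchReader();
    if (reader.open(negotiator)) {  // negotiator is supplied by the framework
      while (reader.next()) {
        // the framework harvests the batch built by the JsonLoader here
      }
    }
    reader.close();                 // idempotent: jsonLoader is nulled out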
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java
index 49185ce996..ad0cba9ffb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java
@@ -75,7 +75,6 @@ public abstract class BaseFieldFactory implements FieldFactory {
 
   protected JsonLoaderImpl loader() { return loader; }
 
-  @Override
   public ValueParser scalarParserFor(FieldDefn fieldDefn, ColumnMetadata colSchema) {
     return scalarParserFor(fieldDefn.scalarWriterFor(colSchema));
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java
index ddd1b03f47..43812a22b2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java
@@ -34,7 +34,7 @@ import org.apache.drill.exec.record.metadata.Propertied;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
 import org.apache.drill.exec.store.log.LogBatchReader.LogReaderConfig;
@@ -144,7 +144,7 @@ public class LogFormatPlugin extends EasyFormatPlugin<LogFormatConfig> {
    */
   @Override
   protected FileScanBuilder frameworkBuilder(
-      OptionManager options, EasySubScan scan) throws ExecutionSetupException {
+      OptionSet options, EasySubScan scan) throws ExecutionSetupException {
 
     // Pattern and schema identical across readers; define
     // up front.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java
index 5dd9898bde..8dfbac00ec 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java
@@ -180,6 +180,4 @@ public class ExtendedJsonOutput extends BasicJsonOutput {
   public void writeIntNull() throws IOException {
     writeBigIntNull();
   }
-
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedType.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedType.java
index aa2883b089..31e83bfc59 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedType.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedType.java
@@ -35,7 +35,4 @@ public enum ExtendedType {
   ExtendedType(String name) {
     this.serialized = new SerializedString(name);
   }
-
-
-
 }
\ No newline at end of file
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedTypeName.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedTypeName.java
index 2760b9ab61..2b03414ab6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedTypeName.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedTypeName.java
@@ -17,14 +17,21 @@
  */
 package org.apache.drill.exec.vector.complex.fn;
 
+/**
+ * Based on
+ * <a href="https://docs.mongodb.com/manual/reference/mongodb-extended-json-v1/">
+ * V1</a> of the Mongo extended type spec. Some names overlap with the current
+ * <a href="https://docs.mongodb.com/manual/reference/mongodb-extended-json/">
+ * V2</a> of the Mongo spec.
+ */
 public interface ExtendedTypeName {
-  public static final String BINARY = "$binary";      // base64 encoded binary (ZHJpbGw=)  [from Mongo]
-  public static final String TYPE = "$type";          // type of binary data
-  public static final String DATE = "$dateDay";       // ISO date with no time. such as (12-24-27)
-  public static final String TIME = "$time";          // ISO time with no timezone (19:20:30.45Z)
-  public static final String TIMESTAMP = "$date";     // ISO standard time (2009-02-23T00:00:00.000-08:00) [from Mongo]
-  public static final String INTERVAL = "$interval";  // ISO standard duration (PT26.4S)
-  public static final String INTEGER = "$numberLong"; // 8 byte signed integer (123) [from Mongo]
-  public static final String DECIMAL = "$decimal";    // exact numeric value (123.123)
+  String BINARY = "$binary";      // base64 encoded binary (ZHJpbGw=)  [from Mongo]
+  String TYPE = "$type";          // type of binary data
+  String DATE = "$dateDay";       // ISO date with no time, such as (12-24-27)
+  String TIME = "$time";          // ISO time with no timezone (19:20:30.45Z)
+  String TIMESTAMP = "$date";     // ISO standard time (2009-02-23T00:00:00.000-08:00) [from Mongo]
+  String INTERVAL = "$interval";  // ISO standard duration (PT26.4S)
+  String INTEGER = "$numberLong"; // 8 byte signed integer (123) [from Mongo]
+  String DECIMAL = "$decimal";    // exact numeric value (123.123)
 }
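
Putting the markers together, a small illustrative JSON record assembled from
the example values in the comments above:

    { "bin": {"$binary": "ZHJpbGw="},
      "ts":  {"$date": "2009-02-23T00:00:00.000-08:00"},
      "num": {"$numberLong": "123"},
      "dur": {"$interval": "PT26.4S"} }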
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/FieldSelection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/FieldSelection.java
index 71098d0141..6df6ec6786 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/FieldSelection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/FieldSelection.java
@@ -170,5 +170,4 @@ public class FieldSelection {
       return root.fixNodes();
     }
   }
-
 }
\ No newline at end of file
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
index 269293e43a..07fb3d5b8f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
@@ -61,7 +61,7 @@ public class JsonWriter {
     final DataMode m = reader.getType().getMode();
     final MinorType mt = reader.getType().getMinorType();
 
-    switch(m){
+    switch(m) {
     case OPTIONAL:
     case REQUIRED:
 
@@ -87,7 +87,6 @@ public class JsonWriter {
       case BIT:
         gen.writeBoolean(reader);
         break;
-
       case DATE:
         gen.writeDate(reader);
         break;
@@ -123,9 +122,9 @@ public class JsonWriter {
       case MAP:
         gen.writeStartObject();
         if (reader.isSet()) {
-          for(String name : reader){
+          for (String name : reader) {
             FieldReader childReader = reader.reader(name);
-            if(childReader.isSet()){
+            if (childReader.isSet()) {
               gen.writeFieldName(name);
               writeValue(childReader);
             }
@@ -154,60 +153,60 @@ public class JsonWriter {
       gen.writeStartArray();
       switch (mt) {
       case FLOAT4:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeFloat(i, reader);
         }
         break;
       case FLOAT8:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeDouble(i, reader);
         }
         break;
       case INT:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeInt(i, reader);
         }
         break;
       case SMALLINT:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeSmallInt(i, reader);
         }
         break;
       case TINYINT:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeTinyInt(i, reader);
         }
         break;
       case BIGINT:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeBigInt(i, reader);
         }
         break;
       case BIT:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeBoolean(i, reader);
         }
         break;
 
       case DATE:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeDate(i, reader);
         }
         break;
       case TIME:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeTime(i, reader);
         }
         break;
       case TIMESTAMP:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeTimestamp(i, reader);
         }
         break;
       case INTERVALYEAR:
       case INTERVALDAY:
       case INTERVAL:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeInterval(i, reader);
         }
         break;
@@ -218,24 +217,24 @@ public class JsonWriter {
       case DECIMAL9:
       case DECIMAL18:
       case VARDECIMAL:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeDecimal(i, reader);
         }
         break;
 
       case LIST:
-        for(int i = 0; i < reader.size(); i++){
-          while(reader.next()){
+        for (int i = 0; i < reader.size(); i++) {
+          while (reader.next()) {
             writeValue(reader.reader());
           }
         }
         break;
       case MAP:
-        while(reader.next()){
+        while (reader.next()) {
           gen.writeStartObject();
-          for(String name : reader){
+          for (String name : reader) {
             FieldReader mapField = reader.reader(name);
-            if(mapField.isSet()){
+            if (mapField.isSet()) {
               gen.writeFieldName(name);
               writeValue(mapField);
             }
@@ -247,17 +246,17 @@ public class JsonWriter {
         break;
 
       case VAR16CHAR:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeVar16Char(i, reader);
         }
         break;
       case VARBINARY:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeBinary(i, reader);
         }
         break;
       case VARCHAR:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeVarChar(i, reader);
         }
         break;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
index 4493518d27..ea3bfd4150 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
@@ -80,29 +80,29 @@ abstract class VectorOutput {
       .optionalStart().appendOffset("+HH", "Z").optionalEnd()
       .toFormatter();
 
-  public VectorOutput(WorkingBuffer work){
+  public VectorOutput(WorkingBuffer work) {
     this.work = work;
   }
 
-  public void setParser(JsonParser parser){
+  public void setParser(JsonParser parser) {
     this.parser = parser;
   }
 
   protected boolean innerRun() throws IOException{
     JsonToken t = parser.nextToken();
-    if(t != JsonToken.FIELD_NAME){
+    if (t != JsonToken.FIELD_NAME) {
       return false;
     }
 
     String possibleTypeName = parser.getText();
-    if(!possibleTypeName.isEmpty() && possibleTypeName.charAt(0) == '$'){
-      switch(possibleTypeName){
+    if (!possibleTypeName.isEmpty() && possibleTypeName.charAt(0) == '$') {
+      switch(possibleTypeName) {
       case ExtendedTypeName.BINARY:
         writeBinary(checkNextToken(JsonToken.VALUE_STRING));
         checkCurrentToken(JsonToken.END_OBJECT);
         return true;
       case ExtendedTypeName.TYPE:
-        if(checkNextToken(JsonToken.VALUE_NUMBER_INT) || !hasBinary()) {
+        if (checkNextToken(JsonToken.VALUE_NUMBER_INT) || !hasBinary()) {
           throw UserException.parseError()
           .message("Either $type is not an integer or has no $binary")
           .build(logger);
@@ -177,13 +177,13 @@ abstract class VectorOutput {
   }
 
   public boolean checkToken(final JsonToken t, final JsonToken expected1, final JsonToken expected2) throws IOException{
-    if(t == JsonToken.VALUE_NULL){
+    if (t == JsonToken.VALUE_NULL) {
       return true;
-    }else if(t == expected1){
+    } else if (t == expected1) {
       return false;
-    }else if(t == expected2){
+    } else if (t == expected2) {
       return false;
-    }else{
+    } else {
       throw new JsonParseException(String.format("Failure while reading ExtendedJSON typed value. Expected a %s but "
           + "received a token of type %s", expected1, t), parser.getCurrentLocation());
     }
@@ -212,7 +212,7 @@ abstract class VectorOutput {
     @Override
     public void writeBinary(boolean isNull) throws IOException {
       VarBinaryWriter bin = writer.varBinary();
-      if(!isNull){
+      if (!isNull) {
         byte[] binaryData = parser.getBinaryValue();
         if (hasType()) {
           //Ignoring type info as of now.
@@ -231,7 +231,7 @@ abstract class VectorOutput {
     @Override
     public void writeDate(boolean isNull) throws IOException {
       DateWriter dt = writer.date();
-      if(!isNull){
+      if (!isNull) {
         work.prepareVarCharHolder(parser.getValueAsString(), varchar);
         dt.writeDate(StringFunctionHelpers.getDate(varchar.buffer, varchar.start, varchar.end));
       }
@@ -240,7 +240,7 @@ abstract class VectorOutput {
     @Override
     public void writeTime(boolean isNull) throws IOException {
       TimeWriter t = writer.time();
-      if(!isNull){
+      if (!isNull) {
         // read time and obtain the local time in the provided time zone.
         LocalTime localTime = OffsetTime.parse(parser.getValueAsString(), DateUtility.isoFormatTime).toLocalTime();
         t.writeTime((int) ((localTime.toNanoOfDay() + 500000L) / 1000000L)); // round to milliseconds
@@ -250,7 +250,7 @@ abstract class VectorOutput {
     @Override
     public void writeTimestamp(boolean isNull) throws IOException {
       TimeStampWriter ts = writer.timeStamp();
-      if(!isNull){
+      if (!isNull) {
         switch (parser.getCurrentToken()) {
         case VALUE_NUMBER_INT:
           DateTime dt = new DateTime(parser.getLongValue(), org.joda.time.DateTimeZone.UTC);
@@ -276,7 +276,7 @@ abstract class VectorOutput {
     @Override
     public void writeInterval(boolean isNull) throws IOException {
       IntervalWriter intervalWriter = writer.interval();
-      if(!isNull){
+      if (!isNull) {
         final Period p = ISOPeriodFormat.standard().parsePeriod(parser.getValueAsString());
         int months = DateUtilities.monthsFromPeriod(p);
         int days = p.getDays();
@@ -288,7 +288,7 @@ abstract class VectorOutput {
     @Override
     public void writeInteger(boolean isNull) throws IOException {
       BigIntWriter intWriter = writer.bigInt();
-      if(!isNull){
+      if (!isNull) {
         intWriter.writeBigInt(Long.parseLong(parser.getValueAsString()));
       }
     }
@@ -297,7 +297,6 @@ abstract class VectorOutput {
     public void writeDecimal(boolean isNull) throws IOException {
       throw new JsonParseException("Decimal Extended types not yet supported.", parser.getCurrentLocation());
     }
-
   }
 
   static class MapVectorOutput extends VectorOutput {
@@ -318,7 +317,7 @@ abstract class VectorOutput {
     @Override
     public void writeBinary(boolean isNull) throws IOException {
       VarBinaryWriter bin = writer.varBinary(fieldName);
-      if(!isNull){
+      if (!isNull) {
         byte[] binaryData = parser.getBinaryValue();
         if (hasType()) {
           //Ignoring type info as of now.
@@ -337,7 +336,7 @@ abstract class VectorOutput {
     @Override
     public void writeDate(boolean isNull) throws IOException {
       DateWriter dt = writer.date(fieldName);
-      if(!isNull){
+      if (!isNull) {
         LocalDate    localDate = LocalDate.parse(parser.getValueAsString(), DateUtility.isoFormatDate);
         OffsetDateTime utcDate = OffsetDateTime.of(localDate, LocalTime.MIDNIGHT, ZoneOffset.UTC);
 
@@ -348,7 +347,7 @@ abstract class VectorOutput {
     @Override
     public void writeTime(boolean isNull) throws IOException {
       TimeWriter t = writer.time(fieldName);
-      if(!isNull){
+      if (!isNull) {
         LocalTime localTime = OffsetTime.parse(parser.getValueAsString(), DateUtility.isoFormatTime).toLocalTime();
         t.writeTime((int) ((localTime.toNanoOfDay() + 500000L) / 1000000L)); // round to milliseconds
       }
@@ -357,7 +356,7 @@ abstract class VectorOutput {
     @Override
     public void writeTimestamp(boolean isNull) throws IOException {
       TimeStampWriter ts = writer.timeStamp(fieldName);
-      if(!isNull){
+      if (!isNull) {
         switch (parser.getCurrentToken()) {
         case VALUE_NUMBER_INT:
           DateTime dt = new DateTime(parser.getLongValue(), org.joda.time.DateTimeZone.UTC);
@@ -383,7 +382,7 @@ abstract class VectorOutput {
     @Override
     public void writeInterval(boolean isNull) throws IOException {
       IntervalWriter intervalWriter = writer.interval(fieldName);
-      if(!isNull){
+      if (!isNull) {
         final Period p = ISOPeriodFormat.standard().parsePeriod(parser.getValueAsString());
         int months = DateUtilities.monthsFromPeriod(p);
         int days = p.getDays();
@@ -395,7 +394,7 @@ abstract class VectorOutput {
     @Override
     public void writeInteger(boolean isNull) throws IOException {
       BigIntWriter intWriter = writer.bigInt(fieldName);
-      if(!isNull){
+      if (!isNull) {
         intWriter.writeBigInt(Long.parseLong(parser.getValueAsString()));
       }
     }
@@ -404,7 +403,5 @@ abstract class VectorOutput {
     public void writeDecimal(boolean isNull) throws IOException {
       throw new IOException("Decimal Extended types not yet supported.");
     }
-
   }
-
 }
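
As the writeTimestamp() cases above show, $date accepts two encodings. A
hedged pair of examples; the epoch value is computed for the same instant as
the ISO string (2009-02-23T08:00:00Z):

    {"ts": {"$date": 1235376000000}}                     // VALUE_NUMBER_INT: epoch millis, UTC
    {"ts": {"$date": "2009-02-23T00:00:00.000-08:00"}}   // VALUE_STRING: ISO-8601 with offset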
diff --git a/exec/java-exec/src/main/resources/drill-module.conf b/exec/java-exec/src/main/resources/drill-module.conf
index 56b1515e83..d5277ff54c 100644
--- a/exec/java-exec/src/main/resources/drill-module.conf
+++ b/exec/java-exec/src/main/resources/drill-module.conf
@@ -693,6 +693,7 @@ drill.exec.options: {
     # Property name and value should be separated by =.
     # Properties should be separated by new line (\n).
     store.hive.conf.properties: "",
+    store.json.enable_v2_reader: true,
     store.json.all_text_mode: false,
     store.json.writer.allow_nan_inf: true,
     store.json.reader.allow_nan_inf: true,
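
The new option defaults to true; it can be toggled per session, which is
exactly what the updated tests below do through their ENABLE_V2_READER
constant:

    ALTER SESSION SET `store.json.enable_v2_reader` = false;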
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
index f79eb60ef3..df83dbc159 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
@@ -22,6 +22,7 @@ import org.apache.drill.categories.SqlTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
@@ -49,7 +50,9 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select n_name, *, n_name, n_name from cp.`tpch/nation.parquet`")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarCommaSameColumnRepeated/q1.tsv")
-      .baselineTypes(TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_name", "n_nationkey", "n_name0", "n_regionkey", "n_comment", "n_name00", "n_name1")
       .build().run();
 
@@ -57,7 +60,9 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select n_name, *, n_name, n_name from cp.`tpch/nation.parquet` limit 2")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarCommaSameColumnRepeated/q2.tsv")
-      .baselineTypes(TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_name", "n_nationkey", "n_name0", "n_regionkey", "n_comment", "n_name00", "n_name1")
       .build().run();
 
@@ -65,8 +70,10 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select *, n_name, *, n_name, n_name from cp.`tpch/nation.parquet`")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarCommaSameColumnRepeated/q3.tsv")
-      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
-            TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "n_name0",
             "n_nationkey0", "n_name1", "n_regionkey0", "n_comment0", "n_name00", "n_name10")
       .build().run();
@@ -75,8 +82,10 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select *, n_name, *, n_name, n_name from cp.`tpch/nation.parquet` limit 2")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarCommaSameColumnRepeated/q4.tsv")
-      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
-            TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "n_name0",
             "n_nationkey0", "n_name1", "n_regionkey0", "n_comment0", "n_name00", "n_name10")
       .build().run();
@@ -89,8 +98,10 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select *, n_name as extra, *, n_name as extra from cp.`tpch/nation.parquet`")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarMultipleStarsRegularColumnAsAlias/q1.tsv")
-      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
-              TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "extra", "n_nationkey0", "n_name0", "n_regionkey0", "n_comment0", "extra0")
       .build().run();
 
@@ -98,8 +109,10 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select *, n_name as extra, *, n_name as extra from cp.`tpch/nation.parquet` limit 2")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarMultipleStarsRegularColumnAsAlias/q2.tsv")
-      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
-              TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "extra", "n_nationkey0", "n_name0", "n_regionkey0", "n_comment0", "extra0")
       .build().run();
   }
@@ -111,7 +124,9 @@ public class TestStarQueries extends BaseTestQuery {
     .sqlQuery("select *, *, n_name from cp.`tpch/nation.parquet`")
     .ordered()
     .csvBaselineFile("testframework/testStarQueries/testSelStarMultipleStars/q1.tsv")
-    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                   TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                   TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
     .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "n_nationkey0", "n_name0", "n_regionkey0", "n_comment0", "n_name1")
     .build().run();
 
@@ -119,7 +134,9 @@ public class TestStarQueries extends BaseTestQuery {
     .sqlQuery("select *, *, n_name from cp.`tpch/nation.parquet` limit 2")
     .ordered()
     .csvBaselineFile("testframework/testStarQueries/testSelStarMultipleStars/q2.tsv")
-    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                   TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                   TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
     .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "n_nationkey0", "n_name0", "n_regionkey0", "n_comment0", "n_name1")
     .build().run();
   }
@@ -131,40 +148,74 @@ public class TestStarQueries extends BaseTestQuery {
     .sqlQuery("select *, n_nationkey, *, n_name from cp.`tpch/nation.parquet` limit 2")
     .ordered()
     .csvBaselineFile("testframework/testStarQueries/testSelStarWithAdditionalColumnLimit/q1.tsv")
-    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                   TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.INT,
+                   TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                   TypeProtos.MinorType.VARCHAR)
     .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "n_nationkey0", "n_nationkey1", "n_name0", "n_regionkey0", "n_comment0", "n_name1")
     .build().run();
   }
 
+  public static final String ENABLE_V2_READER = "ALTER SESSION SET `" + ExecConstants.ENABLE_V2_JSON_READER_KEY + "` = %s";
+
   @Test
-  public void testSelStarOrderBy() throws Exception{
-    testBuilder()
-        .ordered()
-        .sqlQuery(" select * from cp.`employee.json` order by last_name")
-        .sqlBaselineQuery(" select employee_id, full_name,first_name,last_name,position_id,position_title,store_id," +
-            " department_id,birth_date,hire_date,salary,supervisor_id,education_level,marital_status,gender,management_role " +
+  public void testSelStarOrderBy() throws Exception {
+    // See DRILL-7522
+    String query = "select * from cp.`employee.json` order by last_name";
+    String baselineQueryHead = "select employee_id, full_name, first_name, last_name, position_id, position_title, store_id," +
+            " department_id, birth_date, hire_date, ";
+    String baselineQueryTail = "salary, supervisor_id, education_level, marital_status, gender, management_role " +
             " from cp.`employee.json` " +
-            " order by last_name ")
-        .build().run();
-
+            " order by last_name";
+    try {
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "false")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + baselineQueryTail)
+          .build().run();
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "true")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + "end_date, " + baselineQueryTail)
+          .build().run();
+    } finally {
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+    }
   }
 
   @Test
   @Category(UnlikelyTest.class)
-  public void testSelStarOrderByLimit() throws Exception{
-    testBuilder()
-        .ordered()
-        .sqlQuery(" select * from cp.`employee.json` order by last_name limit 2")
-        .sqlBaselineQuery(" select employee_id, full_name,first_name,last_name,position_id,position_title,store_id," +
-            " department_id,birth_date,hire_date,salary,supervisor_id,education_level,marital_status,gender,management_role " +
-            " from cp.`employee.json` " +
-            " order by last_name limit 2")
-        .build().run();
-
+  public void testSelStarOrderByLimit() throws Exception {
+    // See DRILL-7522
+    String query = "select * from cp.`employee.json` order by last_name limit 2";
+    String baselineQueryHead = "select employee_id, full_name, first_name, last_name, position_id, position_title, store_id, " +
+            "department_id, birth_date, hire_date, ";
+    String baselineQueryTail = "salary, supervisor_id, education_level, marital_status, " +
+            "gender, management_role " +
+            "from cp.`employee.json` " +
+            "order by last_name limit 2";
+    try {
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "false")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + baselineQueryTail)
+          .build().run();
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "true")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + "end_date, " + baselineQueryTail)
+          .build().run();
+    } finally {
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+    }
   }
 
   @Test
-  public void testSelStarPlusRegCol() throws Exception{
+  public void testSelStarPlusRegCol() throws Exception {
     testBuilder()
         .unOrdered()
         .sqlQuery("select *, n_nationkey as key2 from cp.`tpch/nation.parquet` order by n_name limit 2")
@@ -174,16 +225,31 @@ public class TestStarQueries extends BaseTestQuery {
   }
 
   @Test
-  public void testSelStarWhereOrderBy() throws Exception{
-    testBuilder()
-        .ordered()
-        .sqlQuery("select * from cp.`employee.json` where first_name = 'James' order by last_name")
-        .sqlBaselineQuery("select employee_id, full_name,first_name,last_name,position_id,position_title,store_id," +
-            " department_id,birth_date,hire_date,salary,supervisor_id,education_level,marital_status,gender,management_role " +
-            " from cp.`employee.json` " +
-            " where first_name = 'James' order by last_name")
-        .build().run();
+  public void testSelStarWhereOrderBy() throws Exception {
+    // See DRILL-7522
+    String query = "select * from cp.`employee.json` where first_name = 'James' order by last_name";
+    String baselineQueryHead = "select employee_id, full_name, first_name, last_name, position_id, position_title, store_id," +
+        " department_id, birth_date, hire_date, ";
+    String baselineQueryTail = "salary, supervisor_id, education_level, marital_status, gender, management_role " +
+        " from cp.`employee.json` " +
+        " where first_name = 'James' order by last_name";
 
+    try {
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "false")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + baselineQueryTail)
+          .build().run();
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "true")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + "end_date, " + baselineQueryTail)
+          .build().run();
+    } finally {
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+    }
   }
 
   @Test
@@ -192,9 +258,10 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
         .ordered()
         .sqlQuery("select * from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
-        .sqlBaselineQuery("select n.n_nationkey, n.n_name,n.n_regionkey,n.n_comment,r.r_regionkey,r.r_name, r.r_comment from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlBaselineQuery("select n.n_nationkey, n.n_name,n.n_regionkey,n.n_comment,r.r_regionkey,r.r_name, r.r_comment " +
+                          "from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                           "where n.n_regionkey = r.r_regionkey order by n.n_name")
         .build().run();
-
   }
 
   @Test
@@ -202,33 +269,37 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
         .ordered()
         .sqlQuery("select n.* from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
-        .sqlBaselineQuery("select n.n_nationkey, n.n_name, n.n_regionkey, n.n_comment from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlBaselineQuery("select n.n_nationkey, n.n_name, n.n_regionkey, n.n_comment " +
+                          "from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                          "where n.n_regionkey = r.r_regionkey order by n.n_name")
         .build().run();
-
   }
 
   @Test
   public void testSelRightStarJoin() throws Exception {
     testBuilder()
         .ordered()
-        .sqlQuery("select r.* from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
-        .sqlBaselineQuery("select r.r_regionkey, r.r_name, r.r_comment from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlQuery("select r.* from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                  "where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlBaselineQuery("select r.r_regionkey, r.r_name, r.r_comment " +
+                          "from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                          "where n.n_regionkey = r.r_regionkey order by n.n_name")
         .build().run();
-
   }
 
   @Test
   public void testSelStarRegColConstJoin() throws Exception {
     testBuilder()
         .ordered()
-        .sqlQuery("select *, n.n_nationkey as n_nationkey0, 1 + 2 as constant from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlQuery("select *, n.n_nationkey as n_nationkey0, 1 + 2 as constant " +
+                  "from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                  "where n.n_regionkey = r.r_regionkey order by n.n_name")
         .sqlBaselineQuery(" select n.n_nationkey, n.n_name, n.n_regionkey, n.n_comment, r.r_regionkey, r.r_name, r.r_comment, " +
             " n.n_nationkey as n_nationkey0, 1 + 2 as constant " +
             " from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
             " where n.n_regionkey = r.r_regionkey " +
             " order by n.n_name")
         .build().run();
-
   }
 
   @Test
@@ -236,9 +307,10 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
         .unOrdered()
         .sqlQuery("select n.*, r.* from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey")
-        .sqlBaselineQuery("select n.n_nationkey,n.n_name,n.n_regionkey,n.n_comment,r.r_regionkey,r.r_name,r.r_comment from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlBaselineQuery("select n.n_nationkey,n.n_name,n.n_regionkey,n.n_comment,r.r_regionkey,r.r_name,r.r_comment " +
+                          "from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                          "where n.n_regionkey = r.r_regionkey order by n.n_name")
         .build().run();
-
   }
 
   @Test
@@ -247,9 +319,9 @@ public class TestStarQueries extends BaseTestQuery {
         .unOrdered()
         .sqlQuery("select * from cp.`tpch/nation.parquet` n1, cp.`tpch/nation.parquet` n2 where n1.n_nationkey = n2.n_nationkey")
         .sqlBaselineQuery("select n1.n_nationkey,n1.n_name,n1.n_regionkey,n1.n_comment,n2.n_nationkey,n2.n_name,n2.n_regionkey, n2.n_comment " +
-            "from cp.`tpch/nation.parquet` n1, cp.`tpch/nation.parquet` n2 where n1.n_nationkey = n2.n_nationkey")
+                          "from cp.`tpch/nation.parquet` n1, cp.`tpch/nation.parquet` n2 " +
+                          "where n1.n_nationkey = n2.n_nationkey")
         .build().run();
-
   }
 
   @Test // DRILL-1293
@@ -295,7 +367,8 @@ public class TestStarQueries extends BaseTestQuery {
   @Test(expected = UserException.class)  // Should get "At line 1, column 8: Column 'n_nationkey' is ambiguous"
   public void testSelStarAmbiguousJoin() throws Exception {
     try {
-      test("select x.n_nationkey, x.n_name, x.n_regionkey, x.r_name from (select * from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey) x " );
+      test("select x.n_nationkey, x.n_name, x.n_regionkey, x.r_name from " +
+           "(select * from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey) x " );
     } catch (UserException e) {
       // Expected
       throw e;
@@ -312,9 +385,12 @@ public class TestStarQueries extends BaseTestQuery {
   public void testSelStarSubQPrefix() throws Exception {
     test("select t.n_nationkey, t.n_name, t.n_regionkey from (select * from cp.`tpch/nation.parquet`) t where t.n_regionkey > 1 order by t.n_name" );
 
-    test("select n.n_regionkey, count(*) as cnt from ( select * from ( select * from cp.`tpch/nation.parquet`) t where t.n_nationkey < 10 ) n where n.n_nationkey >1 group by n.n_regionkey order by n.n_regionkey ; ");
+    test("select n.n_regionkey, count(*) as cnt from " +
+         "( select * from ( select * from cp.`tpch/nation.parquet`) t where t.n_nationkey < 10 ) n " +
+         "where n.n_nationkey >1 group by n.n_regionkey order by n.n_regionkey ; ");
 
-    test("select t.n_regionkey, count(*) as cnt from (select * from cp.`tpch/nation.parquet`) t where t.n_nationkey > 1 group by t.n_regionkey order by t.n_regionkey;" );
+    test("select t.n_regionkey, count(*) as cnt from (select * from cp.`tpch/nation.parquet`) t " +
+         "where t.n_nationkey > 1 group by t.n_regionkey order by t.n_regionkey;" );
   }
 
   @Test  // Select * in SubQuery : regular columns appear in select clause, where, group by, order by.
@@ -384,7 +460,6 @@ public class TestStarQueries extends BaseTestQuery {
          " where x.n_nationkey > 5 \n" +
          " group by x.n_regionkey \n" +
          " order by cnt limit 5; ");
-
   }
 
   @Test // DRILL-595 : Join two CTE, each having select * : regular columns appear in the select , where and on clause, group by, order by.
@@ -411,19 +486,22 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
         .ordered()
         .sqlQuery("select *  from cp.`tpch/nation.parquet` order by substr(n_name, 2, 5) limit 3")
-        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey from cp.`tpch/nation.parquet` order by substr(n_name, 2, 5) limit 3 ")
+        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey " +
+                          "from cp.`tpch/nation.parquet` order by substr(n_name, 2, 5) limit 3 ")
         .build().run();
 
     testBuilder()
         .ordered()
         .sqlQuery("select *, n_nationkey + 5 as myexpr from cp.`tpch/nation.parquet` limit 3")
-        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey, n_nationkey + 5 as myexpr from cp.`tpch/nation.parquet` order by n_nationkey limit 3")
+        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey, n_nationkey + 5 as myexpr " +
+                          "from cp.`tpch/nation.parquet` order by n_nationkey limit 3")
         .build().run();
 
     testBuilder()
         .ordered()
         .sqlQuery("select *  from cp.`tpch/nation.parquet` where n_nationkey + 5 > 10 limit 3")
-        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey  from cp.`tpch/nation.parquet` where n_nationkey + 5 > 10 order by n_nationkey limit 3")
+        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey  from cp.`tpch/nation.parquet` " +
+                          "where n_nationkey + 5 > 10 order by n_nationkey limit 3")
         .build().run();
   }
 
@@ -435,7 +513,7 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
     .sqlQuery("select * from dfs.`multilevel/parquet` where dir0=1994 and dir1='Q1' order by dir0 limit 1")
     .ordered()
-    .baselineColumns("dir0", "dir1", "o_clerk", "o_comment", "o_custkey", "o_orderdate", "o_orderkey",  "o_orderpriority", "o_orderstatus", "o_shippriority",  "o_totalprice")
+    .baselineColumns("dir0", "dir1", "o_clerk", "o_comment", "o_custkey", "o_orderdate", "o_orderkey", "o_orderpriority", "o_orderstatus", "o_shippriority", "o_totalprice")
     .baselineValues("1994", "Q1", "Clerk#000000743", "y pending requests integrate", 1292, mydate, 66, "5-LOW", "F",  0, 104190.66)
     .build().run();
   }
@@ -446,14 +524,16 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
         .unOrdered()
         .sqlQuery("select * from cp.`tpch/nation.parquet` where n_regionkey in (select r_regionkey from cp.`tpch/region.parquet`)")
-        .sqlBaselineQuery("select n_nationkey, n_name, n_regionkey, n_comment from cp.`tpch/nation.parquet` where n_regionkey in (select r_regionkey from cp.`tpch/region.parquet`)")
+        .sqlBaselineQuery("select n_nationkey, n_name, n_regionkey, n_comment from cp.`tpch/nation.parquet` " +
+                          "where n_regionkey in (select r_regionkey from cp.`tpch/region.parquet`)")
         .build().run();
 
     // multiple columns in "IN" subquery predicates.
     testBuilder()
         .unOrdered()
         .sqlQuery("select * from cp.`tpch/nation.parquet` where (n_nationkey, n_name) in ( select n_nationkey, n_name from cp.`tpch/nation.parquet`)")
-        .sqlBaselineQuery("select n_nationkey, n_name, n_regionkey, n_comment from cp.`tpch/nation.parquet` where (n_nationkey, n_name) in ( select n_nationkey, n_name from cp.`tpch/nation.parquet`)")
+        .sqlBaselineQuery("select n_nationkey, n_name, n_regionkey, n_comment from cp.`tpch/nation.parquet` " +
+                          "where (n_nationkey, n_name) in ( select n_nationkey, n_name from cp.`tpch/nation.parquet`)")
         .build().run();
 
     // Multiple "IN" subquery predicates.
@@ -558,5 +638,4 @@ public class TestStarQueries extends BaseTestQuery {
         .build()
         .run();
   }
-
 }
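
All of the TestStarQueries cases above share one verification technique: run the wildcard query, then compare it row-for-row against a baseline query that lists the expanded columns explicitly. A skeleton of that pattern, using only the testBuilder() calls that appear above:

    // Skeleton of the star-expansion checks: the `select *` result must
    // match an explicitly projected baseline.
    testBuilder()
        .unOrdered()
        .sqlQuery("select * from cp.`tpch/nation.parquet`")
        .sqlBaselineQuery("select n_nationkey, n_name, n_regionkey, n_comment " +
                          "from cp.`tpch/nation.parquet`")
        .build().run();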
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java b/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
index 58e2e80a31..97a7a7ddac 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
@@ -117,6 +117,17 @@ public class TestEmptyInputSql extends BaseTestQuery {
 
   @Test
   public void testQueryMapArrayEmptyJson() throws Exception {
+    try {
+      enableV2Reader(false);
+      doTestQueryMapArrayEmptyJson();
+      enableV2Reader(true);
+      doTestQueryMapArrayEmptyJson();
+    } finally {
+      resetV2Reader();
+    }
+  }
+
+  private void doTestQueryMapArrayEmptyJson() throws Exception {
     SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("col1", TypeProtos.MinorType.INT)
         .addNullable("col2", TypeProtos.MinorType.INT)
@@ -132,6 +143,14 @@ public class TestEmptyInputSql extends BaseTestQuery {
         .run();
   }
 
+  private void enableV2Reader(boolean enable) throws Exception {
+    alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() throws Exception {
+    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
+
   /**
    * Test a query against an empty file. The select clause has three expressions.
    * 1.0 + 100.0, a constant expression, is resolved to required FLOAT8/VARDECIMAL
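
A note on the pattern just added to TestEmptyInputSql: the test flips the V2 JSON reader option, runs the same assertions under each setting, and resets the option in a finally block so later tests see the default again. A minimal sketch of that pattern under the same assumptions (the BaseTestQuery helpers alterSession/resetSessionOption used above; TestCase and runWithBothJsonReaders are hypothetical names):

    // Hypothetical helper: run one test body under both JSON readers.
    // Assumes the BaseTestQuery context used by TestEmptyInputSql above.
    @FunctionalInterface
    interface TestCase { void run() throws Exception; }

    private void runWithBothJsonReaders(TestCase testCase) throws Exception {
      try {
        alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, false);
        testCase.run();   // V1 reader
        alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, true);
        testCase.run();   // V2 reader
      } finally {
        // Always restore the default so later tests are unaffected.
        resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
      }
    }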
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java
index 83cc81f032..3870c9d657 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java
@@ -307,19 +307,50 @@ public class TestTypeFns extends ClusterTest {
         .go();
   }
 
+  /**
+   * The V1 JSON reader appears to omit missing columns.
+   */
   @Test
-  public void testTypeOfWithFile() throws Exception {
-    // Column `x` does not actually appear in the file.
-    String sql ="SELECT typeof(bi) AS bi_t, typeof(fl) AS fl_t, typeof(st) AS st_t,\n" +
-                "       typeof(mp) AS mp_t, typeof(ar) AS ar_t, typeof(nu) AS nu_t,\n" +
-                "       typeof(x) AS x_t\n" +
-                "FROM cp.`jsoninput/allTypes.json`";
-     testBuilder()
-      .sqlQuery(sql)
-      .ordered()
-      .baselineColumns("bi_t",   "fl_t",   "st_t",    "mp_t", "ar_t",   "nu_t", "x_t")
-      .baselineValues( "BIGINT", "FLOAT8", "VARCHAR", "MAP",  "BIGINT", "NULL", "NULL")
-      .go();
+  public void testTypeOfWithFileV1() throws Exception {
+    try {
+      enableV2Reader(false);
+      // Column `x` does not actually appear in the file.
+      String sql ="SELECT typeof(bi) AS bi_t, typeof(fl) AS fl_t, typeof(st) AS st_t,\n" +
+                  "       typeof(mp) AS mp_t, typeof(ar) AS ar_t, typeof(nu) AS nu_t,\n" +
+                  "       typeof(x) AS x_t\n" +
+                  "FROM cp.`jsoninput/allTypes.json`";
+      testBuilder()
+        .sqlQuery(sql)
+        .ordered()
+        .baselineColumns("bi_t",   "fl_t",   "st_t",    "mp_t", "ar_t",   "nu_t", "x_t")
+        .baselineValues( "BIGINT", "FLOAT8", "VARCHAR", "MAP",  "BIGINT", "NULL", "NULL")
+        .go();
+    } finally {
+      resetV2Reader();
+    }
+  }
+
+  /**
+   * The V2 JSON reader fills in missing columns with a nullable VARCHAR.
+   */
+  @Test
+  public void testTypeOfWithFileV2() throws Exception {
+    try {
+      enableV2Reader(true);
+      // Column `x` does not actually appear in the file.
+      String sql ="SELECT typeof(bi) AS bi_t, typeof(fl) AS fl_t, typeof(st) AS st_t,\n" +
+                  "       typeof(mp) AS mp_t, typeof(ar) AS ar_t, typeof(nu) AS nu_t,\n" +
+                  "       typeof(x) AS x_t\n" +
+                  "FROM cp.`jsoninput/allTypes.json`";
+      testBuilder()
+        .sqlQuery(sql)
+        .ordered()
+        .baselineColumns("bi_t",   "fl_t",   "st_t",    "mp_t", "ar_t",   "nu_t",    "x_t")
+        .baselineValues( "BIGINT", "FLOAT8", "VARCHAR", "MAP",  "BIGINT", "VARCHAR", "VARCHAR")
+        .go();
+    } finally {
+      resetV2Reader();
+    }
   }
 
   @Test
@@ -345,4 +376,12 @@ public class TestTypeFns extends ClusterTest {
       client.resetSession(ExecConstants.ENABLE_UNION_TYPE_KEY);
     }
   }
+
+  private void enableV2Reader(boolean enable) throws Exception {
+    client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() throws Exception {
+    client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
 }
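
Taken together, the two tests above pin down how each reader treats a column that never appears in the input file. Summarized from the baselines (nothing beyond what the tests assert):

    // For a column `x` absent from jsoninput/allTypes.json:
    //   V1 reader: typeof(x) => "NULL"     -- no vector is materialized
    //   V2 reader: typeof(x) => "VARCHAR"  -- a nullable VARCHAR is filled in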
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
index 63cf2b2bec..16c42ad0d2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
@@ -27,20 +27,19 @@ import static org.junit.Assert.fail;
 
 import java.util.Iterator;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
 import org.apache.drill.exec.physical.config.Limit;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
 import org.apache.drill.exec.proto.UserBitShared.NamePart;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
-import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.RecordBatch.IterOutcome;
 import org.apache.drill.exec.record.TypedFieldId;
@@ -51,6 +50,7 @@ import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.IntVector;
 import org.apache.drill.exec.vector.VarCharVector;
 import org.apache.drill.test.SubOperatorTest;
+import org.apache.drill.test.rowSet.RowSetUtilities;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
@@ -60,8 +60,7 @@ import org.slf4j.LoggerFactory;
  * Test the implementation of the Drill Volcano iterator protocol that
  * wraps the modular operator implementation.
  */
-
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestOperatorRecordBatch extends SubOperatorTest {
   private static final Logger logger = LoggerFactory.getLogger(TestOperatorRecordBatch.class);
 
@@ -70,7 +69,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * and provides a light-weight vector container. Returns a
    * defined number of batches, with an optional schema change.
    */
-
   private class MockOperatorExec implements OperatorExec {
 
     public boolean bindCalled;
@@ -117,11 +115,11 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
         return false;
       }
       if (nextCount == schemaChangeAt) {
-        BatchSchemaBuilder newSchema = new BatchSchemaBuilder(batchAccessor.schema());
-        newSchema.schemaBuilder()
-            .add("b", MinorType.VARCHAR);
-        VectorContainer newContainer = new VectorContainer(fixture.allocator(), newSchema.build());
-        batchAccessor.addBatch(newContainer);
+        VectorContainer container = batchAccessor.container();
+        container.addOrGet(
+            MaterializedField.create("b", Types.required(MinorType.VARCHAR)));
+        container.buildSchema(SelectionVectorMode.NONE);
+        batchAccessor.addBatch(container);
       }
       return true;
     }
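
The rewritten schema-change simulation above no longer builds a fresh BatchSchema; it mutates the live container (addOrGet a new VARCHAR field `b`, rebuild the schema) and re-registers it with the batch accessor. A short sketch of how that mutation is observed, assuming a SchemaTracker-backed accessor as in these tests:

    // Sketch: a container mutation like the one above bumps the tracked
    // schema version, which the iterator protocol reports as OK_NEW_SCHEMA.
    int before = batchAccessor.schemaVersion();
    container.addOrGet(
        MaterializedField.create("b", Types.required(MinorType.VARCHAR)));
    container.buildSchema(SelectionVectorMode.NONE);
    batchAccessor.addBatch(container);              // re-register mutated container
    assert batchAccessor.schemaVersion() > before;  // version increases on change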
@@ -137,11 +135,11 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   }
 
   private static VectorContainer mockBatch() {
-    SchemaBuilder schemaBuilder = new SchemaBuilder()
-      .add("a", MinorType.INT);
-    VectorContainer container = new VectorContainer(fixture.allocator(), new BatchSchemaBuilder()
-        .withSchemaBuilder(schemaBuilder)
-        .build());
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.INT)
+        .build();
+    VectorContainer container = new VectorContainer(fixture.allocator());
+    container.addOrGet(schema.column(0));
     container.buildSchema(SelectionVectorMode.NONE);
     return container;
   }
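
mockBatch() now derives its container from a TupleMetadata built with SchemaBuilder instead of the removed BatchSchemaBuilder. The same pattern extends to multi-column schemas; a hedged sketch (containerFor is a hypothetical name, and the fixture allocator is assumed from SubOperatorTest):

    // Hypothetical generalization of mockBatch(): populate a container
    // from every column of a TupleMetadata.
    private static VectorContainer containerFor(TupleMetadata schema) {
      VectorContainer container = new VectorContainer(fixture.allocator());
      for (int i = 0; i < schema.size(); i++) {
        container.addOrGet(schema.column(i));  // create or reuse each vector
      }
      container.buildSchema(SelectionVectorMode.NONE);
      return container;
    }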
@@ -155,7 +153,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Simulate a normal run: return some batches, encounter a schema change.
    */
-
   @Test
   public void testNormalLifeCycle() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -167,24 +164,20 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
       assertNotNull(opBatch.getContext());
 
       // First call to next() builds schema
-
       assertEquals(IterOutcome.OK_NEW_SCHEMA, opBatch.next());
       assertTrue(opExec.bindCalled);
       assertTrue(opExec.buildSchemaCalled);
       assertEquals(0, opExec.nextCount);
 
       // Second call returns the first batch
-
       assertEquals(IterOutcome.OK, opBatch.next());
       assertEquals(1, opExec.nextCount);
 
       // Third call causes a schema change
-
       assertEquals(IterOutcome.OK_NEW_SCHEMA, opBatch.next());
       assertEquals(2, opExec.nextCount);
 
       // Fourth call reaches EOF
-
       assertEquals(IterOutcome.NONE, opBatch.next());
       assertEquals(3, opExec.nextCount);
 
@@ -201,7 +194,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * Simulate a truncated life cycle: next() is never called. Not a valid part
    * of the protocol; but should be ready anyway.
    */
-
   @Test
   public void testTruncatedLifeCycle() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -218,7 +210,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Simulate reaching EOF when trying to create the schema.
    */
-
   @Test
   public void testSchemaEOF() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -237,7 +228,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * Simulate reaching EOF on the first batch. This simulated data source
    * discovered a schema, but had no data.
    */
-
   @Test
   public void testFirstBatchEOF() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -257,7 +247,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Simulate the caller failing the operator before getting the schema.
    */
-
   @Test
   public void testFailEarly() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -298,7 +287,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * Simulate the caller failing the operator after EOF but before close.
    * This is a silly time to fail, but we have to handle it anyway.
    */
-
   @Test
   public void testFailBeforeClose() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -312,7 +300,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
       opBatch.cancel();
 
       // Already hit EOF, so fail won't be passed along.
-
       assertFalse(opExec.cancelCalled);
     } catch (Exception e) {
       fail();
@@ -324,7 +311,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * Simulate the caller failing the operator after close.
    * This violates the operator protocol, but we have to handle it anyway.
    */
-
   @Test
   public void testFailAfterClose() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -351,14 +337,11 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * container operations. Probably an artifact of its history. In any event, make
    * sure those methods are passed through to the container accessor.
    */
-
   @Test
   public void testBatchAccessor() {
-    SchemaBuilder schemaBuilder = new SchemaBuilder()
-      .add("a", MinorType.INT)
-      .add("b", MinorType.VARCHAR);
-    BatchSchema schema = new BatchSchemaBuilder()
-        .withSchemaBuilder(schemaBuilder)
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.INT)
+        .add("b", MinorType.VARCHAR)
         .build();
     SingleRowSet rs = fixture.rowSetBuilder(schema)
         .addRow(10, "fred")
@@ -369,7 +352,7 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
 
     try (OperatorRecordBatch opBatch = makeOpBatch(opExec)) {
       assertEquals(IterOutcome.OK_NEW_SCHEMA, opBatch.next());
-      assertEquals(schema, opBatch.getSchema());
+      RowSetUtilities.assertSchemasEqual(schema, opBatch.getSchema());
       assertEquals(2, opBatch.getRecordCount());
       assertSame(rs.container(), opBatch.getOutgoingContainer());
 
@@ -379,7 +362,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
 
       // Not a full test of the schema path; just make sure that the
       // pass-through to the Vector Container works.
-
       SchemaPath path = SchemaPath.create(NamePart.newBuilder().setName("a").build());
       TypedFieldId id = opBatch.getValueVectorId(path);
       assertEquals(MinorType.INT, id.getFinalType().getMinorType());
@@ -393,7 +375,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
       assertEquals(1, id.getFieldIds()[0]);
 
       // Sanity check of getValueAccessorById()
-
       VectorWrapper<?> w = opBatch.getValueAccessorById(IntVector.class, 0);
       assertNotNull(w);
       assertEquals("a", w.getValueVector().getField().getName());
@@ -404,7 +385,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
       // getWritableBatch() ?
 
       // No selection vectors
-
       try {
         opBatch.getSelectionVector2();
         fail();
@@ -439,7 +419,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     int schemaVersion = opExec.batchAccessor().schemaVersion();
 
     // Be tidy: start at 1.
-
     assertEquals(1, schemaVersion);
 
     // Changing data does not trigger schema change
@@ -449,7 +428,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     assertEquals(schemaVersion, opExec.batchAccessor().schemaVersion());
 
     // Different container, same vectors, does not trigger a change
-
     VectorContainer c2 = new VectorContainer(fixture.allocator());
     for (VectorWrapper<?> vw : container) {
       c2.add(vw.getValueVector());
@@ -463,7 +441,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
 
     // Replacing a vector with another of the same type does trigger
     // a change.
-
     VectorContainer c3 = new VectorContainer(fixture.allocator());
     c3.add(container.getValueVector(0).getValueVector());
     c3.add(TypeHelper.getNewVector(
@@ -475,12 +452,10 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     schemaVersion = opExec.batchAccessor().schemaVersion();
 
     // No change if same schema again
-
     opExec.batchAccessor.addBatch(c3);
     assertEquals(schemaVersion, opExec.batchAccessor().schemaVersion());
 
     // Adding a vector triggers a change
-
     MaterializedField c = SchemaBuilder.columnSchema("c", MinorType.INT, DataMode.OPTIONAL);
     c3.add(TypeHelper.getNewVector(c, fixture.allocator(), null));
     c3.buildSchema(SelectionVectorMode.NONE);
@@ -489,12 +464,10 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     schemaVersion = opExec.batchAccessor().schemaVersion();
 
     // No change if same schema again
-
     opExec.batchAccessor.addBatch(c3);
     assertEquals(schemaVersion, opExec.batchAccessor().schemaVersion());
 
     // Removing a vector triggers a change
-
     c3.remove(c3.getValueVector(2).getValueVector());
     c3.buildSchema(SelectionVectorMode.NONE);
     assertEquals(2, c3.getNumberOfColumns());
@@ -503,7 +476,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     schemaVersion = opExec.batchAccessor().schemaVersion();
 
     // Clean up
-
     opExec.close();
     c2.clear();
     c3.clear();
@@ -512,7 +484,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Test that an SV2 is properly handled by the proper container accessor.
    */
-
   @Test
   public void testSv2() {
     TupleMetadata schema = new SchemaBuilder()
@@ -542,7 +513,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     assertTrue(opExec.closeCalled);
 
     // Must release SV2
-
     rs.clear();
   }
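
The closing rs.clear() in testSv2 is load-bearing: a row set carrying an SV2 owns the selection-vector buffer in addition to its data vectors, and both must be released. A sketch of the obligation; the withSv2() builder call is an assumption here, since the row set construction is elided from this hunk:

    // Assumed construction (the actual call is elided from the hunk above);
    // the point is the paired cleanup.
    SingleRowSet rs = fixture.rowSetBuilder(schema)
        .addRow(10, "fred")
        .withSv2()     // attach a selection vector
        .build();
    try {
      // ... run the operator against rs ...
    } finally {
      rs.clear();      // releases the SV2 buffer along with the data vectors
    }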
 
@@ -559,7 +529,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Failure on the bind method.
    */
-
   @Test
   public void testWrappedExceptionOnBind() {
     MockOperatorExec opExec = new MockOperatorExec() {
@@ -654,7 +623,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * Failure on the second or subsequent calls to next(), when actually
    * fetching a record batch.
    */
-
   @Test
   public void testWrappedExceptionOnNext() {
     MockOperatorExec opExec = new MockOperatorExec() {
@@ -704,7 +672,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Failure when closing the operator implementation.
    */
-
   @Test
   public void testWrappedExceptionOnClose() {
     MockOperatorExec opExec = new MockOperatorExec() {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArray.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArray.java
index 6009ed3c51..9346aaea90 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArray.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArray.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.fail;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.MockScanBuilder;
@@ -50,7 +50,7 @@ import org.junit.experimental.categories.Category;
  * Test the "columns" array mechanism integrated with the scan schema
  * orchestrator, including simulated data reads.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestColumnsArray extends SubOperatorTest {
 
   private static class MockScanner {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayFramework.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayFramework.java
index 35cc9d1e41..90ed824b57 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayFramework.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayFramework.java
@@ -21,7 +21,7 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
@@ -55,7 +55,7 @@ import static org.junit.Assert.assertTrue;
 /**
  * Test the columns-array specific behavior in the columns scan framework.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestColumnsArrayFramework extends SubOperatorTest {
 
   private static final Path MOCK_FILE_PATH = new Path("file:/w/x/y/z.csv");
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayParser.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayParser.java
index 809c8ea363..835b0185fd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayParser.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayParser.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.physical.impl.scan.columns.ColumnsArrayParser;
@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.Path;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestColumnsArrayParser extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
index 2efbe2af30..9ffda169b0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
@@ -28,7 +28,7 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
@@ -64,7 +64,7 @@ import org.junit.experimental.categories.Category;
  * Focuses on the file metadata itself; assumes that other tests have
  * verified the underlying mechanisms.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestFileScanFramework extends SubOperatorTest {
 
   private static final String MOCK_FILE_NAME = "foo.csv";
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnParser.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnParser.java
index b4971ccf90..cb8c8887d4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnParser.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnParser.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.physical.impl.scan.file.FileMetadataColumn;
 import org.apache.drill.exec.physical.impl.scan.file.ImplicitColumnManager;
@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.Path;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestImplicitColumnParser extends SubOperatorTest {
 
   private ImplicitColumnOptions standardOptions(Path filePath) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnProjection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnProjection.java
index e511e8e0c5..62bfd96498 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnProjection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnProjection.java
@@ -26,7 +26,7 @@ import static org.junit.Assert.fail;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.file.FileMetadata;
@@ -52,7 +52,7 @@ import org.apache.hadoop.fs.Path;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestImplicitColumnProjection extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java
index fd53d6fe8c..95c0ddbcbd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.physical.impl.scan;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.base.AbstractSubScan;
@@ -44,7 +44,7 @@ import io.netty.buffer.DrillBuf;
  * set follows the same semantics as the original set.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanBatchWriters extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecBasics.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecBasics.java
index 938f3848e2..a67a79c3f9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecBasics.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecBasics.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
@@ -36,7 +36,7 @@ import org.junit.experimental.categories.Category;
  * Tests the basics of the scan operator protocol: error conditions,
  * etc.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecBasics extends BaseScanOperatorExecTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecEarlySchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecEarlySchema.java
index b50394dfa0..ff21a58172 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecEarlySchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecEarlySchema.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
 import org.apache.drill.exec.physical.impl.scan.framework.SchemaNegotiator;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
@@ -35,7 +35,7 @@ import org.junit.experimental.categories.Category;
  * Test "early schema" readers: those that can declare a schema at
  * open time.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecEarlySchema extends BaseScanOperatorExecTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecLateSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecLateSchema.java
index 500b343c1c..918a457288 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecLateSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecLateSchema.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
@@ -40,7 +40,7 @@ import org.junit.experimental.categories.Category;
  * Test "late schema" readers: those like JSON that discover their schema
  * as they read data.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecLateSchema extends BaseScanOperatorExecTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOuputSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOuputSchema.java
index 87ea958492..68cd0044a8 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOuputSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOuputSchema.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.physical.impl.scan;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
@@ -45,7 +45,7 @@ import org.junit.experimental.categories.Category;
  * defines the schema to be output from the scan operator, and forces
  * conversions between reader and output data types.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecOuputSchema extends BaseScanOperatorExecTest {
 
   private static class MockSimpleReader implements ManagedReader<SchemaNegotiator> {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOverflow.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOverflow.java
index 21db742b11..b11b3c1ed6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOverflow.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOverflow.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
 import org.apache.drill.exec.physical.impl.scan.framework.SchemaNegotiator;
@@ -37,7 +37,7 @@ import org.junit.experimental.categories.Category;
 /**
  * Test vector overflow in the context of the scan operator.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecOverflow extends BaseScanOperatorExecTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java
index 86899da507..0a9aa1fc19 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
 import org.apache.drill.exec.physical.impl.scan.framework.SchemaNegotiator;
@@ -40,7 +40,7 @@ import org.junit.experimental.categories.Category;
  * context of a single scan operator: it cannot help when a query has
  * multiple scans, each in its own fragment.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecSmoothing extends BaseScanOperatorExecTest {
 
   private static class MockEarlySchemaReader2 extends MockEarlySchemaReader {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java
index f7075fffac..7bc00722cc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -53,7 +53,7 @@ import org.junit.experimental.categories.Category;
  * The tests here focus on the scan orchestrator itself; the tests assume
  * that tests for lower-level components have already passed.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOrchestratorEarlySchema extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorImplicitColumns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorImplicitColumns.java
index 36fa31a058..7c7573ca9d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorImplicitColumns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorImplicitColumns.java
@@ -25,7 +25,7 @@ import java.io.IOException;
 import java.nio.file.Paths;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
@@ -59,7 +59,7 @@ import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
  * with implicit file columns provided by the file metadata manager.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOrchestratorImplicitColumns extends SubOperatorTest {
 
   private ImplicitColumnOptions standardOptions(Path filePath) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java
index a57f86f9b6..b703f77310 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.physical.impl.scan;
 
 import static org.junit.Assert.assertFalse;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.MockScanBuilder;
@@ -47,7 +47,7 @@ import org.junit.experimental.categories.Category;
  * that tests for lower-level components have already passed.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOrchestratorLateSchema extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/convert/TestColumnConverter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/convert/TestColumnConverter.java
index 969d81ef0c..88fdc41f86 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/convert/TestColumnConverter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/convert/TestColumnConverter.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.physical.impl.scan.convert;
 
 import static org.apache.drill.test.rowSet.RowSetUtilities.intArray;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetTestUtils;
@@ -43,7 +43,7 @@ import org.junit.experimental.categories.Category;
  * Not really much to test; more a verification that the pattern works
  * in practice.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestColumnConverter extends SubOperatorTest {
 
   private static class MockSource {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java
index 9d91cd7851..3be0b4bf49 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.physical.impl.scan.project;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -48,7 +48,7 @@ import org.junit.experimental.categories.Category;
  * values. The ConstantColumnLoader builds and populates these columns.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestConstantColumnLoader extends SubOperatorTest {
 
   private static class DummyColumn implements ConstantColumnSpec {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java
index 882e2169f8..b7d01c895e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java
@@ -25,7 +25,7 @@ import java.util.List;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder;
 import org.apache.drill.exec.physical.resultSet.ResultVectorCache;
@@ -51,7 +51,7 @@ import org.junit.experimental.categories.Category;
  * can create the classic nullable Int null column, or one of
  * any other type and mode.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestNullColumnLoader extends SubOperatorTest {
 
   private ResolvedNullColumn makeNullCol(String name, MajorType nullType) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java
index db183dee5e..e0cccb0b71 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java
@@ -27,7 +27,7 @@ import static org.junit.Assert.fail;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils;
@@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category;
  * combines these to map out the actual projection.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestReaderLevelProjection extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java
index 1bf35b3a7a..e3d5a70b49 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java
@@ -41,7 +41,7 @@ import io.netty.buffer.DrillBuf;
 import static org.apache.drill.test.rowSet.RowSetUtilities.mapValue;
 import static org.apache.drill.test.rowSet.RowSetUtilities.singleMap;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 
 import static org.apache.drill.test.rowSet.RowSetUtilities.mapArray;
 
@@ -55,7 +55,7 @@ import static org.apache.drill.test.rowSet.RowSetUtilities.mapArray;
  * vector.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestRowBatchMerger extends SubOperatorTest {
 
   public static class RowSetSource implements VectorSource {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestSchemaSmoothing.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestSchemaSmoothing.java
index 842f18e0c9..7b251ceef9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestSchemaSmoothing.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestSchemaSmoothing.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.fail;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.protocol.SchemaTracker;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils;
@@ -87,7 +87,7 @@ import org.junit.experimental.categories.Category;
  * because such an algorithm would require time-travel: looking into
  * the future to know what data will be scanned.)
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSchemaSmoothing extends SubOperatorTest {
 
   private ImplicitColumnOptions standardOptions(List<Path> files) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java
index a30bda94a1..87e3153a6b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java
@@ -26,7 +26,7 @@ import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
@@ -41,7 +41,7 @@ import org.apache.drill.test.SubOperatorTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestBatchValidator extends SubOperatorTest {
 
   public static class CapturingReporter implements BatchValidator.ErrorReporter {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDictArray.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDictArray.java
index 9f3e71f1fb..5cf72660b0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDictArray.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDictArray.java
@@ -27,7 +27,7 @@ import static org.junit.Assert.assertTrue;
 import java.util.Arrays;
 import java.util.Iterator;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.validate.BatchValidator;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
@@ -52,7 +52,7 @@ import org.junit.experimental.categories.Category;
 /**
  * Test dict array support in the result set loader.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderDictArray extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDicts.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDicts.java
index 3fb523116d..ccf2b07faf 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDicts.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDicts.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.validate.BatchValidator;
@@ -53,7 +53,7 @@ import java.util.Arrays;
 /**
  * Test (non-array) dict support in the result set loader and related classes.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderDicts extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderEmptyProject.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderEmptyProject.java
index ef18b70628..86a25f2cbc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderEmptyProject.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderEmptyProject.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.fail;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
@@ -41,7 +41,7 @@ import org.apache.drill.test.rowSet.RowSetUtilities;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderEmptyProject extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderLimits.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderLimits.java
index 49ff638bc3..16b69f2de0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderLimits.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderLimits.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.fail;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
@@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category;
  * in fact, depend on the row count) and vector overflow (which can occur when
  * the row limit turns out to be too large).
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderLimits extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMapArray.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMapArray.java
index 5e607f6c4b..41b82f8b58 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMapArray.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMapArray.java
@@ -28,7 +28,7 @@ import static org.junit.Assert.assertTrue;
 import java.util.Arrays;
 import java.util.Iterator;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
@@ -60,7 +60,7 @@ import org.junit.experimental.categories.Category;
  * tests work. Maps, and especially repeated maps, are very complex
  * constructs not to be tackled lightly.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderMapArray extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMaps.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMaps.java
index e869c7c9f6..a3628ef70d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMaps.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMaps.java
@@ -28,7 +28,7 @@ import static org.junit.Assert.fail;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -56,7 +56,7 @@ import org.junit.experimental.categories.Category;
 /**
  * Test (non-array) map support in the result set loader and related classes.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderMaps extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOmittedValues.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOmittedValues.java
index 6d7f09252f..e0add3b645 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOmittedValues.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOmittedValues.java
@@ -23,8 +23,9 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.physical.impl.validate.BatchValidator;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
 import org.apache.drill.exec.physical.resultSet.RowSetLoader;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
@@ -40,7 +41,7 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderOmittedValues extends SubOperatorTest {
 
   /**
@@ -137,7 +138,6 @@ public class TestResultSetLoaderOmittedValues extends SubOperatorTest {
     // Harvest the row and verify.
 
     RowSet actual = fixture.wrap(rsLoader.harvest());
-//    actual.print();
 
     TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
@@ -210,6 +210,7 @@ public class TestResultSetLoaderOmittedValues extends SubOperatorTest {
     // Harvest and verify
 
     RowSet result = fixture.wrap(rsLoader.harvest());
+    BatchValidator.validate(result);
     assertEquals(rowNumber - 1, result.rowCount());
     RowSetReader reader = result.reader();
     int rowIndex = 0;
@@ -248,9 +249,9 @@ public class TestResultSetLoaderOmittedValues extends SubOperatorTest {
     // Verify that holes were preserved.
 
     result = fixture.wrap(rsLoader.harvest());
+    BatchValidator.validate(result);
     assertEquals(rowNumber, rsLoader.totalRowCount());
     assertEquals(rowNumber - startRowNumber + 1, result.rowCount());
-//    result.print();
     reader = result.reader();
     rowIndex = 0;
     while (reader.next()) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOverflow.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOverflow.java
index a7eea4d418..143ef0c254 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOverflow.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOverflow.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.fail;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -51,7 +51,7 @@ import org.junit.experimental.categories.Category;
  * Exercise the vector overflow functionality for the result set loader.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderOverflow extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java
index c68bdae69b..14d5bc6e16 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java
@@ -35,7 +35,7 @@ import static org.junit.Assert.fail;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -52,6 +52,9 @@ import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.apache.drill.exec.vector.accessor.TupleWriter.UndefinedColumnException;
 import org.apache.drill.test.SubOperatorTest;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
+import org.apache.drill.exec.physical.rowSet.RowSetReader;
 import org.apache.drill.test.rowSet.RowSetUtilities;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -77,7 +80,7 @@ import org.junit.experimental.categories.Category;
  * the structure. The object tree will show all the components and their
  * current state.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderProtocol extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderRepeatedList.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderRepeatedList.java
index b4c1fea76a..59696cf5e1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderRepeatedList.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderRepeatedList.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.physical.resultSet.impl;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
@@ -64,7 +64,7 @@ import static org.junit.Assert.assertTrue;
  * actually, since the different "slices" need not have the same length...)
  * Repeated lists appear to be used only by JSON.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderRepeatedList extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderTorture.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderTorture.java
index 8e90e1dbb6..d5c404dc69 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderTorture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderTorture.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
 import org.apache.drill.exec.physical.resultSet.RowSetLoader;
@@ -65,7 +65,7 @@ import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
  * things in a single query.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderTorture extends SubOperatorTest {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestResultSetLoaderTorture.class);
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderUnions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderUnions.java
index 7479f15d46..d63a3f8c9a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderUnions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderUnions.java
@@ -33,7 +33,7 @@ import java.util.Arrays;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
 import org.apache.drill.exec.physical.resultSet.RowSetLoader;
@@ -76,7 +76,7 @@ import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
  * Most operators do not support them. But, JSON uses them, so they must
  * be made to work in the result set loader layer.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderUnions extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetSchemaChange.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetSchemaChange.java
index 250303ab68..4f53227dae 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetSchemaChange.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetSchemaChange.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
@@ -43,7 +43,7 @@ import org.apache.drill.test.rowSet.RowSetUtilities;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetSchemaChange extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultVectorCache.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultVectorCache.java
index a44d0bef9e..ec58fc4d8a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultVectorCache.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultVectorCache.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertNotSame;
 import static org.junit.Assert.assertSame;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -35,7 +35,7 @@ import org.apache.drill.test.SubOperatorTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultVectorCache extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/project/TestTupleProjection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/project/TestTupleProjection.java
index 10fc958e53..8484e29137 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/project/TestTupleProjection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/project/TestTupleProjection.java
@@ -27,7 +27,7 @@ import static org.junit.Assert.assertTrue;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
@@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category;
  * parsing; the only bits not tested here is that which is
  * inherently specific to some use case.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestTupleProjection extends BaseTest {
 
   private static final ColumnMetadata NORMAL_COLUMN =
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestDummyWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestDummyWriter.java
index ca2a62116f..ac87ae3171 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestDummyWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestDummyWriter.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertFalse;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
@@ -40,7 +40,7 @@ import org.apache.drill.test.SubOperatorTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestDummyWriter extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFillEmpties.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFillEmpties.java
index b918b3cd22..03485434ed 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFillEmpties.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFillEmpties.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category;
  * Note that this test also has the handy side-effect of testing
  * null handling in the accessor classes.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestFillEmpties extends SubOperatorTest {
 
   public static final int ROW_COUNT = 1000;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFixedWidthWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFixedWidthWriter.java
index c74d526fad..76e7a30854 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFixedWidthWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFixedWidthWriter.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.record.MaterializedField;
@@ -42,7 +42,7 @@ import org.junit.experimental.categories.Category;
  * overflow, and filling in empty values.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestFixedWidthWriter extends SubOperatorTest {
 
   public static class TestIndex implements ColumnWriterIndex {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestHyperVectorReaders.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestHyperVectorReaders.java
index b765b55aef..5a1e6cdc03 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestHyperVectorReaders.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestHyperVectorReaders.java
@@ -25,7 +25,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet;
 import org.apache.drill.exec.physical.rowSet.RowSet.HyperRowSet;
@@ -48,7 +48,7 @@ import org.junit.experimental.categories.Category;
  * <p>
  * This test does not cover repeated vectors; those tests should be added.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestHyperVectorReaders extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestIndirectReaders.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestIndirectReaders.java
index 887a05b0b9..e4fbf9996a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestIndirectReaders.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestIndirectReaders.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.physical.rowSet;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
@@ -44,7 +44,7 @@ import org.junit.experimental.categories.Category;
  * so if the index works for one reader, it will work for all.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestIndirectReaders extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestMapAccessors.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestMapAccessors.java
index 4057f3365a..e8d40d7359 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestMapAccessors.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestMapAccessors.java
@@ -26,7 +26,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.Iterator;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
 import org.apache.drill.exec.record.MaterializedField;
@@ -58,7 +58,7 @@ import org.junit.experimental.categories.Category;
  * schema, which makes this mechanism far simpler.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestMapAccessors extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestOffsetVectorWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestOffsetVectorWriter.java
index 8fce484661..a4884a1455 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestOffsetVectorWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestOffsetVectorWriter.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.TestFixedWidthWriter.TestIndex;
@@ -48,7 +48,7 @@ import org.junit.experimental.categories.Category;
  * counts.)
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestOffsetVectorWriter extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRepeatedListAccessors.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRepeatedListAccessors.java
index 9fbaa1444e..9a17276e96 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRepeatedListAccessors.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRepeatedListAccessors.java
@@ -27,7 +27,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertSame;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
@@ -63,7 +63,7 @@ import org.junit.experimental.categories.Category;
  * on to the result set loader tests.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestRepeatedListAccessors extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRowSet.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRowSet.java
index 987bd02c85..1ddfe3cb59 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRowSet.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRowSet.java
@@ -32,7 +32,7 @@ import java.math.BigDecimal;
 import java.util.Arrays;
 import java.util.Map;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
@@ -80,7 +80,7 @@ import org.junit.experimental.categories.Category;
  * A list is an array of variants. Variants are tested elsewhere.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestRowSet extends SubOperatorTest {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestRowSet.class);
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestScalarAccessors.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestScalarAccessors.java
index c1e803bf9c..6531e95a97 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestScalarAccessors.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestScalarAccessors.java
@@ -33,7 +33,7 @@ import java.time.LocalTime;
 import java.time.ZoneOffset;
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
@@ -77,7 +77,7 @@ import org.junit.experimental.categories.Category;
 // TODO: Var16Char
 // TODO: Bit
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScalarAccessors extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestSchemaBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestSchemaBuilder.java
index e754db5889..0e104b161c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestSchemaBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestSchemaBuilder.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
@@ -46,7 +46,7 @@ import org.junit.experimental.categories.Category;
  * lists and repeated lists. This test verifies that it assembles the various
  * pieces correctly for the various nesting combinations.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSchemaBuilder extends DrillTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariableWidthWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariableWidthWriter.java
index 69190d219c..b66376a12a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariableWidthWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariableWidthWriter.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.TestFixedWidthWriter.TestIndex;
@@ -38,7 +38,7 @@ import org.bouncycastle.util.Arrays;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestVariableWidthWriter extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariantAccessors.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariantAccessors.java
index a0dd8ad066..2e7e59824e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariantAccessors.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariantAccessors.java
@@ -26,7 +26,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet;
@@ -67,7 +67,7 @@ import org.junit.experimental.categories.Category;
  * and other operators. Some assembly required for future use.)
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestVariantAccessors extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java
index a0292d6961..2d7a4f6e44 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java
@@ -152,25 +152,28 @@ public class TestAnalyze extends ClusterTest {
 
   @Test
   public void testAnalyzeSupportedFormats() throws Exception {
-    //Only allow computing statistics on PARQUET files.
+    // Only allow computing statistics on PARQUET files.
     try {
       client.alterSession(ExecConstants.SLICE_TARGET, 1);
       client.alterSession(ExecConstants.OUTPUT_FORMAT_OPTION, "json");
       run("CREATE TABLE dfs.tmp.employee_basic4 AS SELECT * from cp.`employee.json`");
-      //Should display not supported
+      // Should display not supported
       verifyAnalyzeOutput("ANALYZE TABLE dfs.tmp.employee_basic4 COMPUTE STATISTICS",
           "Table employee_basic4 is not supported by ANALYZE. "
           + "Support is currently limited to directory-based Parquet tables.");
 
+      // See DRILL-7522
+      client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, false);
       run("DROP TABLE dfs.tmp.employee_basic4");
       client.alterSession(ExecConstants.OUTPUT_FORMAT_OPTION, "parquet");
       run("CREATE TABLE dfs.tmp.employee_basic4 AS SELECT * from cp.`employee.json`");
-      //Should complete successfully (16 columns in employee.json)
+      // Should complete successfully (16 columns in employee.json)
       verifyAnalyzeOutput("ANALYZE TABLE dfs.tmp.employee_basic4 COMPUTE STATISTICS",
           "16");
     } finally {
       client.resetSession(ExecConstants.SLICE_TARGET);
       client.resetSession(ExecConstants.OUTPUT_FORMAT_OPTION);
+      client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
     }
   }
 
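The hunk above pins the session to the V1 JSON reader around the second
ANALYZE run and restores the default in the finally block. A minimal sketch of
the same try/finally discipline (the test name and query are illustrative
only; this assumes a ClusterTest subclass where client and run() are in scope,
as in the tests in this commit):

    @Test
    public void testWithV1JsonReader() throws Exception {
      try {
        // Pin this session to the V1 JSON reader; see DRILL-7522.
        client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, false);
        run("SELECT * FROM cp.`employee.json` LIMIT 1");
      } finally {
        // Restore the default even if the query throws.
        client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
      }
    }
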
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/BaseTestJsonReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/BaseTestJsonReader.java
new file mode 100644
index 0000000000..d7be5e8aeb
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/BaseTestJsonReader.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.json;
+
+import static org.junit.Assert.fail;
+
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.rpc.RpcException;
+import org.apache.drill.test.ClusterTest;
+
+public class BaseTestJsonReader extends ClusterTest {
+
+  protected void enableV2Reader(boolean enable) throws Exception {
+    client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  protected void resetV2Reader() throws Exception {
+    client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
+
+  protected interface TestWrapper {
+    void apply() throws Exception;
+  }
+
+  protected void runBoth(TestWrapper wrapper) throws Exception {
+    try {
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
+    } finally {
+      resetV2Reader();
+    }
+  }
+
+  protected RowSet runTest(String sql) {
+    try {
+      return client.queryBuilder().sql(sql).rowSet();
+    } catch (RpcException e) {
+      fail(e.getMessage());
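+      // fail() always throws AssertionError, so the throw below is never
+      // reached; it exists to satisfy the compiler, which cannot see that
+      // fail() does not return.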
+      throw new IllegalStateException(e);
+    }
+  }
+}
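
BaseTestJsonReader.runBoth() above executes the same test body twice, first
with the V2 JSON reader disabled and then enabled, resetting the session
option afterwards. A hypothetical subclass test might use it as follows (a
sketch only; the test name, query, and helper method are illustrative):

    @Test
    public void testRowCount() throws Exception {
      runBoth(() -> doTestRowCount()); // body runs under V1, then under V2
    }

    private void doTestRowCount() throws Exception {
      // runTest() converts a failed query (RpcException) into a test failure.
      RowSet results = runTest("select * from cp.`employee.json`");
      results.clear(); // release the underlying vectors
    }
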
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java
index 1af8e1d413..19b0b7be72 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.json;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -39,7 +39,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestJsonModes extends ClusterTest {
 
   @BeforeClass
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderFns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderFns.java
new file mode 100644
index 0000000000..5b2fb24741
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderFns.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.json;
+
+import java.nio.file.Paths;
+
+import org.apache.drill.categories.RowSetTest;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
+import org.apache.drill.exec.vector.complex.writer.TestJsonReader;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.rowSet.RowSetComparison;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Tests of selected Drill functions using JSON as an input source.
+ * (Split from the original <tt>TestJsonReader</tt>.) Relative to the Drill 1.12
+ * version, the tests here:
+ * <ul>
+ * <li>Are rewritten to use the {@link ClusterFixture} framework.</li>
+ * <li>Add data verification where missing.</li>
+ * <li>Clean up handling of session options.</li>
+ * </ul>
+ * When running tests, consider these to be secondary. First verify the core
+ * JSON reader itself (using {@link TestJsonReader}), then run these tests to
+ * ensure vectors populated by JSON work with downstream functions.
+ */
+@Category(RowSetTest.class)
+public class TestJsonReaderFns extends BaseTestJsonReader {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    startCluster(ClusterFixture.builder(dirTestWatcher));
+    dirTestWatcher.copyResourceToRoot(Paths.get("store", "json"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("vector","complex", "writer"));
+  }
+
+  @Test
+  public void testEmptyList() throws Exception {
+    runBoth(() -> doTestEmptyList());
+  }
+
+  private void doTestEmptyList() throws Exception {
+    final String sql = "select count(a[0]) as ct from dfs.`store/json/emptyLists`";
+
+    final RowSet results = runTest(sql);
+    final TupleMetadata schema = new SchemaBuilder()
+        .add("ct", MinorType.BIGINT)
+        .build();
+
+    final RowSet expected = client.rowSetBuilder(schema)
+        .addRow(6L)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  // Expansion of former testRepeatedCount()
+
+  @Test
+  public void testRepeatedCountStr() throws Exception {
+    runBoth(() -> doTestRepeatedCountStr());
+  }
+
+  private void doTestRepeatedCountStr() throws Exception {
+    final RowSet results = runTest("select repeated_count(str_list) from cp.`store/json/json_basic_repeated_varchar.json`");
+    final RowSet expected = client.rowSetBuilder(countSchema())
+        .addSingleCol(5)
+        .addSingleCol(1)
+        .addSingleCol(3)
+        .addSingleCol(1)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedCountInt() throws Exception {
+    runBoth(() -> doTestRepeatedCountInt());
+  }
+
+  private void doTestRepeatedCountInt() throws Exception {
+    final RowSet results = runTest("select repeated_count(INT_col) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(countSchema())
+        .addSingleCol(12)
+        .addSingleCol(4)
+        .addSingleCol(4)
+        .addSingleCol(4)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedCountFloat4() throws Exception {
+    runBoth(() -> doTestRepeatedCountFloat4());
+  }
+
+  private void doTestRepeatedCountFloat4() throws Exception {
+    final RowSet results = runTest("select repeated_count(FLOAT4_col) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(countSchema())
+        .addSingleCol(7)
+        .addSingleCol(4)
+        .addSingleCol(4)
+        .addSingleCol(4)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedCountVarchar() throws Exception {
+    runBoth(() -> doTestRepeatedCountVarchar());
+  }
+
+  private void doTestRepeatedCountVarchar() throws Exception {
+    final RowSet results = runTest("select repeated_count(VARCHAR_col) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(countSchema())
+        .addSingleCol(4)
+        .addSingleCol(3)
+        .addSingleCol(3)
+        .addSingleCol(3)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedCountBit() throws Exception {
+    runBoth(() -> doTestRepeatedCountBit());
+  }
+
+  private void doTestRepeatedCountBit() throws Exception {
+    final RowSet results = runTest("select repeated_count(BIT_col) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(countSchema())
+        .addSingleCol(7)
+        .addSingleCol(7)
+        .addSingleCol(5)
+        .addSingleCol(3)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  private TupleMetadata countSchema() {
+    final TupleMetadata expectedSchema = new SchemaBuilder()
+        .add("EXPR$0", MinorType.INT)
+        .build();
+    return expectedSchema;
+  }
+
+  // Reimplementation of testRepeatedContains()
+
+  @Test
+  public void testRepeatedContainsStr() throws Exception {
+    runBoth(() -> doTestRepeatedContainsStr());
+  }
+
+  private void doTestRepeatedContainsStr() throws Exception {
+    final RowSet results = runTest("select repeated_contains(str_list, 'asdf') from cp.`store/json/json_basic_repeated_varchar.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(2) // WRONG! Should be 1 (true). See DRILL-6034
+        .addSingleCol(0)
+        .addSingleCol(1)
+        .addSingleCol(0)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedContainsInt() throws Exception {
+    runBoth(() -> doTestRepeatedContainsInt());
+  }
+
+  private void doTestRepeatedContainsInt() throws Exception {
+    final RowSet results = runTest("select repeated_contains(INT_col, -2147483648) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(1)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedContainsFloat4() throws Exception {
+    runBoth(() -> doTestRepeatedContainsFloat4());
+  }
+
+  private void doTestRepeatedContainsFloat4() throws Exception {
+    final RowSet results = runTest("select repeated_contains(FLOAT4_col, -1000000000000.0) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(1)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedContainsVarchar() throws Exception {
+    runBoth(() -> doTestRepeatedContainsVarchar());
+  }
+
+  private void doTestRepeatedContainsVarchar() throws Exception {
+    final RowSet results = runTest("select repeated_contains(VARCHAR_col, 'qwerty' ) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(1)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedContainsBitTrue() throws Exception {
+    runBoth(() -> doTestRepeatedContainsBitTrue());
+  }
+
+  private void doTestRepeatedContainsBitTrue() throws Exception {
+    final RowSet results = runTest("select repeated_contains(BIT_col, true) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(11) // WRONG! Should be 1 (true). See DRILL-6034
+        .addSingleCol(2)
+        .addSingleCol(0)
+        .addSingleCol(3)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedContainsBitFalse() throws Exception {
+    runBoth(() -> doTestRepeatedContainsBitFalse());
+  }
+
+  private void doTestRepeatedContainsBitFalse() throws Exception {
+    final RowSet results = runTest("select repeated_contains(BIT_col, false) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(5) // WRONG! Should be 1 (true). See DRILL-6034
+        .addSingleCol(5)
+        .addSingleCol(5)
+        .addSingleCol(0)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  private TupleMetadata bitCountSchema() {
+    return new SchemaBuilder()
+        .add("EXPR$0", MinorType.BIT)
+        .buildSchema();
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderQueries.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderQueries.java
new file mode 100644
index 0000000000..3fcad38e00
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderQueries.java
@@ -0,0 +1,634 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.json;
+
+import static org.apache.drill.test.TestBuilder.listOf;
+import static org.apache.drill.test.TestBuilder.mapOf;
+import static org.apache.drill.test.rowSet.RowSetUtilities.longArray;
+import static org.apache.drill.test.rowSet.RowSetUtilities.mapValue;
+import static org.apache.drill.test.rowSet.RowSetUtilities.singleMap;
+import static org.apache.drill.test.rowSet.RowSetUtilities.strArray;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.nio.file.Paths;
+import java.util.zip.GZIPOutputStream;
+
+import org.apache.drill.categories.RowSetTest;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.util.DrillFileUtils;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.physical.rowSet.DirectRowSet;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
+import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
+import org.apache.drill.shaded.guava.com.google.common.io.Files;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.QueryBuilder.QuerySummary;
+import org.apache.drill.test.QueryResultSet;
+import org.apache.drill.test.rowSet.RowSetUtilities;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Reimplementation of selected tests from the
+ * TestJsonReader test suite.
+ */
+
+@Category(RowSetTest.class)
+public class TestJsonReaderQueries extends BaseTestJsonReader {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    startCluster(ClusterFixture.builder(dirTestWatcher));
+    dirTestWatcher.copyResourceToRoot(Paths.get("store", "json"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("vector","complex", "writer"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("jsoninput/drill_3353"));
+  }
+
+  /**
+   * Reimplementation of a Drill 1.12 unit test to actually verify results.
+ * Doing so is non-trivial, as the inline comments explain. This test shows
+ * the limits of "schema-free" processing when the schema changes.
+   * @throws Exception
+   */
+
+  @Test
+  @Ignore("Too fragile to keep working")
+  public void schemaChange() throws Exception {
+    String sql = "select b from dfs.`vector/complex/writer/schemaChange/`";
+    QueryResultSet results = client.queryBuilder().sql(sql).resultSet();
+
+    // Query will scan two files:
+    // f1:
+    // {"a": "foo","b": null}
+    // {"a": "bar","b": null}
+    // f2:
+    // {"a": "foo2","b": null}
+    // {"a": "bar2","b": {"x":1, "y":2}}
+
+    // When f1 is read, we don't yet know the type of b, so it defaults to Varchar.
+    // (Assuming text mode for that column.)
+    //
+    // On reading f2, we discover that b is a map (something we only learn
+    // at the second record).
+    //
+    // The scanner handles schema persistence, but not (at present) for maps.
+    // If we did have schema persistence, then if f2 was first, we'd remember
+    // the map schema when we read f1.
+    //
+    // This crazy behavior is the best we can do without a schema. Bottom line:
+    // Drill needs a user-provided schema to make sense of these cases because
+    // "Drill can't predict the future" (TM).
+    //
+    // See TestCSV* for a way to implement this test case
+
+    TupleMetadata f2Schema = new SchemaBuilder()
+        .addMap("b")
+          .addNullable("x", MinorType.BIGINT)
+          .addNullable("y", MinorType.BIGINT)
+          .resumeSchema()
+        .build();
+    RowSet f2Expected = client.rowSetBuilder(f2Schema)
+        .addSingleCol(mapValue(null, null))
+        .addSingleCol(mapValue(1L, 2L))
+        .build();
+
+    TupleMetadata f1Schema = new SchemaBuilder()
+        .addNullable("b", MinorType.VARCHAR)
+        .build();
+    RowSet f1Expected = client.rowSetBuilder(f1Schema)
+        .addSingleCol(null)
+        .addSingleCol(null)
+        .build();
+
+    // The first batch is empty; it presents only the schema. But, since
+    // file order is non-deterministic, we don't know which file's schema
+    // it carries.
+
+    RowSet batch = results.next();
+    assertNotNull(batch);
+    assertEquals(0, batch.rowCount());
+    boolean mapFirst;
+    if (batch.schema().metadata("b").type() == MinorType.MAP) {
+      RowSet expected = client.rowSetBuilder(f2Schema)
+          .build();
+      RowSetUtilities.verify(expected, batch);
+      mapFirst = true;
+    } else {
+      RowSet expected = client.rowSetBuilder(f1Schema)
+          .build();
+      RowSetUtilities.verify(expected, batch);
+      mapFirst = false;
+    }
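+
+    // The two data batches arrive in the same non-deterministic file order
+    // that the schema-only batch revealed above.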
+    for (int i = 0; i < 2; i++) {
+      batch = results.next();
+      assertNotNull(batch);
+      if (i == 0 && mapFirst || i == 1 && ! mapFirst) {
+        RowSetUtilities.verify(f2Expected, batch);
+      } else {
+        RowSetUtilities.verify(f1Expected, batch);
+      }
+    }
+    assertNull(results.next());
+    results.close();
+  }
+
+  /**
+   * Reimplementation of the Drill 1.12 test. Tests the odd case in which
+   * we project both a single column from inside a map, as well as the
+   * entire map.
+   *
+   * As it turns out, the original functionality was broken, and the test
+   * carried incorrect expected results.
+   * <p>
+   * The query selects two fields which are deeply nested:
+   * <ul>
+   * <li><tt>t.field_4.inner_3</tt> where <tt>field_4</tt> is a map and
+   * <tt>inner_3</tt> is another map.</li>
+   * <li><tt>t.field_4</tt> is a map with three total items.</li>
+   * </ul>
+   * The original expected results reflected the broken behavior.
+   * @throws Exception
+   */
+
+  @Test
+  @Ignore("broken")
+  public void testFieldSelectionBug() throws Exception {
+    runBoth(() -> doTestFieldSelectionBug());
+  }
+
+  private void doTestFieldSelectionBug() throws Exception {
+    String sql = "select t.field_4.inner_3 as col_1, t.field_4 as col_2 from cp.`store/json/schema_change_int_to_string.json` t";
+    try {
+      client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
+
+      testBuilder()
+          .sqlQuery(sql)
+          .unOrdered()
+          .baselineColumns("col_1", "col_2")
+          .baselineValues(
+              mapOf(),
+              mapOf(
+                  "inner_1", listOf(),
+                  "inner_3", mapOf()))
+          .baselineValues(
+              mapOf("inner_object_field_1", "2"),
+              mapOf(
+                  "inner_1", listOf("1", "2", "3"),
+                  "inner_2", "3",
+                  "inner_3", mapOf("inner_object_field_1", "2")))
+          .baselineValues(
+              mapOf(),
+              mapOf(
+                  "inner_1", listOf("4", "5", "6"),
+                  "inner_2", "3",
+                  "inner_3", mapOf()))
+          .go();
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+    }
+  }
+
+  @Test
+  public void testReadCompressed() throws Exception {
+    runBoth(() -> doTestReadCompressed());
+  }
+
+  private void doTestReadCompressed() throws Exception {
+    String filepath = "compressed_json.json";
+    File f = new File(dirTestWatcher.getRootDir(), filepath);
+    PrintWriter out = new PrintWriter(f);
+    out.println("{\"a\" :5}");
+    out.close();
+
+    gzipIt(f);
+    testBuilder()
+        .sqlQuery("select * from dfs.`%s.gz`", filepath)
+        .unOrdered()
+        .baselineColumns("a")
+        .baselineValues(5L)
+        .build().run();
+
+    // test reading the uncompressed version as well
+    testBuilder()
+        .sqlQuery("select * from dfs.`%s`", filepath)
+        .unOrdered()
+        .baselineColumns("a")
+        .baselineValues(5L)
+        .build().run();
+  }
+
+  public static void gzipIt(File sourceFile) throws IOException {
+
+    // Modified from: http://www.mkyong.com/java/how-to-compress-a-file-in-gzip-format/
+    // try-with-resources closes both streams even if the copy fails;
+    // closing the GZIPOutputStream also finishes the gzip trailer.
+    byte[] buffer = new byte[1024];
+    try (GZIPOutputStream gzos =
+             new GZIPOutputStream(new FileOutputStream(sourceFile.getPath() + ".gz"));
+         FileInputStream in = new FileInputStream(sourceFile)) {
+      int len;
+      while ((len = in.read(buffer)) > 0) {
+        gzos.write(buffer, 0, len);
+      }
+    }
+  }
+
+  @Test
+  public void testDrill_1419() throws Exception {
+    runBoth(() -> doTestDrill_1419());
+  }
+
+  private void doTestDrill_1419() throws Exception {
+    String sql = "select t.trans_id, t.trans_info.prod_id[0],t.trans_info.prod_id[1] from cp.`store/json/clicks.json` t limit 5";
+    RowSet results = client.queryBuilder().sql(sql).rowSet();
+    TupleMetadata schema = new SchemaBuilder()
+        .addNullable("trans_id", MinorType.BIGINT)
+        .addNullable("EXPR$1", MinorType.BIGINT)
+        .addNullable("EXPR$2", MinorType.BIGINT)
+        .build();
+
+    RowSet expected = client.rowSetBuilder(schema)
+        .addRow(31920L, 174L, 2L)
+        .addRow(31026L, null, null)
+        .addRow(33848L, 582L, null)
+        .addRow(32383L, 710L, 47L)
+        .addRow(32359L, 0L, 8L)
+        .build();
+    RowSetUtilities.verify(expected, results);
+  }
+
+  @Test
+  public void testSingleColumnRead_vector_fill_bug() throws Exception {
+    runBoth(() -> doTestSingleColumnRead_vector_fill_bug());
+  }
+
+  private void doTestSingleColumnRead_vector_fill_bug() throws Exception {
+    String sql = "select * from cp.`store/json/single_column_long_file.json`";
+    QuerySummary results = client.queryBuilder().sql(sql).run();
+    assertEquals(13_512, results.recordCount());
+  }
+
+  @Test
+  public void testNonExistentColumnReadAlone() throws Exception {
+    runBoth(() -> doTestNonExistentColumnReadAlone());
+  }
+
+  private void doTestNonExistentColumnReadAlone() throws Exception {
+    String sql = "select non_existent_column from cp.`store/json/single_column_long_file.json`";
+    QuerySummary results = client.queryBuilder().sql(sql).run();
+    assertEquals(13_512, results.recordCount());
+  }
+
+  @Test
+  public void testAllTextMode() throws Exception {
+    runBoth(() -> doTestAllTextMode());
+  }
+
+  private void doTestAllTextMode() throws Exception {
+    client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
+    try {
+      String sql = "select * from cp.`store/json/schema_change_int_to_string.json`";
+      QuerySummary results = client.queryBuilder().sql(sql).run();
+
+      // This is a pretty lame test as it does not verify results. However,
+      // enough other all-text mode tests do verify results. Here, we just
+      // make sure that the query does not die with a schema change exception.
+
+      assertEquals(3, results.recordCount());
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+    }
+  }
+
+  private void testExistentColumns(RowSet result) throws SchemaChangeException {
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+        .addArray("field_1", MinorType.BIGINT)
+        .addMap("field_3")
+          .addNullable("inner_1", MinorType.BIGINT)
+          .addNullable("inner_2", MinorType.BIGINT)
+          .resumeSchema()
+        .addMap("field_4")
+          .addArray("inner_1", MinorType.BIGINT)
+          .addNullable("inner_2", MinorType.BIGINT)
+          .resumeSchema()
+        .build();
+
+    RowSet expected = client.rowSetBuilder(expectedSchema)
+        .addRow(longArray(1L), mapValue(null, null), mapValue(longArray(), null))
+        .addRow(longArray(5L), mapValue(2L, null), mapValue(longArray(1L, 2L, 3L), 3L))
+        .addRow(longArray(5L, 10L, 15L), mapValue(5L, 3L), mapValue(longArray(4L, 5L, 6L), 3L))
+        .build();
+
+    RowSetUtilities.verify(expected, result);
+  }
+
+  @Test
+  public void readComplexWithStar() throws Exception {
+    runBoth(() -> doReadComplexWithStar());
+  }
+
+  private void doReadComplexWithStar() throws Exception {
+    RowSet results = runTest("select * from cp.`store/json/test_complex_read_with_star.json`");
+    testExistentColumns(results);
+  }
+
+  @Test
+  public void testNullWhereListExpectedNumeric() throws Exception {
+    runBoth(() -> doTestNullWhereListExpectedNumeric());
+  }
+
+  private void doTestNullWhereListExpectedNumeric() throws Exception {
+    String sql = "select * from cp.`store/json/null_where_list_expected.json`";
+    RowSet results = runTest(sql);
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+        .addArray("list_1", MinorType.BIGINT)
+        .build();
+
+    RowSet expected = client.rowSetBuilder(expectedSchema)
+        .addSingleCol(longArray(1L, 2L, 3L))
+        .addSingleCol(longArray())
+        .addSingleCol(longArray(4L, 5L, 6L))
+        .build();
+
+    RowSetUtilities.verify(expected, results);
+  }
+
+  @Test
+  public void testNullWhereMapExpectedNumeric() throws Exception {
+    runBoth(() -> doTestNullWhereMapExpectedNumeric());
+  }
+
+  private void doTestNullWhereMapExpectedNumeric() throws Exception {
+    String sql = "select * from cp.`store/json/null_where_map_expected.json`";
+    RowSet results = runTest(sql);
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+        .addMap("map_1")
+          .addNullable("f_1", MinorType.BIGINT)
+          .addNullable("f_2", MinorType.BIGINT)
+          .addNullable("f_3", MinorType.BIGINT)
+          .resumeSchema()
+        .build();
+
+    RowSet expected = client.rowSetBuilder(expectedSchema)
+        .addSingleCol(mapValue(1L, 2L, 3L))
+        .addSingleCol(mapValue(null, null, null))
+        .addSingleCol(mapValue(3L, 4L, 5L))
+        .build();
+
+    RowSetUtilities.verify(expected, results);
+  }
+
+  @Test
+  public void testNullWhereMapExpectedText() throws Exception {
+    runBoth(() -> doTestNullWhereMapExpectedText());
+  }
+
+  private void doTestNullWhereMapExpectedText() throws Exception {
+    client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
+    try {
+      String sql = "select * from cp.`store/json/null_where_map_expected.json`";
+      RowSet results = runTest(sql);
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .addMap("map_1")
+            .addNullable("f_1", MinorType.VARCHAR)
+            .addNullable("f_2", MinorType.VARCHAR)
+            .addNullable("f_3", MinorType.VARCHAR)
+            .resumeSchema()
+          .build();
+
+      RowSet expected = client.rowSetBuilder(expectedSchema)
+          .addSingleCol(mapValue("1", "2", "3"))
+          .addSingleCol(mapValue(null, null, null))
+          .addSingleCol(mapValue("3", "4", "5"))
+          .build();
+
+      RowSetUtilities.verify(expected, results);
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+    }
+  }
+
+  @Test
+  public void testNullWhereListExpectedText() throws Exception {
+    runBoth(() -> doTestNullWhereListExpectedText());
+  }
+
+  private void doTestNullWhereListExpectedText() throws Exception {
+    client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
+    try {
+      String sql = "select * from cp.`store/json/null_where_list_expected.json`";
+      RowSet results = runTest(sql);
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .addArray("list_1", MinorType.VARCHAR)
+          .build();
+
+      RowSet expected = client.rowSetBuilder(expectedSchema)
+          .addSingleCol(strArray("1", "2", "3"))
+          .addSingleCol(strArray())
+          .addSingleCol(strArray("4", "5", "6"))
+          .build();
+
+      RowSetUtilities.verify(expected, results);
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+    }
+  }
+
+  @Test
+  public void ensureProjectionPushdown() throws Exception {
+    runBoth(() -> doEnsureProjectionPushdown());
+  }
+
+  private void doEnsureProjectionPushdown() throws Exception {
+    // Tests that we correctly eliminate schema-changing columns.
+    // If the query completes, the projection pushdown was successful.
+
+    client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, false);
+    try {
+      String sql = "select t.field_1, t.field_3.inner_1, t.field_3.inner_2, t.field_4.inner_1 "
+                  + "from cp.`store/json/schema_change_int_to_string.json` t";
+      assertEquals(3, client.queryBuilder().sql(sql).run().recordCount());
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+    }
+  }
+
+  /**
+   * Old description: the project pushdown rule correctly adds the
+   * projected columns to the scan, but it does not remove the redundant
+   * project operator after the scan. This test runs a physical plan generated
+   * from one of the tests to ensure that the scan alone filters out the
+   * correct data.
+   * <p>
+   * Revised functionality: the scan operator does all of the requested project
+   * operations, producing five columns.
+   */
+
+  @Test
+  public void testProjectPushdown() throws Exception {
+    try {
+      enableV2Reader(true);
+      client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, false);
+      String plan = Files.asCharSource(DrillFileUtils.getResourceAsFile(
+          "/store/json/project_pushdown_json_physical_plan.json"),
+          Charsets.UTF_8).read();
+      DirectRowSet results = client.queryBuilder().physical(plan).rowSet();
+
+      // Projects all columns (since the revised scan operator handles missing-column
+      // projection). Note that the result includes two batches, including the first empty
+      // batch.
+
+      TupleMetadata schema = new SchemaBuilder()
+          .addArray("field_1", MinorType.BIGINT)
+          .addMap("field_3")
+            .addNullable("inner_1", MinorType.BIGINT)
+            .addNullable("inner_2", MinorType.BIGINT)
+            .resumeSchema()
+          .addMap("field_4")
+            .addArray("inner_1", MinorType.BIGINT)
+            .resumeSchema()
+          .addNullable("non_existent_at_root", MinorType.VARCHAR)
+          .addMap("non_existent")
+            .addMap("nested")
+              .addNullable("field", MinorType.VARCHAR)
+              .resumeMap()
+            .resumeSchema()
+          .build();
+
+      Object nullMap = singleMap(singleMap(null));
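+      // Expected value for the missing "non_existent" column: a map wrapping a map with a null leaf.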
+      RowSet expected = client.rowSetBuilder(schema)
+          .addRow(longArray(1L), mapValue(null, null), singleMap(longArray()), null, nullMap)
+          .addRow(longArray(5L), mapValue(2L, null), singleMap(longArray(1L, 2L, 3L)), null, nullMap)
+          .addRow(longArray(5L, 10L, 15L), mapValue(5L, 3L), singleMap(longArray(4L, 5L, 6L)), null, nullMap)
+          .build();
+      RowSetUtilities.verify(expected, results);
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+      resetV2Reader();
+    }
+  }
+
+  @Test
+  public void testJsonDirectoryWithEmptyFile() throws Exception {
+    runBoth(() -> doTestJsonDirectoryWithEmptyFile());
+  }
+
+  private void doTestJsonDirectoryWithEmptyFile() throws Exception {
+    testBuilder()
+        .sqlQuery("select * from dfs.`store/json/jsonDirectoryWithEmpyFile`")
+        .unOrdered()
+        .baselineColumns("a")
+        .baselineValues(1L)
+        .build()
+        .run();
+  }
+
+  // Only works in V2 reader.
+  // Disabled because it depends on the (random) read order
+
+  @Test
+  @Ignore("unstable")
+  public void drill_4032() throws Exception {
+    try {
+      enableV2Reader(true);
+      File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4032"));
+      table_dir.mkdir();
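+      // a.json provides col2 as a map in one record while b.json has only
+      // nulls, so the readers must reconcile the map schema across files.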
+      try (PrintWriter os = new PrintWriter(new FileWriter(new File(table_dir, "a.json")))) {
+        os.write("{\"col1\": \"val1\", \"col2\": null}");
+        os.write("{\"col1\": \"val2\", \"col2\": {\"col3\":\"abc\", \"col4\":\"xyz\"}}");
+      }
+      try (PrintWriter os = new PrintWriter(new FileWriter(new File(table_dir, "b.json")))) {
+        os.write("{\"col1\": \"val3\", \"col2\": null}");
+        os.write("{\"col1\": \"val4\", \"col2\": null}");
+      }
+      String sql = "select t.col1, t.col2.col3 from dfs.tmp.drill_4032 t order by col1";
+      RowSet results = runTest(sql);
+      results.print();
+
+      TupleMetadata schema = new SchemaBuilder()
+          .addNullable("col1", MinorType.VARCHAR)
+          .addNullable("EXPR$1", MinorType.VARCHAR)
+          .build();
+
+      RowSet expected = client.rowSetBuilder(schema)
+          .addRow("val1", null)
+          .addRow("val2", "abc")
+          .addRow("val3", null)
+          .addRow("val4", null)
+          .build();
+      RowSetUtilities.verify(expected, results);
+    } finally {
+      resetV2Reader();
+    }
+  }
+
+  /** Test <pre>
+   * { "a": 5.2 }
+   * { "a": 6 }</pre>
+   * In Drill 1.16 and before, this input triggered an exception. In Drill 1.17
+   * and later, the second number, an integer, is converted to a
+   * double.
+   */
+
+  @Test
+  public void testMixedNumberTypes() throws Exception {
+    try {
+      enableV2Reader(true);
+      String sql = "select * from cp.`jsoninput/mixed_number_types.json`";
+      RowSet results = runTest(sql);
+      TupleMetadata schema = new SchemaBuilder()
+          .addNullable("a", MinorType.FLOAT8)
+          .build();
+
+      RowSet expected = client.rowSetBuilder(schema)
+          .addSingleCol(5.2D)
+          .addSingleCol(6.0D)
+          .build();
+      RowSetUtilities.verify(expected, results);
+    } finally {
+      resetV2Reader();
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderWithSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderWithSchema.java
new file mode 100644
index 0000000000..c0d4a4b80f
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderWithSchema.java
@@ -0,0 +1,24 @@
+package org.apache.drill.exec.store.json;
+
+import org.apache.drill.exec.ExecConstants;
+import org.junit.Test;
+
+public class TestJsonReaderWithSchema extends BaseTestJsonReader {
+
+  @Test
+  public void testSelectFromListWithCase() throws Exception {
+    try {
+      testBuilder()
+              .sqlQuery("select a, typeOf(a) `type` from " +
+                "(select case when is_list(field2) then field2[4][1].inner7 end a " +
+                "from cp.`jsoninput/union/a.json`) where a is not null")
+              .ordered()
+              .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+              .baselineColumns("a", "type")
+              .baselineValues(13L, "BIGINT")
+              .go();
+    } finally {
+      client.resetSession(ExecConstants.ENABLE_UNION_TYPE_KEY);
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java
index bf83ae29b9..7b0a61c496 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java
@@ -17,29 +17,63 @@
  */
 package org.apache.drill.exec.store.json;
 
-import org.apache.drill.test.BaseTestQuery;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.nio.file.Paths;
+
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.exec.proto.UserBitShared;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.proto.UserBitShared;
+import org.apache.drill.test.BaseTestQuery;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.junit.Assert;
 import org.junit.experimental.categories.Category;
 
-import java.nio.file.Paths;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
+/**
+ * Original JSON reader tests. Left in original form; not converted
+ * to the newer formats.
+ */
+@Category(RowSetTest.class)
 public class TestJsonRecordReader extends BaseTestQuery {
+
   @BeforeClass
   public static void setupTestFiles() {
     dirTestWatcher.copyResourceToRoot(Paths.get("jsoninput/drill_3353"));
   }
 
+  private void enableV2Reader(boolean enable) throws Exception {
+    alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() throws Exception {
+    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
+
+  public interface TestWrapper {
+    void apply() throws Exception;
+  }
+
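+  /**
+   * Runs the given test body twice, once with the V1 JSON reader and once
+   * with the V2 reader, then resets the session option, so that each case
+   * is verified against both implementations.
+   */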
+  public void runBoth(TestWrapper wrapper) throws Exception {
+    try {
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
+    } finally {
+      resetV2Reader();
+    }
+  }
+
   @Test
   public void testComplexJsonInput() throws Exception {
+    runBoth(this::doTestComplexJsonInput);
+  }
+
+  private void doTestComplexJsonInput() throws Exception {
     test("select `integer`, x['y'] as x1, x['y'] as x2, z[0], z[0]['orange'], z[1]['pink']  from cp.`jsoninput/input2.json` limit 10 ");
   }
 
@@ -50,11 +84,19 @@ public class TestJsonRecordReader extends BaseTestQuery {
 
   @Test
   public void testContainingArray() throws Exception {
+    runBoth(this::doTestContainingArray);
+  }
+
+  private void doTestContainingArray() throws Exception {
     test("select * from cp.`store/json/listdoc.json`");
   }
 
   @Test
   public void testComplexMultipleTimes() throws Exception {
+    runBoth(this::doTestComplexMultipleTimes);
+  }
+
+  private void doTestComplexMultipleTimes() throws Exception {
     for (int i = 0; i < 5; i++) {
       test("select * from cp.`join/merge_join.json`");
     }
@@ -62,6 +104,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
 
   @Test
   public void trySimpleQueryWithLimit() throws Exception {
+    runBoth(this::doTrySimpleQueryWithLimit);
+  }
+
+  private void doTrySimpleQueryWithLimit() throws Exception {
     test("select * from cp.`limit/test1.json` limit 10");
   }
 
@@ -69,6 +115,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
   // DRILL-1634 : retrieve an element in a nested array in a repeated map.
   // RepeatedMap (Repeated List (Repeated varchar))
   public void testNestedArrayInRepeatedMap() throws Exception {
+    runBoth(() -> doTestNestedArrayInRepeatedMap());
+  }
+
+  private void doTestNestedArrayInRepeatedMap() throws Exception {
     test("select a[0].b[0] from cp.`jsoninput/nestedArray.json`");
     test("select a[0].b[1] from cp.`jsoninput/nestedArray.json`");
     test("select a[1].b[1] from cp.`jsoninput/nestedArray.json`"); // index out of the range. Should return empty list.
@@ -76,19 +126,31 @@ public class TestJsonRecordReader extends BaseTestQuery {
 
   @Test
   public void testEmptyMapDoesNotFailValueCapacityCheck() throws Exception {
+    runBoth(() -> doTestEmptyMapDoesNotFailValueCapacityCheck());
+  }
+
+  private void doTestEmptyMapDoesNotFailValueCapacityCheck() throws Exception {
     final String sql = "select * from cp.`store/json/value-capacity.json`";
     test(sql);
   }
 
   @Test
   public void testEnableAllTextMode() throws Exception {
-    testNoResult("alter session set `store.json.all_text_mode`= true");
+    runBoth(() -> doTestEnableAllTextMode());
+  }
+
+  private void doTestEnableAllTextMode() throws Exception {
+    alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
     test("select * from cp.`jsoninput/big_numeric.json`");
-    testNoResult("alter session set `store.json.all_text_mode`= false");
+    resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
   }
 
   @Test
   public void testExceptionHandling() throws Exception {
+    runBoth(this::doTestExceptionHandling);
+  }
+
+  private void doTestExceptionHandling() throws Exception {
     try {
       test("select * from cp.`jsoninput/DRILL-2350.json`");
     } catch (UserException e) {
@@ -96,8 +158,8 @@ public class TestJsonRecordReader extends BaseTestQuery {
           UserBitShared.DrillPBError.ErrorType.UNSUPPORTED_OPERATION, e
               .getOrCreatePBError(false).getErrorType());
       String s = e.getMessage();
-      assertEquals("Expected Unsupported Operation Exception.", true,
-          s.contains("Drill does not support lists of different types."));
+      assertTrue("Expected Unsupported Operation Exception.",
+        s.contains("Drill does not support lists of different types."));
     }
 
   }
@@ -106,6 +168,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   // DRILL-1832
   public void testJsonWithNulls1() throws Exception {
+    runBoth(() -> doTestJsonWithNulls1());
+  }
+
+  private void doTestJsonWithNulls1() throws Exception {
     final String query = "select * from cp.`jsoninput/twitter_43.json`";
     testBuilder().sqlQuery(query).unOrdered()
         .jsonBaselineFile("jsoninput/drill-1832-1-result.json").go();
@@ -115,70 +181,97 @@ public class TestJsonRecordReader extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   // DRILL-1832
   public void testJsonWithNulls2() throws Exception {
+    runBoth(() -> doTestJsonWithNulls2());
+  }
+
+  private void doTestJsonWithNulls2() throws Exception {
     final String query = "select SUM(1) as `sum_Number_of_Records_ok` from cp.`jsoninput/twitter_43.json` having (COUNT(1) > 0)";
     testBuilder().sqlQuery(query).unOrdered()
         .jsonBaselineFile("jsoninput/drill-1832-2-result.json").go();
   }
 
+  // V1-only test. In V2, this works. See TestJsonReaderQueries.
+
   @Test
   public void testMixedNumberTypes() throws Exception {
     try {
+      enableV2Reader(false);
       testBuilder()
           .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
           .unOrdered().jsonBaselineFile("jsoninput/mixed_number_types.json")
           .build().run();
+      fail("Mixed number types verification failed, expected failure on conflicting number types.");
     } catch (Exception ex) {
+      // this indicates successful completion of the test
       assertTrue(ex
           .getMessage()
           .contains(
               "You tried to write a BigInt type when you are using a ValueWriter of type NullableFloat8WriterImpl."));
-      // this indicates successful completion of the test
-      return;
+    } finally {
+      resetV2Reader();
     }
-    throw new Exception(
-        "Mixed number types verification failed, expected failure on conflicting number types.");
   }
 
   @Test
   public void testMixedNumberTypesInAllTextMode() throws Exception {
-    testNoResult("alter session set `store.json.all_text_mode`= true");
-    testBuilder()
-        .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
-        .unOrdered().baselineColumns("a").baselineValues("5.2")
-        .baselineValues("6").build().run();
+    runBoth(() -> doTestMixedNumberTypesInAllTextMode());
+  }
+
+  private void doTestMixedNumberTypesInAllTextMode() throws Exception {
+    try {
+      alterSession("store.json.all_text_mode", true);
+      testBuilder()
+          .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
+          .unOrdered().baselineColumns("a").baselineValues("5.2")
+          .baselineValues("6").build().run();
+    } finally {
+      resetSessionOption("store.json.all_text_mode");
+    }
   }
 
   @Test
   public void testMixedNumberTypesWhenReadingNumbersAsDouble() throws Exception {
     try {
-      testNoResult("alter session set `store.json.read_numbers_as_double`= true");
+      alterSession(ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE, true);
       testBuilder()
           .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
           .unOrdered().baselineColumns("a").baselineValues(5.2D)
           .baselineValues(6D).build().run();
     } finally {
-      testNoResult("alter session set `store.json.read_numbers_as_double`= false");
+      resetSessionOption(ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
     }
   }
 
   @Test
   public void drill_3353() throws Exception {
     try {
-      testNoResult("alter session set `store.json.all_text_mode` = true");
-      test("create table dfs.tmp.drill_3353 as select a from dfs.`jsoninput/drill_3353` where e = true");
-      String query = "select t.a.d cnt from dfs.tmp.drill_3353 t where t.a.d is not null";
-      test(query);
-      testBuilder().sqlQuery(query).unOrdered().baselineColumns("cnt")
-          .baselineValues("1").go();
+      alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
+       test("create table dfs.tmp.drill_3353 as select a from dfs.`jsoninput/drill_3353` where e = true");
+      runBoth(this::doDrill_3353);
     } finally {
-      testNoResult("alter session set `store.json.all_text_mode` = false");
+      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
     }
   }
 
+  private void doDrill_3353() throws Exception {
+    String query = "select t.a.d cnt from dfs.tmp.drill_3353 t where t.a.d is not null";
+    test(query);
+    testBuilder()
+      .sqlQuery(query)
+      .unOrdered()
+      .baselineColumns("cnt")
+      .baselineValues("1")
+      .go();
+  }
+
   @Test
   @Category(UnlikelyTest.class)
   // See DRILL-3476
   public void testNestedFilter() throws Exception {
+    runBoth(this::doTestNestedFilter);
+  }
+
+  private void doTestNestedFilter() throws Exception {
     String query = "select a from cp.`jsoninput/nestedFilter.json` t where t.a.b = 1";
     String baselineQuery = "select * from cp.`jsoninput/nestedFilter.json` t where t.a.b = 1";
     testBuilder().sqlQuery(query).unOrdered().sqlBaselineQuery(baselineQuery)
@@ -192,19 +285,22 @@ public class TestJsonRecordReader extends BaseTestQuery {
   public void testCountingQuerySkippingInvalidJSONRecords() throws Exception {
     try {
       String set = "alter session set `"
-          + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = true";
+        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = true";
       String set1 = "alter session set `"
-          + ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG
-          + "` = true";
+        + ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG
+        + "` = true";
       String query = "select count(*) from cp.`jsoninput/drill4653/file.json`";
 
       testNoResult(set);
       testNoResult(set1);
-      testBuilder().unOrdered().sqlQuery(query).sqlBaselineQuery(query).build()
-          .run();
+      testBuilder()
+        .unOrdered()
+        .sqlQuery(query)
+        .sqlBaselineQuery(query)
+        .go();
     } finally {
       String set = "alter session set `"
-          + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = false";
+        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = false";
       testNoResult(set);
     }
   }
@@ -214,6 +310,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
   // See DRILL-4653
   /* Test for CountingJSONReader */
   public void testCountingQueryNotSkippingInvalidJSONRecords() throws Exception {
+    runBoth(this::doTestCountingQueryNotSkippingInvalidJSONRecords);
+  }
+
+  private void doTestCountingQueryNotSkippingInvalidJSONRecords() throws Exception {
     try {
       String query = "select count(*) from cp.`jsoninput/drill4653/file.json`";
       testBuilder().unOrdered().sqlQuery(query).sqlBaselineQuery(query).build()
@@ -231,21 +331,23 @@ public class TestJsonRecordReader extends BaseTestQuery {
   /* Test for JSONReader */
   public void testNotCountingQuerySkippingInvalidJSONRecords() throws Exception {
     try {
-
       String set = "alter session set `"
-          + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = true";
+        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = true";
       String set1 = "alter session set `"
-          + ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG
-          + "` = true";
+        + ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG
+        + "` = true";
       String query = "select sum(balance) from cp.`jsoninput/drill4653/file.json`";
       testNoResult(set);
       testNoResult(set1);
-      testBuilder().unOrdered().sqlQuery(query).sqlBaselineQuery(query).build()
-          .run();
+      testBuilder()
+        .unOrdered()
+        .sqlQuery(query)
+        .sqlBaselineQuery(query)
+        .go();
     }
     finally {
       String set = "alter session set `"
-          + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = false";
+        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = false";
       testNoResult(set);
     }
   }
@@ -256,6 +358,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
   /* Test for JSONReader */
   public void testNotCountingQueryNotSkippingInvalidJSONRecords()
       throws Exception {
+    runBoth(this::doTestNotCountingQueryNotSkippingInvalidJSONRecords);
+  }
+
+  private void doTestNotCountingQueryNotSkippingInvalidJSONRecords() throws Exception {
     try {
       String query = "select sum(balance) from cp.`jsoninput/drill4653/file.json`";
       testBuilder().unOrdered().sqlQuery(query).sqlBaselineQuery(query).build()
@@ -272,6 +378,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
   // See DRILL-7362
   /* Test for CountingJSONReader */
   public void testContainingArrayCount() throws Exception {
+    runBoth(this::doTestContainingArrayCount);
+  }
+
+  private void doTestContainingArrayCount() throws Exception {
     testBuilder()
       .sqlQuery("select count(*) as cnt from cp.`store/json/listdoc.json`")
       .unOrdered()
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonScanOp.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonScanOp.java
new file mode 100644
index 0000000000..8f03dd53e7
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonScanOp.java
@@ -0,0 +1,271 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.json;
+
+import static org.apache.drill.test.rowSet.RowSetUtilities.longArray;
+import static org.apache.drill.test.rowSet.RowSetUtilities.mapArray;
+import static org.apache.drill.test.rowSet.RowSetUtilities.mapValue;
+import static org.apache.drill.test.rowSet.RowSetUtilities.singleMap;
+import static org.apache.drill.test.rowSet.RowSetUtilities.strArray;
+import static org.junit.Assert.assertTrue;
+
+import java.io.BufferedInputStream;
+import java.io.InputStream;
+
+import org.apache.drill.categories.RowSetTest;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.physical.impl.scan.BaseScanOperatorExecTest.BaseScanFixtureBuilder;
+import org.apache.drill.exec.physical.impl.scan.ScanOperatorExec;
+import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
+import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
+import org.apache.drill.exec.physical.impl.scan.framework.SchemaNegotiator;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoader;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoaderOptions;
+import org.apache.drill.test.SubOperatorTest;
+import org.apache.drill.test.rowSet.RowSetUtilities;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(RowSetTest.class)
+public class TestJsonScanOp extends SubOperatorTest {
+
+  private static class JsonReaderFixture implements ManagedReader<SchemaNegotiator> {
+
+    private final String filePath;
+    private InputStream stream;
+    private JsonLoader jsonLoader;
+    private final JsonLoaderOptions options;
+
+    public JsonReaderFixture(String filePath, JsonLoaderOptions options) {
+      this.filePath = filePath;
+      this.options = options;
+    }
+
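+    /**
+     * Opens the classpath resource and attaches a JsonLoader to the
+     * result set loader obtained from the scan framework's negotiator.
+     */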
+    @Override
+    public boolean open(SchemaNegotiator negotiator) {
+      stream = new BufferedInputStream(getClass().getResourceAsStream(filePath));
+      jsonLoader = new JsonLoaderBuilder()
+          .resultSetLoader(negotiator.build())
+          .options(options)
+          .fromStream(stream)
+          .build();
+      return true;
+    }
+
+    @Override
+    public boolean next() {
+      return jsonLoader.readBatch();
+    }
+
+    @Override
+    public void close() {
+      if (jsonLoader != null) {
+        jsonLoader.close();
+        jsonLoader = null;
+      }
+    }
+  }
+
+  /**
+   * Test the case where the reader does not play the "first batch contains
+   * only schema" game, and instead returns data. The Scan operator will
+   * split the first batch into two: one with schema only, another with
+   * data.
+   */
+
+  @Test
+  public void testScanOperator() {
+
+    BaseScanFixtureBuilder builder = new BaseScanFixtureBuilder();
+    JsonLoaderOptions options = new JsonLoaderOptions();
+    options.allTextMode = true;
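+    // All-text mode reads every scalar as VARCHAR, sidestepping the
+    // int-to-string schema change in the input file.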
+    builder.addReader(new JsonReaderFixture("/store/json/schema_change_int_to_string.json", options));
+    builder.setProjection("field_3", "field_5");
+    ScanFixture scanFixture = builder.build();
+    ScanOperatorExec scanOp = scanFixture.scanOp;
+
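+    // The scan's first batch carries only the schema; discard it, then
+    // read the next batch, which carries the data.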
+    assertTrue(scanOp.buildSchema());
+    RowSet result = fixture.wrap(scanOp.batchAccessor().container());
+    result.clear();
+    assertTrue(scanOp.next());
+    result = fixture.wrap(scanOp.batchAccessor().container());
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+        .addMap("field_3")
+          .addNullable("inner_1", MinorType.VARCHAR)
+          .addNullable("inner_2", MinorType.VARCHAR)
+          .addMapArray("inner_3")
+            .addNullable("inner_object_field_1", MinorType.VARCHAR)
+            .resumeMap()
+          .resumeSchema()
+        .addMapArray("field_5")
+          .addArray("inner_list", MinorType.VARCHAR)
+          .addArray("inner_list_2", MinorType.VARCHAR)
+          .resumeSchema()
+        .buildSchema();
+
+    RowSet expected = fixture.rowSetBuilder(expectedSchema)
+        .addRow(mapValue(null, null, mapArray()),
+                mapArray())
+        .addRow(mapValue("2", null, mapArray()),
+                mapArray(
+                  mapValue(strArray("1", "", "6"), strArray()),
+                  mapValue(strArray("3", "8"), strArray()),
+                  mapValue(strArray("12", "", "4", "null", "5"), strArray())))
+        .addRow(mapValue("5", "3", mapArray(singleMap(null), singleMap("10"))),
+            mapArray(
+                mapValue(strArray("5", "", "6.0", "1234"), strArray()),
+                mapValue(strArray("7", "8.0", "12341324"),
+                         strArray("1", "2", "2323.443e10", "hello there")),
+                mapValue(strArray("3", "4", "5"), strArray("10", "11", "12"))))
+        .build();
+
+    RowSetUtilities.verify(expected, result);
+    scanFixture.close();
+  }
+
+  @Test
+  public void testScanProjectMapSubset() {
+
+    BaseScanFixtureBuilder builder = new BaseScanFixtureBuilder();
+    JsonLoaderOptions options = new JsonLoaderOptions();
+    builder.addReader(new JsonReaderFixture("/store/json/schema_change_int_to_string.json", options));
+    builder.setProjection("field_3.inner_1", "field_3.inner_2");
+    ScanFixture scanFixture = builder.build();
+    ScanOperatorExec scanOp = scanFixture.scanOp;
+
+    assertTrue(scanOp.buildSchema());
+    RowSet result = fixture.wrap(scanOp.batchAccessor().container());
+    assertTrue(scanOp.next());
+    result = fixture.wrap(scanOp.batchAccessor().container());
+
+    TupleMetadata schema = new SchemaBuilder()
+        .addMap("field_3")
+          .addNullable("inner_1", MinorType.BIGINT)
+          .addNullable("inner_2", MinorType.BIGINT)
+          .resumeSchema()
+        .build();
+
+    RowSet expected = fixture.rowSetBuilder(schema)
+        .addSingleCol(mapValue(null, null))
+        .addSingleCol(mapValue(2L, null))
+        .addSingleCol(mapValue(5L, 3L))
+        .build();
+    RowSetUtilities.verify(expected, result);
+    scanFixture.close();
+  }
+
+  @Test
+  public void testScanProjectMapArraySubsetAndNull() {
+
+    BaseScanFixtureBuilder builder = new BaseScanFixtureBuilder();
+    JsonLoaderOptions options = new JsonLoaderOptions();
+    options.allTextMode = true;
+    builder.addReader(new JsonReaderFixture("/store/json/schema_change_int_to_string.json", options));
+    builder.setProjection("field_5.inner_list", "field_5.dummy");
+    builder.builder().nullType(Types.optional(MinorType.VARCHAR));
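+    // The missing "dummy" column materializes using the configured null type: nullable VARCHAR.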
+    ScanFixture scanFixture = builder.build();
+    ScanOperatorExec scanOp = scanFixture.scanOp;
+
+    assertTrue(scanOp.buildSchema());
+    RowSet result = fixture.wrap(scanOp.batchAccessor().container());
+    assertTrue(scanOp.next());
+    result = fixture.wrap(scanOp.batchAccessor().container());
+
+    TupleMetadata schema = new SchemaBuilder()
+        .addMapArray("field_5")
+          .addArray("inner_list", MinorType.VARCHAR)
+          .addNullable("dummy", MinorType.VARCHAR)
+          .resumeSchema()
+        .build();
+
+    RowSet expected = fixture.rowSetBuilder(schema)
+        .addSingleCol(mapArray())
+        .addSingleCol(mapArray(
+            mapValue(strArray("1", "", "6"), null),
+            mapValue(strArray("3", "8"), null),
+            mapValue(strArray("12", "", "4", "null", "5"), null)))
+        .addSingleCol(mapArray(
+            mapValue(strArray("5", "", "6.0", "1234"), null),
+            mapValue(strArray("7", "8.0", "12341324"), null),
+            mapValue(strArray("3", "4", "5"), null)))
+        .build();
+    RowSetUtilities.verify(expected, result);
+    scanFixture.close();
+  }
+
+  @Test
+  public void testScanProject() {
+
+    BaseScanFixtureBuilder builder = new BaseScanFixtureBuilder();
+    JsonLoaderOptions options = new JsonLoaderOptions();
+    builder.addReader(new JsonReaderFixture("/store/json/schema_change_int_to_string.json", options));
+
+    // Projection omits field_2 which has an ambiguous type. Since
+    // the field is not materialized, the ambiguity is benign.
+    // (If this test triggers an error, perhaps a change has caused
+    // the column to become materialized.)
+
+    builder.setProjection("field_1", "field_3.inner_1", "field_3.inner_2", "field_4.inner_1",
+        "non_existent_at_root", "non_existent.nested.field");
+    builder.builder().nullType(Types.optional(MinorType.VARCHAR));
+    ScanFixture scanFixture = builder.build();
+    ScanOperatorExec scanOp = scanFixture.scanOp;
+
+    assertTrue(scanOp.buildSchema());
+    RowSet result = fixture.wrap(scanOp.batchAccessor().container());
+    assertTrue(scanOp.next());
+    result = fixture.wrap(scanOp.batchAccessor().container());
+
+    // Projects all columns (since the revised scan operator handles missing-column
+    // projection). Note that the result includes two batches, including the first empty
+    // batch.
+
+    TupleMetadata schema = new SchemaBuilder()
+        .addArray("field_1", MinorType.BIGINT)
+        .addMap("field_3")
+          .addNullable("inner_1", MinorType.BIGINT)
+          .addNullable("inner_2", MinorType.BIGINT)
+          .resumeSchema()
+        .addMap("field_4")
+          .addArray("inner_1", MinorType.BIGINT)
+          .resumeSchema()
+        .addNullable("non_existent_at_root", MinorType.VARCHAR)
+        .addMap("non_existent")
+          .addMap("nested")
+            .addNullable("field", MinorType.VARCHAR)
+            .resumeMap()
+          .resumeSchema()
+        .build();
+
+    Object nullMap = singleMap(singleMap(null));
+    RowSet expected = fixture.rowSetBuilder(schema)
+        .addRow(longArray(1L), mapValue(null, null), singleMap(longArray()), null, nullMap)
+        .addRow(longArray(5L), mapValue(2L, null), singleMap(longArray(1L, 2L, 3L)), null, nullMap)
+        .addRow(longArray(5L, 10L, 15L), mapValue(5L, 3L), singleMap(longArray(4L, 5L, 6L)), null, nullMap)
+        .build();
+    RowSetUtilities.verify(expected, result);
+    scanFixture.close();
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
index 0ba1f2261a..02f5d8209c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
@@ -25,7 +25,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -53,7 +53,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestLogReader extends ClusterTest {
 
   public static final String DATE_ONLY_PATTERN = "(\\d\\d\\d\\d)-(\\d\\d)-(\\d\\d) .*";
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockPlugin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockPlugin.java
index c7ddd9eabf..65cb0ab8c4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockPlugin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockPlugin.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.store.mock;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -45,7 +45,7 @@ import org.junit.experimental.categories.Category;
  * tested, where needed in unit tests.
  */
 
-@Category({RowSetTests.class, UnlikelyTest.class})
+@Category({RowSetTest.class, UnlikelyTest.class})
 public class TestMockPlugin extends ClusterTest {
 
   @BeforeClass
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockRowReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockRowReader.java
index ad207ef0e4..45587a84b5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockRowReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockRowReader.java
@@ -27,7 +27,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.DataMode;
@@ -52,7 +52,7 @@ import org.junit.experimental.categories.Category;
  * scan operator, without the rest of Drill. A side effect is that this
  * also tests the scan mechanism itself.
  */
-@Category({RowSetTests.class, UnlikelyTest.class})
+@Category({RowSetTest.class, UnlikelyTest.class})
 public class TestMockRowReader extends SubOperatorTest {
 
   private static ScanFixture buildScan(MockSubScanPOP config,
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/sequencefile/TestSequenceFileReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/sequencefile/TestSequenceFileReader.java
index fd72e8ef26..f989a3f41d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/sequencefile/TestSequenceFileReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/sequencefile/TestSequenceFileReader.java
@@ -23,7 +23,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.nio.file.Paths;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -39,7 +39,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSequenceFileReader extends ClusterTest {
 
   @BeforeClass
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java
index e80b9f06d7..5ab2cfeb1b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java
@@ -25,13 +25,13 @@ public class TestComplexTypeWriter  extends BaseTestQuery {
   @Test
   //basic case. convert varchar into json.
   public void testA0() throws Exception{
-    test(" select convert_from('{x:100, y:215.6}' ,'JSON') as mycol from cp.`tpch/nation.parquet`;");
+    test("select convert_from('{x:100, y:215.6}' ,'JSON') as mycol from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //map contains int, float, repeated list , repeated map, nested repeated map, etc.
   public void testA1() throws Exception{
-    test(" select convert_from('{x:100, y:215.6, z: [1, 2, 3], s : [[5, 6, 7], [8, 9]], " +
+    test("select convert_from('{x:100, y:215.6, z: [1, 2, 3], s : [[5, 6, 7], [8, 9]], " +
                                 " t : [{a : 100, b: 200}, {a:300, b: 400}], " +
                                 " nrmp: [ { x: [{ id: 123}], y: { y : \"SQL\"} }] }' ,'JSON') " +
                                 " as mycol from cp.`tpch/nation.parquet`;");
@@ -40,55 +40,55 @@ public class TestComplexTypeWriter  extends BaseTestQuery {
   @Test
   //two convert functions.
   public void testA2() throws Exception{
-    test(" select convert_from('{x:100, y:215.6}' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
+    test("select convert_from('{x:100, y:215.6}' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //two convert functions.  One convert's input comes from a string concat function.
   public void testA3() throws Exception{
-    test(" select convert_from(concat('{x:100,',  'y:215.6}') ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
+    test("select convert_from(concat('{x:100,',  'y:215.6}') ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //two convert functions. One's input is an empty map.
   public void testA4() throws Exception{
-    test(" select convert_from('{}' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
+    test("select convert_from('{}' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //two convert functions. One's input is an empty list ( ok to have null in the result?)
   public void testA5() throws Exception{
-    test(" select convert_from('[]' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
+    test("select convert_from('[]' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //input is a list of BigInt. Output will be a repeated list vector.
   public void testA6() throws Exception{
-    test(" select convert_from('[1, 2, 3]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
+    test("select convert_from('[1, 2, 3]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //input is a list of float. Output will be a repeated list vector.
   public void testA7() throws Exception{
-    test(" select convert_from('[1.2, 2.3, 3.5]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
+    test("select convert_from('[1.2, 2.3, 3.5]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //input is a list of list of big int. Output will be a repeated list vector.
   public void testA8() throws Exception{
-    test(" select convert_from('[ [1, 2], [3, 4], [5]]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
+    test("select convert_from('[ [1, 2], [3, 4], [5]]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //input is a list of map. Output will be a repeated list vector.
   public void testA9() throws Exception{
-    test(" select convert_from('[{a : 100, b: 200}, {a:300, b: 400}]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
+    test("select convert_from('[{a : 100, b: 200}, {a:300, b: 400}]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //two convert functions, one regular nest functions, used with Filter op.
   public void testA10() throws Exception{
-    test(" select convert_from('{x:100, y:215.6}' ,'JSON') as mycol1, " +
+    test("select convert_from('{x:100, y:215.6}' ,'JSON') as mycol1, " +
          "        convert_from('{x:200, y:678.9}' ,'JSON') as mycol2, " +
          "        1 + 2 * 3 as numvalue " +
          " from cp.`tpch/nation.parquet` where n_nationkey > 5;");
@@ -97,15 +97,14 @@ public class TestComplexTypeWriter  extends BaseTestQuery {
   @Test
   //convert from string constructed from columns in parquet file.
   public void testA11() throws Exception{
-    test(" select convert_from(concat(concat('{ NationName: \"', N_NAME) , '\"}'), 'JSON')" +
+    test("select convert_from(concat(concat('{ NationName: \"', N_NAME) , '\"}'), 'JSON')" +
          " from cp.`tpch/nation.parquet` where n_nationkey > 5;");
   }
 
   @Test
   //Test multiple batches creation ( require multiple alloc for complex writer during Project ).
   public void testA100() throws Exception{
-    test(" select convert_from(concat(concat('{ Price : ', L_EXTENDEDPRICE) , '}') , 'JSON') " +
+    test("select convert_from(concat(concat('{ Price : ', L_EXTENDEDPRICE) , '}') , 'JSON') " +
          " from cp.`tpch/lineitem.parquet` limit 10; ");
   }
-
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
index bc27e88044..cb205453b1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
@@ -22,15 +22,17 @@ import static org.junit.Assert.assertEquals;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.List;
+import java.util.TimeZone;
 
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
-import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+// TODO: Move to JSON reader package after code review
 public class TestExtendedTypes extends BaseTestQuery {
+
   @BeforeClass
   public static void setupTestFiles() {
     dirTestWatcher.copyResourceToRoot(Paths.get("vector", "complex"));
@@ -38,14 +40,19 @@ public class TestExtendedTypes extends BaseTestQuery {
 
   @Test
   public void checkReadWriteExtended() throws Exception {
-    mockUtcDateTimeZone();
+    runBoth(() -> doCheckReadWriteExtended());
+  }
+
+  private void doCheckReadWriteExtended() throws Exception {
 
     final String originalFile = "vector/complex/extended.json";
     final String newTable = "TestExtendedTypes/newjson";
 
+    TimeZone origZone = TimeZone.getDefault();
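+    // Pin the default time zone to UTC so extended date/time values round-trip identically.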
     try {
-      testNoResult(String.format("ALTER SESSION SET `%s` = 'json'", ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName()));
-      testNoResult(String.format("ALTER SESSION SET `%s` = true", ExecConstants.JSON_EXTENDED_TYPES.getOptionName()));
+      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+      alterSession(ExecConstants.OUTPUT_FORMAT_OPTION, "json");
+      alterSession(ExecConstants.JSON_EXTENDED_TYPES_KEY, true);
 
       // create table
       test("create table dfs.tmp.`%s` as select * from cp.`%s`", newTable, originalFile);
@@ -57,18 +64,26 @@ public class TestExtendedTypes extends BaseTestQuery {
       final byte[] newData = Files.readAllBytes(dirTestWatcher.getDfsTestTmpDir().toPath().resolve(Paths.get(newTable, "0_0_0.json")));
       assertEquals(new String(originalData), new String(newData));
     } finally {
-      resetSessionOption(ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName());
-      resetSessionOption(ExecConstants.JSON_EXTENDED_TYPES.getOptionName());
+      TimeZone.setDefault(origZone);
+      resetSessionOption(ExecConstants.OUTPUT_FORMAT_OPTION);
+      resetSessionOption(ExecConstants.JSON_EXTENDED_TYPES_KEY);
+      test("DROP TABLE IF EXISTS dfs.tmp.`%s`", newTable);
     }
   }
 
   @Test
   public void testMongoExtendedTypes() throws Exception {
+    runBoth(() -> doTestMongoExtendedTypes());
+  }
+
+  private void doTestMongoExtendedTypes() throws Exception {
     final String originalFile = "vector/complex/mongo_extended.json";
 
+    TimeZone origZone = TimeZone.getDefault();
     try {
-      testNoResult(String.format("ALTER SESSION SET `%s` = 'json'", ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName()));
-      testNoResult(String.format("ALTER SESSION SET `%s` = true", ExecConstants.JSON_EXTENDED_TYPES.getOptionName()));
+      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+      alterSession(ExecConstants.OUTPUT_FORMAT_OPTION, "json");
+      alterSession(ExecConstants.JSON_EXTENDED_TYPES_KEY, true);
 
       int actualRecordCount = testSql(String.format("select * from cp.`%s`", originalFile));
       assertEquals(
@@ -78,10 +93,34 @@ public class TestExtendedTypes extends BaseTestQuery {
       List<QueryDataBatch> resultList = testSqlWithResults(String.format("select * from dfs.`%s`", originalFile));
       String actual = getResultString(resultList, ",");
       String expected = "drill_timestamp_millies,bin,bin1\n2015-07-07 03:59:43.488,drill,drill\n";
-      Assert.assertEquals(expected, actual);
+      assertEquals(expected, actual);
+    } finally {
+      TimeZone.setDefault(origZone);
+      resetSessionOption(ExecConstants.OUTPUT_FORMAT_OPTION);
+      resetSessionOption(ExecConstants.JSON_EXTENDED_TYPES_KEY);
+    }
+  }
+
+  private void enableV2Reader(boolean enable) throws Exception {
+    alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() throws Exception {
+    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
+
+  public interface TestWrapper {
+    void apply() throws Exception;
+  }
+
+  public void runBoth(TestWrapper wrapper) throws Exception {
+    try {
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
     } finally {
-      resetSessionOption(ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName());
-      resetSessionOption(ExecConstants.JSON_EXTENDED_TYPES.getOptionName());
+      resetV2Reader();
     }
   }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonEscapeAnyChar.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonEscapeAnyChar.java
index 314a328559..dcd65a6f92 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonEscapeAnyChar.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonEscapeAnyChar.java
@@ -20,6 +20,7 @@ package org.apache.drill.exec.vector.complex.writer;
 import org.apache.commons.io.FileUtils;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.vector.complex.writer.TestJsonReader.TestWrapper;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterTest;
 import org.junit.After;
@@ -45,9 +46,23 @@ public class TestJsonEscapeAnyChar extends ClusterTest {
     FileUtils.writeStringToFile(testFile, JSON_DATA);
   }
 
+  public void runBoth(TestWrapper wrapper) throws Exception {
+    try {
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
+    } finally {
+      resetV2Reader();
+    }
+  }
+
   @Test
   public void testwithOptionEnabled() throws Exception {
+    runBoth(() -> doTestWithOptionEnabled());
+  }
 
+  private void doTestWithOptionEnabled() throws Exception {
     try {
       enableJsonReaderEscapeAnyChar();
       testBuilder()
@@ -61,9 +76,12 @@ public class TestJsonEscapeAnyChar extends ClusterTest {
       resetJsonReaderEscapeAnyChar();
     }
   }
-
   @Test
   public void testwithOptionDisabled() throws Exception {
+    runBoth(() -> doTestWithOptionDisabled());
+  }
+
+  private void doTestWithOptionDisabled() throws Exception {
     try {
       queryBuilder().sql(QUERY)
         .run();
@@ -80,6 +98,14 @@ public class TestJsonEscapeAnyChar extends ClusterTest {
     client.alterSession(ExecConstants.JSON_READER_ESCAPE_ANY_CHAR, false);
   }
 
+  private void enableV2Reader(boolean enable) {
+    client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() {
+    client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
+
   @After
   public void teardown() throws Exception {
     FileUtils.deleteQuietly(testFile);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
index 9b32b465ff..5b440a740b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
@@ -22,6 +22,7 @@ import static org.hamcrest.CoreMatchers.containsString;
 import static org.junit.Assert.assertFalse;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.util.List;
@@ -35,15 +36,32 @@ import org.apache.drill.exec.record.RecordBatchLoader;
 import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.vector.VarCharVector;
+import org.apache.drill.exec.vector.complex.writer.TestJsonReader.TestWrapper;
 import org.apache.drill.test.BaseTestQuery;
 import org.junit.Ignore;
 import org.junit.Test;
 
+// TODO: Move to JSON reader package after code review
+// TODO: Split or rename: this tests more than NaN/Inf
 public class TestJsonNanInf extends BaseTestQuery {
 
+  public void runBoth(TestWrapper wrapper) throws Exception {
+    try {
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
+    } finally {
+      resetV2Reader();
+    }
+  }
 
   @Test
   public void testNanInfSelect() throws Exception {
+    runBoth(() -> doTestNanInfSelect());
+  }
+
+  private void doTestNanInfSelect() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "{\"nan_col\":NaN, \"inf_col\":Infinity}";
@@ -65,6 +83,10 @@ public class TestJsonNanInf extends BaseTestQuery {
   @Test
   @Ignore // see DRILL-6018
   public void testExcludePositiveInfinity() throws Exception {
+    runBoth(() -> doTestExcludePositiveInfinity());
+  }
+
+  private void doTestExcludePositiveInfinity() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "[{\"nan_col\":NaN, \"inf_col\":Infinity}," +
@@ -87,6 +109,10 @@ public class TestJsonNanInf extends BaseTestQuery {
   @Test
   @Ignore // see DRILL-6018
   public void testExcludeNegativeInfinity() throws Exception {
+    runBoth(() -> doTestExcludeNegativeInfinity());
+  }
+
+  private void doTestExcludeNegativeInfinity() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "[{\"nan_col\":NaN, \"inf_col\":-Infinity}," +
@@ -109,6 +135,10 @@ public class TestJsonNanInf extends BaseTestQuery {
   @Test
   @Ignore // see DRILL-6018
   public void testIncludePositiveInfinity() throws Exception {
+    runBoth(() -> doTestIncludePositiveInfinity());
+  }
+
+  private void doTestIncludePositiveInfinity() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "[{\"nan_col\":NaN, \"inf_col\":Infinity}," +
@@ -130,6 +160,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testExcludeNan() throws Exception {
+    runBoth(() -> doTestExcludeNan());
+  }
+
+  private void doTestExcludeNan() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "[{\"nan_col\":NaN, \"inf_col\":-Infinity}," +
@@ -149,9 +183,12 @@ public class TestJsonNanInf extends BaseTestQuery {
     }
   }
 
-
   @Test
   public void testIncludeNan() throws Exception {
+    runBoth(() -> doTestIncludeNan());
+  }
+
+  private void doTestIncludeNan() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "[{\"nan_col\":NaN, \"inf_col\":-Infinity}," +
@@ -171,8 +208,12 @@ public class TestJsonNanInf extends BaseTestQuery {
     }
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testNanInfFailure() throws Exception {
+    runBoth(() -> doTestNanInfFailure());
+  }
+
+  private void doTestNanInfFailure() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     test("alter session set `%s` = false", ExecConstants.JSON_READER_NAN_INF_NUMBERS);
@@ -180,9 +221,9 @@ public class TestJsonNanInf extends BaseTestQuery {
     try {
       FileUtils.writeStringToFile(file, json);
       test("select * from dfs.`%s`;", table);
+      fail();
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString("Error parsing JSON"));
-      throw e;
     } finally {
       resetSessionOption(ExecConstants.JSON_READER_NAN_INF_NUMBERS);
       FileUtils.deleteQuietly(file);
@@ -191,6 +232,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testCreateTableNanInf() throws Exception {
+    runBoth(() -> doTestCreateTableNanInf());
+  }
+
+  private void doTestCreateTableNanInf() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "{\"nan_col\":NaN, \"inf_col\":Infinity}";
@@ -217,6 +262,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testConvertFromJsonFunction() throws Exception {
+    runBoth(() -> doTestConvertFromJsonFunction());
+  }
+
+  private void doTestConvertFromJsonFunction() throws Exception {
     String table = "nan_test.csv";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String csv = "col_0, {\"nan_col\":NaN}";
@@ -234,10 +283,12 @@ public class TestJsonNanInf extends BaseTestQuery {
     }
   }
 
-
-
   @Test
   public void testLargeStringBinary() throws Exception {
+    runBoth(() -> doTestLargeStringBinary());
+  }
+
+  private void doTestLargeStringBinary() throws Exception {
     String chunk = "0123456789";
     StringBuilder builder = new StringBuilder();
     for (int i = 0; i < 1000; i++) {
@@ -249,6 +300,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testConvertToJsonFunction() throws Exception {
+    runBoth(() -> doTestConvertToJsonFunction());
+  }
+
+  private void doTestConvertToJsonFunction() throws Exception {
     String table = "nan_test.csv";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String csv = "col_0, {\"nan_col\":NaN}";
@@ -290,6 +345,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testOrderByWithNaN() throws Exception {
+    runBoth(() -> doTestOrderByWithNaN());
+  }
+
+  private void doTestOrderByWithNaN() throws Exception {
     String table_name = "nan_test.json";
     String json = "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":3, \"attr4\":NaN}\n" +
         "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":4, \"attr4\":Infinity}\n" +
@@ -319,6 +378,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testNestedLoopJoinWithNaN() throws Exception {
+    runBoth(() -> doTestNestedLoopJoinWithNaN());
+  }
+
+  private void doTestNestedLoopJoinWithNaN() throws Exception {
     String table_name = "nan_test.json";
     String json = "{\"name\":\"object1\", \"attr1\":1, \"attr2\":2, \"attr3\":3, \"attr4\":NaN}\n" +
             "{\"name\":\"object1\", \"attr1\":1, \"attr2\":2, \"attr3\":3, \"attr4\":NaN}\n" +
@@ -358,6 +421,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testHashJoinWithNaN() throws Exception {
+    runBoth(() -> doTestHashJoinWithNaN());
+  }
+
+  private void doTestHashJoinWithNaN() throws Exception {
     String table_name = "nan_test.json";
     String json = "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":3, \"attr4\":NaN}\n" +
             "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":4, \"attr4\":Infinity}\n" +
@@ -386,9 +453,12 @@ public class TestJsonNanInf extends BaseTestQuery {
     }
   }
 
-
   @Test
   public void testMergeJoinWithNaN() throws Exception {
+    runBoth(() -> doTestMergeJoinWithNaN());
+  }
+
+  private void doTestMergeJoinWithNaN() throws Exception {
     String table_name = "nan_test.json";
     String json = "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":3, \"attr4\":NaN}\n" +
             "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":4, \"attr4\":Infinity}\n" +
@@ -417,4 +487,11 @@ public class TestJsonNanInf extends BaseTestQuery {
     }
   }
 
+  private void enableV2Reader(boolean enable) throws Exception {
+    alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() throws Exception {
+    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
 }
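
One detail worth noting in the hunks above: testNanInfFailure can no longer use @Test(expected = UserRemoteException.class), because the exception thrown under the first reader would end the test before runBoth reaches the second reader. The rewritten body catches and verifies the error on each iteration instead; distilled (table name as in the test):

    try {
      test("select * from dfs.`%s`;", table);
      fail();  // the query must fail while NaN/Infinity literals are disabled
    } catch (UserRemoteException e) {
      assertThat(e.getMessage(), containsString("Error parsing JSON"));
    }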
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
index 0643e22e44..bd2517ceea 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
@@ -20,42 +20,41 @@ package org.apache.drill.exec.vector.complex.writer;
 import static org.apache.drill.test.TestBuilder.listOf;
 import static org.apache.drill.test.TestBuilder.mapOf;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
 
 import java.io.BufferedOutputStream;
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
 import java.nio.file.Paths;
-import java.util.List;
-import java.util.zip.GZIPOutputStream;
 
-import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.proto.UserBitShared;
-import org.apache.drill.exec.record.RecordBatchLoader;
-import org.apache.drill.exec.record.VectorWrapper;
-import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.store.easy.json.JSONRecordReader;
 import org.apache.drill.exec.util.JsonStringHashMap;
 import org.apache.drill.exec.util.Text;
-import org.apache.drill.exec.vector.IntVector;
-import org.apache.drill.exec.vector.RepeatedBigIntVector;
 import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
 import org.apache.drill.shaded.guava.com.google.common.io.Files;
 import org.apache.drill.test.BaseTestQuery;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * Some tests previously here have been moved and rewritten to use
+ * the newer test framework. Find them in
+ * <tt>org.apache.drill.exec.store.json</tt>:
+ * <ul>
+ * <li><tt>TestJsonReaderFns</tt></li>
+ * <li><tt>TestJsonReaderQueries</tt></li>
+ * </ul>
+ */
+// TODO: Move to JSON reader package after code review
+@Category(RowSetTest.class)
 public class TestJsonReader extends BaseTestQuery {
   private static final Logger logger = LoggerFactory.getLogger(TestJsonReader.class);
 
@@ -65,60 +64,44 @@ public class TestJsonReader extends BaseTestQuery {
     dirTestWatcher.copyResourceToRoot(Paths.get("vector","complex", "writer"));
   }
 
-  @Test
-  public void testEmptyList() throws Exception {
-    final String root = "store/json/emptyLists";
+  private void enableV2Reader(boolean enable) throws Exception {
+    alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
 
-    testBuilder()
-        .sqlQuery("select count(a[0]) as ct from dfs.`%s`", root, root)
-        .ordered()
-        .baselineColumns("ct")
-        .baselineValues(6l)
-        .build()
-        .run();
+  private void resetV2Reader() throws Exception {
+    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
   }
 
-  @Test
-  public void schemaChange() throws Exception {
-    // Verifies that the schema change does not cause a
-    // crash. A pretty minimal test.
-    // TODO: Verify actual results.
-    test("select b from dfs.`vector/complex/writer/schemaChange/`");
+  public interface TestWrapper {
+    void apply() throws Exception;
   }
 
-  @Test
-  public void testFieldSelectionBug() throws Exception {
+  public void runBoth(TestWrapper wrapper) throws Exception {
     try {
-      testBuilder()
-          .sqlQuery("select t.field_4.inner_3 as col_1, t.field_4 as col_2 from cp.`store/json/schema_change_int_to_string.json` t")
-          .unOrdered()
-          .optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
-          .baselineColumns("col_1", "col_2")
-          .baselineValues(
-              mapOf(),
-              mapOf(
-                  "inner_1", listOf(),
-                  "inner_3", mapOf()))
-          .baselineValues(
-              mapOf("inner_object_field_1", "2"),
-              mapOf(
-                  "inner_1", listOf("1", "2", "3"),
-                  "inner_2", "3",
-                  "inner_3", mapOf("inner_object_field_1", "2")))
-          .baselineValues(
-              mapOf(),
-              mapOf(
-                  "inner_1", listOf("4", "5", "6"),
-                  "inner_2", "3",
-                  "inner_3", mapOf()))
-          .go();
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
     } finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
+      resetV2Reader();
     }
   }
 
+  @Test
+  public void schemaChange() throws Exception {
+    runBoth(() -> doSchemaChange());
+  }
+
+  private void doSchemaChange() throws Exception {
+    test("select b from dfs.`vector/complex/writer/schemaChange/`");
+  }
+
   @Test
   public void testSplitAndTransferFailure() throws Exception {
+    runBoth(() -> doTestSplitAndTransferFailure());
+  }
+
+  private void doTestSplitAndTransferFailure() throws Exception {
     final String testVal = "a string";
     testBuilder()
         .sqlQuery("select flatten(config) as flat from cp.`store/json/null_list.json`")
@@ -148,6 +131,10 @@ public class TestJsonReader extends BaseTestQuery {
 
   @Test // DRILL-1824
   public void schemaChangeValidate() throws Exception {
+    runBoth(() -> doSchemaChangeValidate());
+  }
+
+  private void doSchemaChangeValidate() throws Exception {
     testBuilder()
       .sqlQuery("select b from dfs.`vector/complex/writer/schemaChange/`")
       .unOrdered()
@@ -183,251 +170,7 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
-  @Test
-  public void testReadCompressed() throws Exception {
-    String filepath = "compressed_json.json";
-    File f = new File(dirTestWatcher.getRootDir(), filepath);
-    PrintWriter out = new PrintWriter(f);
-    out.println("{\"a\" :5}");
-    out.close();
-
-    gzipIt(f);
-    testBuilder()
-        .sqlQuery("select * from dfs.`%s.gz`", filepath)
-        .unOrdered()
-        .baselineColumns("a")
-        .baselineValues(5l)
-        .build().run();
-
-    // test reading the uncompressed version as well
-    testBuilder()
-        .sqlQuery("select * from dfs.`%s`", filepath)
-        .unOrdered()
-        .baselineColumns("a")
-        .baselineValues(5l)
-        .build().run();
-  }
-
-  public static void gzipIt(File sourceFile) throws IOException {
-
-    // modified from: http://www.mkyong.com/java/how-to-compress-a-file-in-gzip-format/
-    byte[] buffer = new byte[1024];
-    GZIPOutputStream gzos =
-        new GZIPOutputStream(new FileOutputStream(sourceFile.getPath() + ".gz"));
-
-    FileInputStream in =
-        new FileInputStream(sourceFile);
-
-    int len;
-    while ((len = in.read(buffer)) > 0) {
-      gzos.write(buffer, 0, len);
-    }
-    in.close();
-    gzos.finish();
-    gzos.close();
-  }
-
-  @Test
-  public void testDrill_1419() throws Exception {
-    String[] queries = {"select t.trans_id, t.trans_info.prod_id[0],t.trans_info.prod_id[1] from cp.`store/json/clicks.json` t limit 5"};
-    long[] rowCounts = {5};
-    String filename = "/store/json/clicks.json";
-    runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-  }
-
-  @Test
-  public void testRepeatedCount() throws Exception {
-    test("select repeated_count(str_list) from cp.`store/json/json_basic_repeated_varchar.json`");
-    test("select repeated_count(INT_col) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_count(FLOAT4_col) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_count(VARCHAR_col) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_count(BIT_col) from cp.`parquet/alltypes_repeated.json`");
-  }
-
-  @Test
-  public void testRepeatedContains() throws Exception {
-    test("select repeated_contains(str_list, 'asdf') from cp.`store/json/json_basic_repeated_varchar.json`");
-    test("select repeated_contains(INT_col, -2147483648) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_contains(FLOAT4_col, -1000000000000.0) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_contains(VARCHAR_col, 'qwerty' ) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_contains(BIT_col, true) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_contains(BIT_col, false) from cp.`parquet/alltypes_repeated.json`");
-  }
-
-  @Test
-  public void testSingleColumnRead_vector_fill_bug() throws Exception {
-    String[] queries = {"select * from cp.`store/json/single_column_long_file.json`"};
-    long[] rowCounts = {13512};
-    String filename = "/store/json/single_column_long_file.json";
-    runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-  }
-
-  @Test
-  public void testNonExistentColumnReadAlone() throws Exception {
-    String[] queries = {"select non_existent_column from cp.`store/json/single_column_long_file.json`"};
-    long[] rowCounts = {13512};
-    String filename = "/store/json/single_column_long_file.json";
-    runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-  }
-
-  @Test
-  public void testAllTextMode() throws Exception {
-    try {
-      alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
-      String[] queries = {"select * from cp.`store/json/schema_change_int_to_string.json`"};
-      long[] rowCounts = {3};
-      String filename = "/store/json/schema_change_int_to_string.json";
-      runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-    } finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
-    }
-  }
-
-  @Test
-  public void readComplexWithStar() throws Exception {
-    List<QueryDataBatch> results = testSqlWithResults("select * from cp.`store/json/test_complex_read_with_star.json`");
-    assertEquals(1, results.size());
-
-    RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
-    QueryDataBatch batch = results.get(0);
-
-    assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
-    assertEquals(3, batchLoader.getSchema().getFieldCount());
-    testExistentColumns(batchLoader);
-
-    batch.release();
-    batchLoader.clear();
-  }
-
-  @Test
-  public void testNullWhereListExpected() throws Exception {
-    try {
-      alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
-      String[] queries = {"select * from cp.`store/json/null_where_list_expected.json`"};
-      long[] rowCounts = {3};
-      String filename = "/store/json/null_where_list_expected.json";
-      runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-    }
-    finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
-    }
-  }
-
-  @Test
-  public void testNullWhereMapExpected() throws Exception {
-    try {
-      alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
-      String[] queries = {"select * from cp.`store/json/null_where_map_expected.json`"};
-      long[] rowCounts = {3};
-      String filename = "/store/json/null_where_map_expected.json";
-      runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-    }
-    finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
-    }
-  }
-
-  @Test
-  public void ensureProjectionPushdown() throws Exception {
-    try {
-      // Tests to make sure that we are correctly eliminating schema changing
-      // columns. If completes, means that the projection pushdown was
-      // successful.
-      test("alter system set `store.json.all_text_mode` = false; "
-          + "select  t.field_1, t.field_3.inner_1, t.field_3.inner_2, t.field_4.inner_1 "
-          + "from cp.`store/json/schema_change_int_to_string.json` t");
-    } finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
-    }
-  }
-
-  // The project pushdown rule is correctly adding the projected columns to the
-  // scan, however it is not removing the redundant project operator after the
-  // scan, this tests runs a physical plan generated from one of the tests to
-  // ensure that the project is filtering out the correct data in the scan alone.
-  @Test
-  public void testProjectPushdown() throws Exception {
-    try {
-      String[] queries = {Files.asCharSource(DrillFileUtils.getResourceAsFile(
-          "/store/json/project_pushdown_json_physical_plan.json"), Charsets.UTF_8).read()};
-      String filename = "/store/json/schema_change_int_to_string.json";
-      alterSession(ExecConstants.JSON_ALL_TEXT_MODE, false);
-      long[] rowCounts = {3};
-      runTestsOnFile(filename, UserBitShared.QueryType.PHYSICAL, queries, rowCounts);
-
-      List<QueryDataBatch> results = testPhysicalWithResults(queries[0]);
-      assertEquals(1, results.size());
-      // "`field_1`", "`field_3`.`inner_1`", "`field_3`.`inner_2`", "`field_4`.`inner_1`"
-
-      RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
-      QueryDataBatch batch = results.get(0);
-      assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
-
-      // this used to be five. It is now four. This is because the plan doesn't
-      // have a project. Scanners are not responsible for projecting non-existent
-      // columns (as long as they project one column)
-      //
-      // That said, the JSON format plugin does claim it can do project
-      // push-down, which means it will ensure columns for any column
-      // mentioned in the project list, in a form consistent with the schema
-      // path. In this case, `non_existent`.`nested`.`field` appears in
-      // the query. But, even more oddly, the missing field is inserted only
-      // if all text mode is true, omitted if all text mode is false.
-      // Seems overly complex.
-      assertEquals(3, batchLoader.getSchema().getFieldCount());
-      testExistentColumns(batchLoader);
-
-      batch.release();
-      batchLoader.clear();
-    } finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
-    }
-  }
-
-  @Test
-  public void testJsonDirectoryWithEmptyFile() throws Exception {
-    testBuilder()
-        .sqlQuery("select * from dfs.`store/json/jsonDirectoryWithEmpyFile`")
-        .unOrdered()
-        .baselineColumns("a")
-        .baselineValues(1l)
-        .build()
-        .run();
-  }
-
-  private void testExistentColumns(RecordBatchLoader batchLoader) throws SchemaChangeException {
-    VectorWrapper<?> vw = batchLoader.getValueAccessorById(
-        RepeatedBigIntVector.class,
-        batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_1")).getFieldIds()
-    );
-    assertEquals("[1]", vw.getValueVector().getAccessor().getObject(0).toString());
-    assertEquals("[5]", vw.getValueVector().getAccessor().getObject(1).toString());
-    assertEquals("[5,10,15]", vw.getValueVector().getAccessor().getObject(2).toString());
-
-    vw = batchLoader.getValueAccessorById(
-        IntVector.class,
-        batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_3", "inner_1")).getFieldIds()
-    );
-    assertNull(vw.getValueVector().getAccessor().getObject(0));
-    assertEquals(2l, vw.getValueVector().getAccessor().getObject(1));
-    assertEquals(5l, vw.getValueVector().getAccessor().getObject(2));
-
-    vw = batchLoader.getValueAccessorById(
-        IntVector.class,
-        batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_3", "inner_2")).getFieldIds()
-    );
-    assertNull(vw.getValueVector().getAccessor().getObject(0));
-    assertNull(vw.getValueVector().getAccessor().getObject(1));
-    assertEquals(3l, vw.getValueVector().getAccessor().getObject(2));
-
-    vw = batchLoader.getValueAccessorById(
-        RepeatedBigIntVector.class,
-        batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_4", "inner_1")).getFieldIds()
-    );
-    assertEquals("[]", vw.getValueVector().getAccessor().getObject(0).toString());
-    assertEquals("[1,2,3]", vw.getValueVector().getAccessor().getObject(1).toString());
-    assertEquals("[4,5,6]", vw.getValueVector().getAccessor().getObject(2).toString());
-  }
+  // TODO: Union not yet supported in V2.
 
   @Test
   public void testSelectStarWithUnionType() throws Exception {
@@ -481,6 +224,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testSelectFromListWithCase() throws Exception {
     try {
@@ -498,6 +243,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testTypeCase() throws Exception {
     try {
@@ -518,6 +265,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testSumWithTypeCase() throws Exception {
     try {
@@ -536,6 +285,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testUnionExpressionMaterialization() throws Exception {
     try {
@@ -553,6 +304,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testSumMultipleBatches() throws Exception {
     File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("multi_batch"));
@@ -577,6 +330,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testSumFilesWithDifferentSchema() throws Exception {
     File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("multi_file"));
@@ -606,6 +361,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // V1 version of the test. See TestJsonReaderQueries for the V2 version.
+
   @Test
   public void drill_4032() throws Exception {
     File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4032"));
@@ -625,19 +382,23 @@ public class TestJsonReader extends BaseTestQuery {
 
   @Test
   public void drill_4479() throws Exception {
-    try {
-      File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4479"));
-      table_dir.mkdir();
-      BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "mostlynulls.json")));
-      // Create an entire batch of null values for 3 columns
-      for (int i = 0; i < JSONRecordReader.DEFAULT_ROWS_PER_BATCH; i++) {
-        os.write("{\"a\": null, \"b\": null, \"c\": null}".getBytes());
-      }
-      // Add a row with {bigint,  float, string} values
-      os.write("{\"a\": 123456789123, \"b\": 99.999, \"c\": \"Hello World\"}".getBytes());
-      os.flush();
-      os.close();
+    File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4479"));
+    table_dir.mkdir();
+    BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "mostlynulls.json")));
+    // Create an entire batch of null values for 3 columns
+    for (int i = 0; i < JSONRecordReader.DEFAULT_ROWS_PER_BATCH; i++) {
+      os.write("{\"a\": null, \"b\": null, \"c\": null}".getBytes());
+    }
+    // Add a row with {bigint,  float, string} values
+    os.write("{\"a\": 123456789123, \"b\": 99.999, \"c\": \"Hello World\"}".getBytes());
+    os.flush();
+    os.close();
+
+    runBoth(() -> doDrill_4479());
+  }
 
+  private void doDrill_4479() throws Exception {
+    try {
       testBuilder()
         .sqlQuery("select c, count(*) as cnt from dfs.tmp.drill_4479 t group by c")
         .ordered()
@@ -675,6 +436,10 @@ public class TestJsonReader extends BaseTestQuery {
       writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
     }
 
+    runBoth(() -> doTestFlattenEmptyArrayWithAllTextMode());
+  }
+
+  private void doTestFlattenEmptyArrayWithAllTextMode() throws Exception {
     try {
       String query = "select flatten(t.a.b.c) as c from dfs.`empty_array_all_text_mode.json` t";
 
@@ -703,6 +468,10 @@ public class TestJsonReader extends BaseTestQuery {
       writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
     }
 
+    runBoth(() -> doTestFlattenEmptyArrayWithUnionType());
+  }
+
+  private void doTestFlattenEmptyArrayWithUnionType() throws Exception {
     try {
       String query = "select flatten(t.a.b.c) as c from dfs.`empty_array.json` t";
 
@@ -734,6 +503,10 @@ public class TestJsonReader extends BaseTestQuery {
       writer.write("{\"rk\": \"a\", \"m\": {\"a\":\"1\"}}");
     }
 
+    runBoth(() -> doTestKvgenWithUnionAll(fileName));
+  }
+
+  private void doTestKvgenWithUnionAll(String fileName) throws Exception {
     String query = String.format("select kvgen(m) as res from (select m from dfs.`%s` union all " +
         "select convert_from('{\"a\" : null}' ,'json') as m from (values(1)))", fileName);
     assertEquals("Row count should match", 2, testSql(query));
@@ -746,6 +519,10 @@ public class TestJsonReader extends BaseTestQuery {
       writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
     }
 
+    runBoth(() -> doTestFieldWithDots(fileName));
+  }
+
+  private void doTestFieldWithDots(String fileName) throws Exception {
     testBuilder()
       .sqlQuery("select t.m.`a.b` as a,\n" +
         "t.m.a.b as b,\n" +
@@ -759,6 +536,8 @@ public class TestJsonReader extends BaseTestQuery {
       .go();
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test // DRILL-6020
   public void testUntypedPathWithUnion() throws Exception {
     String fileName = "table.json";
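
For reference, the TestWrapper interface added in this file (and imported by the other test classes in this commit) is simply a no-arg functional interface that may throw, so a method reference is interchangeable with the lambda form used throughout; a sketch (not part of the patch):

    public interface TestWrapper {
      void apply() throws Exception;
    }

    runBoth(this::doSchemaChange);  // equivalent to runBoth(() -> doSchemaChange())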
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRowSetComparison.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRowSetComparison.java
index 0326612428..cb67b2c8d7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRowSetComparison.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRowSetComparison.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.test.rowSet.test;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.memory.RootAllocator;
@@ -32,7 +32,7 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestRowSetComparison extends BaseTest {
   private BufferAllocator allocator;
 
diff --git a/exec/vector/src/main/codegen/templates/HolderReaderImpl.java b/exec/vector/src/main/codegen/templates/HolderReaderImpl.java
index dd0c3f8a3f..62b2fd44c8 100644
--- a/exec/vector/src/main/codegen/templates/HolderReaderImpl.java
+++ b/exec/vector/src/main/codegen/templates/HolderReaderImpl.java
@@ -100,9 +100,9 @@ public class ${holderMode}${name}HolderReaderImpl extends AbstractFieldReader {
     return BasicTypeHelper.getType(holder);
 <#else>
   <#if holderMode == "Repeated">
-    return repeatedHolder.TYPE;
+    return ${holderMode}${name}Holder.TYPE;
   <#else>
-    return holder.TYPE;
+    return ${nullMode}${name}Holder.TYPE;
   </#if>
 </#if>
   }
@@ -311,6 +311,7 @@ public void copyAsField(String name, MapWriter writer) {
 </#if>
     impl.vector.getMutator().setSafe(impl.idx(), repeatedHolder);
   }
+
 <#else>
   <#if !(minor.class == "Decimal9" || minor.class == "Decimal18")>
   public void copyAsValue(${minor.class?cap_first}Writer writer) {
@@ -318,7 +319,6 @@ public void copyAsField(String name, MapWriter writer) {
       writer.write${minor.class}(<#list fields as field>holder.${field.name}<#if field_has_next>, </#if></#list>);
     }
   }
-
     <#if minor.class == "VarDecimal">
   public void copyAsField(String name, MapWriter writer, int precision, int scale) {
     ${minor.class?cap_first}Writer impl = writer.${lowerName}(name, precision, scale);
@@ -330,6 +330,7 @@ public void copyAsField(String name, MapWriter writer) {
       impl.write${minor.class}(<#list fields as field>holder.${field.name}<#if field_has_next>,</#if></#list>);
     }
   }
+
   </#if>
 </#if>
 }
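
The HolderReaderImpl template change above replaces an instance-qualified read of the static TYPE field with a class-qualified one built from the template parameters. For illustrative parameter values (say holderMode = "Repeated" and name = "Float8"; any generated holder pair expands the same way), the emitted getType() body changes roughly like this:

    // before: static field accessed through an instance
    return repeatedHolder.TYPE;

    // after: static field accessed through the generated holder class
    return RepeatedFloat8Holder.TYPE;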
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java
index 5bd6a7e5e4..98b5ab5cce 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java
@@ -105,7 +105,5 @@ public class SingleMapReaderImpl extends AbstractFieldReader {
     SingleMapWriter impl = (SingleMapWriter) writer.map(name);
     impl.container.copyFromSafe(idx(), impl.idx(), vector);
   }
-
-
 }
 
diff --git a/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java
index 6ffd170993..1e889da2cb 100644
--- a/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java
+++ b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java
@@ -26,7 +26,7 @@ import static org.junit.Assert.assertTrue;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.expr.BasicTypeHelper;
@@ -34,7 +34,7 @@ import org.apache.drill.test.BaseTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestMetadataProperties extends BaseTest {
 
   @Test
diff --git a/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
index f8e489b6c8..c66dd92673 100644
--- a/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
+++ b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
@@ -32,7 +32,7 @@ import java.util.List;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.metadata.ColumnMetadata.StructureType;
@@ -43,7 +43,7 @@ import org.junit.experimental.categories.Category;
 /**
  * Test the tuple and column metadata, including extended attributes.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestTupleSchema extends BaseTest {
 
   /**