Posted to commits@drill.apache.org by vi...@apache.org on 2022/04/27 09:47:26 UTC

[drill] branch master updated (7c3732320b -> ead453c984)

This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git


    from 7c3732320b DRILL-8155: Introduce New Plugin Authentication Modes (#2516)
     new b464b9991a DRILL-6953: Merge row set-based JSON reader
     new e98793badd DRILL-8037: Add V2 JSON Format Plugin based on EVF
     new ead453c984 DRILL-8195: Add Timestamp Zone offset ISO-8601 format for JSON EVF

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../drill/common/util/DrillDateTimeFormatter.java  |  22 +-
 .../{RowSetTests.java => RowSetTest.java}          |   2 +-
 .../drill/exec/store/esri/ShpFormatPlugin.java     |   7 +-
 .../exec/store/esri/TestShapefileFormatPlugin.java |   4 +-
 .../drill/exec/store/excel/ExcelFormatPlugin.java  |   6 +-
 .../drill/exec/store/excel/TestExcelFormat.java    |   4 +-
 .../drill/exec/store/hdf5/HDF5FormatPlugin.java    |   5 +-
 .../drill/exec/store/hdf5/TestHDF5Format.java      |   4 +-
 .../exec/store/httpd/HttpdLogFormatPlugin.java     |   1 -
 .../drill/exec/store/httpd/TestHTTPDLogReader.java |   4 +-
 .../store/httpd/TestHTTPDLogReaderUserAgent.java   |   4 +-
 .../drill/exec/store/image/ImageFormatPlugin.java  |   8 +-
 .../exec/store/image/TestImageRecordReader.java    |   6 +-
 .../store/pcap/plugin/BasePcapFormatPlugin.java    |   6 +-
 .../drill/exec/store/pcap/TestPcapEVFReader.java   |   4 +-
 .../exec/store/pcapng/TestPcapngRecordReader.java  |   4 +-
 .../store/pcapng/TestPcapngStatRecordReader.java   |   4 +-
 .../drill/exec/store/pdf/PdfFormatPlugin.java      |   7 +-
 .../apache/drill/exec/store/pdf/TestPdfFormat.java |   4 +-
 .../drill/exec/store/sas/SasFormatPlugin.java      |   7 +-
 .../apache/drill/exec/store/sas/TestSasReader.java |   4 +-
 .../drill/exec/store/spss/SpssFormatPlugin.java    |   7 +-
 .../drill/exec/store/spss/TestSpssReader.java      |   4 +-
 .../exec/store/syslog/SyslogFormatPlugin.java      |   7 +-
 .../drill/exec/store/syslog/TestSyslogFormat.java  |   4 +-
 .../apache/drill/exec/store/xml/TestXMLReader.java |   4 +-
 .../exec/store/phoenix/PhoenixCommandTest.java     |   4 +-
 .../exec/store/phoenix/PhoenixDataTypeTest.java    |   4 +-
 .../drill/exec/store/phoenix/PhoenixSQLTest.java   |   4 +-
 .../phoenix/secured/SecuredPhoenixCommandTest.java |   4 +-
 .../secured/SecuredPhoenixDataTypeTest.java        |   4 +-
 .../phoenix/secured/SecuredPhoenixSQLTest.java     |   4 +-
 .../phoenix/secured/SecuredPhoenixTestSuite.java   |   4 +-
 docs/dev/{JUnit.md => JUnit4.md}                   |   0
 docs/dev/Testing.md                                |   2 +-
 .../java/org/apache/drill/exec/ExecConstants.java  |   3 +
 .../org/apache/drill/exec/client/DrillClient.java  |   2 +-
 .../drill/exec/expr/fn/impl/TypeFunctions.java     |   1 -
 .../org/apache/drill/exec/ops/OperatorStats.java   |   8 +-
 .../physical/impl/common/HashTableTemplate.java    |   4 +-
 .../impl/protocol/VectorContainerAccessor.java     |  12 +-
 .../exec/physical/impl/scan/ScanOperatorExec.java  |   3 +-
 .../impl/scan/file/FileMetadataColumnsParser.java  |  11 +-
 .../scan/project/ExplicitSchemaProjection.java     |  73 ++-
 .../impl/scan/project/ReaderLevelProjection.java   |   3 +-
 .../scan/project/ReaderSchemaOrchestrator.java     |  11 +-
 .../physical/impl/scan/project/ResolvedTuple.java  |  22 +-
 .../impl/scan/project/ScanLevelProjection.java     |   6 +-
 .../physical/impl/validate/BatchValidator.java     |  10 +-
 .../physical/resultSet/impl/ProjectionFilter.java  |  12 +-
 .../resultSet/impl/ResultSetLoaderImpl.java        |   2 +-
 .../physical/resultSet/impl/SingleVectorState.java |  55 +-
 .../model/single/SimpleReaderBuilder.java          |  18 +-
 .../resultSet/project/ProjectionChecker.java       |   5 +-
 .../planner/sql/handlers/CreateTableHandler.java   |   3 +-
 .../org/apache/drill/exec/record/BatchSchema.java  |  66 ++-
 .../exec/record/VectorAccessibleUtilities.java     |   2 +-
 .../apache/drill/exec/record/VectorContainer.java  |   2 +-
 .../exec/record/selection/SelectionVector2.java    |  28 +-
 .../drill/exec/rpc/user/QueryResultHandler.java    |   6 +-
 .../exec/server/options/SystemOptionManager.java   |   1 +
 .../org/apache/drill/exec/store/RecordReader.java  |   3 +
 .../exec/store/dfs/WorkspaceSchemaFactory.java     |   6 +-
 .../exec/store/dfs/easy/EasyFormatPlugin.java      |  18 +-
 .../drill/exec/store/dfs/easy/EasyGroupScan.java   |   6 +-
 .../exec/store/dfs/easy/EvfV1ScanBuilder.java      |  10 +-
 .../exec/store/easy/json/JSONFormatConfig.java     | 123 ++++
 .../exec/store/easy/json/JSONFormatPlugin.java     | 187 +++---
 .../exec/store/easy/json/JSONRecordReader.java     |  11 +-
 .../exec/store/easy/json/JsonBatchReader.java      |  90 +++
 .../store/easy/json/loader/BaseFieldFactory.java   |  39 +-
 .../exec/store/easy/json/loader/FieldDefn.java     |   6 +-
 .../easy/json/loader/InferredFieldFactory.java     |  29 +-
 .../store/easy/json/loader/JsonLoaderImpl.java     |   3 +-
 .../exec/store/easy/json/loader/TupleParser.java   |   9 +-
 .../easy/json/parser/JsonStructureParser.java      |  35 +-
 .../store/easy/json/parser/JsonValueParser.java    |   4 +-
 .../store/easy/json/parser/ObjectValueParser.java  |   1 +
 .../store/easy/json/values/DateValueListener.java  |   6 +-
 .../json/values/UtcTimestampValueListener.java     |   6 +-
 .../drill/exec/store/log/LogFormatPlugin.java      |   5 +-
 .../exec/vector/complex/fn/ExtendedJsonOutput.java |   2 -
 .../drill/exec/vector/complex/fn/ExtendedType.java |   3 -
 .../exec/vector/complex/fn/ExtendedTypeName.java   |  23 +-
 .../exec/vector/complex/fn/FieldSelection.java     |   1 -
 .../drill/exec/vector/complex/fn/JsonReader.java   |  14 +-
 .../drill/exec/vector/complex/fn/JsonWriter.java   |  47 +-
 .../drill/exec/vector/complex/fn/VectorOutput.java |  84 ++-
 .../java-exec/src/main/resources/drill-module.conf |   1 +
 .../java/org/apache/drill/TestFrameworkTest.java   |  40 +-
 .../java/org/apache/drill/TestStarQueries.java     | 212 ++++---
 .../org/apache/drill/exec/TestEmptyInputSql.java   |  19 +
 .../drill/exec/expr/fn/impl/TestTypeFns.java       |  47 +-
 .../TestMetastoreWithEasyFormatPlugin.java         |  63 +-
 .../physical/impl/TopN/TestTopNSchemaChanges.java  | 111 ++--
 .../impl/join/TestMergeJoinWithSchemaChanges.java  |  16 +-
 .../impl/lateraljoin/TestE2EUnnestAndLateral.java  |  28 +-
 .../impl/protocol/TestOperatorRecordBatch.java     |  69 +--
 .../exec/physical/impl/scan/TestColumnsArray.java  |   4 +-
 .../impl/scan/TestColumnsArrayFramework.java       |   4 +-
 .../physical/impl/scan/TestColumnsArrayParser.java |   4 +-
 .../physical/impl/scan/TestFileScanFramework.java  |   4 +-
 .../impl/scan/TestImplicitColumnParser.java        |   4 +-
 .../impl/scan/TestImplicitColumnProjection.java    |   4 +-
 .../physical/impl/scan/TestScanBatchWriters.java   |   4 +-
 .../physical/impl/scan/TestScanOperExecBasics.java |   4 +-
 .../impl/scan/TestScanOperExecEarlySchema.java     |   4 +-
 .../impl/scan/TestScanOperExecLateSchema.java      |   4 +-
 .../impl/scan/TestScanOperExecOuputSchema.java     |   4 +-
 .../impl/scan/TestScanOperExecOverflow.java        |   4 +-
 .../impl/scan/TestScanOperExecSmoothing.java       |   4 +-
 .../impl/scan/TestScanOrchestratorEarlySchema.java |   4 +-
 .../scan/TestScanOrchestratorImplicitColumns.java  |   4 +-
 .../impl/scan/TestScanOrchestratorLateSchema.java  |   4 +-
 .../impl/scan/convert/TestColumnConverter.java     |   4 +-
 .../scan/project/TestConstantColumnLoader.java     |   4 +-
 .../impl/scan/project/TestNullColumnLoader.java    |   4 +-
 .../scan/project/TestReaderLevelProjection.java    |   8 +-
 .../impl/scan/project/TestRowBatchMerger.java      |   4 +-
 .../impl/scan/project/TestSchemaSmoothing.java     |   4 +-
 .../physical/impl/validate/TestBatchValidator.java |   4 +-
 .../exec/physical/impl/xsort/TestExternalSort.java |  42 +-
 .../impl/TestResultSetLoaderDictArray.java         |   4 +-
 .../resultSet/impl/TestResultSetLoaderDicts.java   |   4 +-
 .../impl/TestResultSetLoaderEmptyProject.java      |   4 +-
 .../resultSet/impl/TestResultSetLoaderLimits.java  |   4 +-
 .../impl/TestResultSetLoaderMapArray.java          |   4 +-
 .../resultSet/impl/TestResultSetLoaderMaps.java    |   4 +-
 .../impl/TestResultSetLoaderOmittedValues.java     |   9 +-
 .../impl/TestResultSetLoaderOverflow.java          |   4 +-
 .../impl/TestResultSetLoaderProtocol.java          |   4 +-
 .../impl/TestResultSetLoaderRepeatedList.java      |   4 +-
 .../resultSet/impl/TestResultSetLoaderTorture.java |   4 +-
 .../resultSet/impl/TestResultSetLoaderUnions.java  |   4 +-
 .../resultSet/impl/TestResultSetSchemaChange.java  |   4 +-
 .../resultSet/impl/TestResultVectorCache.java      |   4 +-
 .../resultSet/project/TestTupleProjection.java     |   4 +-
 .../exec/physical/rowSet/TestDummyWriter.java      |   4 +-
 .../exec/physical/rowSet/TestFillEmpties.java      |   4 +-
 .../exec/physical/rowSet/TestFixedWidthWriter.java |   4 +-
 .../physical/rowSet/TestHyperVectorReaders.java    |   4 +-
 .../exec/physical/rowSet/TestIndirectReaders.java  |   4 +-
 .../exec/physical/rowSet/TestMapAccessors.java     |   4 +-
 .../physical/rowSet/TestOffsetVectorWriter.java    |   4 +-
 .../physical/rowSet/TestRepeatedListAccessors.java |   4 +-
 .../drill/exec/physical/rowSet/TestRowSet.java     |   4 +-
 .../exec/physical/rowSet/TestScalarAccessors.java  |   4 +-
 .../exec/physical/rowSet/TestSchemaBuilder.java    |   4 +-
 .../physical/rowSet/TestVariableWidthWriter.java   |   4 +-
 .../exec/physical/rowSet/TestVariantAccessors.java |   4 +-
 .../drill/exec/server/rest/TestRestJson.java       |   2 +-
 .../org/apache/drill/exec/sql/TestAnalyze.java     |   9 +-
 .../drill/exec/sql/TestMetastoreCommands.java      | 165 +++---
 .../drill/exec/store/DropboxFileSystemTest.java    |   2 +-
 .../drill/exec/store/TestImplicitFileColumns.java  |   4 +-
 .../store/easy/json/loader/TestRepeatedList.java   |   2 +-
 .../exec/store/easy/json/loader/TestUnknowns.java  |   2 +-
 .../drill/exec/store/json/BaseTestJsonReader.java  |  60 ++
 .../json}/TestJsonEscapeAnyChar.java               |  32 +-
 .../drill/exec/store/json/TestJsonModes.java       |  12 +-
 .../writer => store/json}/TestJsonNanInf.java      | 161 ++++--
 .../writer => store/json}/TestJsonReader.java      | 444 ++++-----------
 .../drill/exec/store/json/TestJsonReaderFns.java   | 268 +++++++++
 .../exec/store/json/TestJsonReaderQueries.java     | 633 +++++++++++++++++++++
 .../TestJsonReaderWithSchema.java}                 |  38 +-
 .../exec/store/json/TestJsonRecordReader.java      | 283 ++++++---
 .../drill/exec/store/json/TestJsonScanOp.java      | 271 +++++++++
 .../apache/drill/exec/store/log/TestLogReader.java |   4 +-
 .../drill/exec/store/mock/TestMockPlugin.java      |   4 +-
 .../drill/exec/store/mock/TestMockRowReader.java   |   4 +-
 .../exec/store/parquet/TestVarlenDecimal.java      |   4 -
 .../store/sequencefile/TestSequenceFileReader.java |   4 +-
 .../complex/writer/TestComplexTypeWriter.java      |  27 +-
 .../vector/complex/writer/TestExtendedTypes.java   |  61 +-
 .../java/org/apache/drill/test/ClusterFixture.java |   4 +-
 .../java/org/apache/drill/test/ClusterTest.java    |   2 +-
 .../java/org/apache/drill/test/TestBuilder.java    |  10 +
 .../test/rowSet/test/TestRowSetComparison.java     |   4 +-
 exec/java-exec/src/test/resources/rest/cust20.json |  44 +-
 exec/java-exec/src/test/resources/rest/small.json  |  24 +-
 .../main/codegen/templates/HolderReaderImpl.java   |   7 +-
 .../record/metadata/AbstractColumnMetadata.java    |   8 +-
 .../drill/exec/record/metadata/ColumnMetadata.java |   1 +
 .../drill/exec/record/metadata/MetadataUtils.java  |  21 +-
 .../record/metadata/PrimitiveColumnMetadata.java   |  17 +
 .../apache/drill/exec/vector/NullableVector.java   |   2 +-
 .../accessor/writer/AbstractTupleWriter.java       |   6 +-
 .../vector/complex/impl/SingleMapReaderImpl.java   |   2 -
 .../record/metadata/TestMetadataProperties.java    |   4 +-
 .../exec/record/metadata/TestTupleSchema.java      |   4 +-
 190 files changed, 3211 insertions(+), 1619 deletions(-)
 copy contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/config/HBasePStoreProvider.java => common/src/main/java/org/apache/drill/common/util/DrillDateTimeFormatter.java (55%)
 rename common/src/test/java/org/apache/drill/categories/{RowSetTests.java => RowSetTest.java} (97%)
 rename docs/dev/{JUnit.md => JUnit4.md} (100%)
 create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatConfig.java
 create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonBatchReader.java
 create mode 100644 exec/java-exec/src/test/java/org/apache/drill/exec/store/json/BaseTestJsonReader.java
 rename exec/java-exec/src/test/java/org/apache/drill/exec/{vector/complex/writer => store/json}/TestJsonEscapeAnyChar.java (76%)
 rename exec/java-exec/src/test/java/org/apache/drill/exec/{vector/complex/writer => store/json}/TestJsonNanInf.java (80%)
 rename exec/java-exec/src/test/java/org/apache/drill/exec/{vector/complex/writer => store/json}/TestJsonReader.java (58%)
 create mode 100644 exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderFns.java
 create mode 100644 exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderQueries.java
 copy exec/java-exec/src/test/java/org/apache/drill/exec/store/{enumerable/EnumPluginTest.java => json/TestJsonReaderWithSchema.java} (50%)
 create mode 100644 exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonScanOp.java


[drill] 02/03: DRILL-8037: Add V2 JSON Format Plugin based on EVF

Posted by vi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit e98793badd27272316e60ea2a1ea1c230bff7036
Author: Vitalii Diravka <vi...@apache.org>
AuthorDate: Fri Nov 5 04:21:40 2021 +0200

    DRILL-8037: Add V2 JSON Format Plugin based on EVF
    
    * Enable store.json.enable_v2_reader by default (a usage sketch follows after this list)
    * Fix TestJsonReader double quotes test cases. Update jackson 2.12.1 -> 2.13.0
    * Disable V2 for experimental UNION datatype
    * Fix regressions
    * Fix JSON schema provision (it was not passed to JsonLoaderBuilder). The
      previous schema provision was a no-op: the reader schema was inferred from
      the JSON content instead. This change fixes scan and reader schema
      validation and applies the provided schema to ANALYZE commands, fixing
      TestMetastoreWithEasyFormatPlugin#testAnalyzeOnJsonTable
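
    A minimal sketch of exercising the newly defaulted option from a test,
    assuming Drill's standard ClusterTest/ClientFixture test scaffolding; the
    test class name and the sample data path are hypothetical, for illustration
    only:

        import org.apache.drill.test.ClusterFixture;
        import org.apache.drill.test.ClusterTest;
        import org.junit.BeforeClass;
        import org.junit.Test;

        public class TestJsonV2ReaderToggle extends ClusterTest { // hypothetical name

          @BeforeClass
          public static void setup() throws Exception {
            startCluster(ClusterFixture.builder(dirTestWatcher));
          }

          @Test
          public void testV2ReaderToggle() throws Exception {
            // store.json.enable_v2_reader now defaults to true; set it
            // explicitly here only to make the toggle visible.
            client.alterSession("store.json.enable_v2_reader", true);
            try {
              // Placeholder classpath resource, for illustration only.
              client.queryBuilder()
                  .sql("SELECT * FROM cp.`jsoninput/input2.json`")
                  .run();
            } finally {
              client.resetSession("store.json.enable_v2_reader");
            }
          }
        }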
---
 .../drill/exec/store/esri/ShpFormatPlugin.java     |   7 +-
 .../drill/exec/store/excel/ExcelFormatPlugin.java  |   6 +-
 .../drill/exec/store/hdf5/HDF5FormatPlugin.java    |   5 +-
 .../exec/store/httpd/HttpdLogFormatPlugin.java     |   1 -
 .../drill/exec/store/image/ImageFormatPlugin.java  |   8 +-
 .../store/pcap/plugin/BasePcapFormatPlugin.java    |   6 +-
 .../drill/exec/store/pdf/PdfFormatPlugin.java      |   7 +-
 .../drill/exec/store/sas/SasFormatPlugin.java      |   7 +-
 .../drill/exec/store/spss/SpssFormatPlugin.java    |   7 +-
 .../exec/store/syslog/SyslogFormatPlugin.java      |   7 +-
 docs/dev/{JUnit.md => JUnit4.md}                   |   0
 docs/dev/Testing.md                                |   2 +-
 .../org/apache/drill/exec/client/DrillClient.java  |   2 +-
 .../physical/impl/common/HashTableTemplate.java    |   4 +-
 .../exec/physical/impl/scan/ScanOperatorExec.java  |   3 +-
 .../impl/scan/file/FileMetadataColumnsParser.java  |  11 +-
 .../scan/project/ExplicitSchemaProjection.java     |  71 ++++----
 .../impl/scan/project/ReaderLevelProjection.java   |   3 +-
 .../scan/project/ReaderSchemaOrchestrator.java     |  11 +-
 .../physical/impl/scan/project/ResolvedTuple.java  |  22 +--
 .../impl/scan/project/ScanLevelProjection.java     |   6 +-
 .../physical/impl/validate/BatchValidator.java     |   4 +-
 .../physical/resultSet/impl/ProjectionFilter.java  |  12 +-
 .../resultSet/impl/ResultSetLoaderImpl.java        |   2 +-
 .../physical/resultSet/impl/SingleVectorState.java |   2 +-
 .../resultSet/project/ProjectionChecker.java       |   5 +-
 .../planner/sql/handlers/CreateTableHandler.java   |   3 +-
 .../exec/record/VectorAccessibleUtilities.java     |   2 +-
 .../org/apache/drill/exec/store/RecordReader.java  |   3 +
 .../exec/store/dfs/WorkspaceSchemaFactory.java     |   6 +-
 .../exec/store/dfs/easy/EasyFormatPlugin.java      |  13 +-
 .../drill/exec/store/dfs/easy/EasyGroupScan.java   |   6 +-
 .../exec/store/dfs/easy/EvfV1ScanBuilder.java      |  10 +-
 .../exec/store/easy/json/JSONFormatConfig.java     | 123 ++++++++++++++
 .../exec/store/easy/json/JSONFormatPlugin.java     | 187 ++++++++-------------
 .../exec/store/easy/json/JSONRecordReader.java     |   6 +-
 .../exec/store/easy/json/JsonBatchReader.java      |   5 +
 .../store/easy/json/loader/BaseFieldFactory.java   |  38 ++---
 .../exec/store/easy/json/loader/FieldDefn.java     |   6 +-
 .../easy/json/loader/InferredFieldFactory.java     |  29 ++--
 .../store/easy/json/loader/JsonLoaderImpl.java     |   3 +-
 .../exec/store/easy/json/loader/TupleParser.java   |   9 +-
 .../easy/json/parser/JsonStructureParser.java      |  35 ++--
 .../store/easy/json/parser/JsonValueParser.java    |   4 +-
 .../store/easy/json/parser/ObjectValueParser.java  |   1 +
 .../store/easy/json/values/DateValueListener.java  |   6 +-
 .../drill/exec/store/log/LogFormatPlugin.java      |   3 +-
 .../drill/exec/vector/complex/fn/JsonReader.java   |  14 +-
 .../drill/exec/vector/complex/fn/VectorOutput.java |  23 ++-
 .../java/org/apache/drill/TestFrameworkTest.java   |  40 +++--
 .../java/org/apache/drill/TestStarQueries.java     |   1 -
 .../drill/exec/expr/fn/impl/TestTypeFns.java       |  54 +++---
 .../TestMetastoreWithEasyFormatPlugin.java         |  63 +------
 .../physical/impl/TopN/TestTopNSchemaChanges.java  | 111 ++++++------
 .../impl/join/TestMergeJoinWithSchemaChanges.java  |  16 +-
 .../impl/lateraljoin/TestE2EUnnestAndLateral.java  |  28 ++-
 .../scan/project/TestReaderLevelProjection.java    |   4 +-
 .../exec/physical/impl/xsort/TestExternalSort.java |  42 +++--
 .../impl/TestResultSetLoaderProtocol.java          |   3 -
 .../drill/exec/server/rest/TestRestJson.java       |   2 +-
 .../drill/exec/sql/TestMetastoreCommands.java      | 165 +++++++++---------
 .../drill/exec/store/DropboxFileSystemTest.java    |   2 +-
 .../drill/exec/store/TestImplicitFileColumns.java  |   4 +-
 .../store/easy/json/loader/TestRepeatedList.java   |   2 +-
 .../exec/store/easy/json/loader/TestUnknowns.java  |   2 +-
 .../json}/TestJsonEscapeAnyChar.java               |  12 +-
 .../drill/exec/store/json/TestJsonModes.java       |   8 +
 .../writer => store/json}/TestJsonNanInf.java      | 102 +++++------
 .../writer => store/json}/TestJsonReader.java      |  59 ++++---
 .../drill/exec/store/json/TestJsonReaderFns.java   |  25 ++-
 .../exec/store/json/TestJsonReaderQueries.java     |  13 +-
 .../exec/store/json/TestJsonReaderWithSchema.java  |  25 +++
 .../exec/store/json/TestJsonRecordReader.java      | 141 +++++++++-------
 .../exec/store/parquet/TestVarlenDecimal.java      |   4 -
 .../vector/complex/writer/TestExtendedTypes.java   |   4 +-
 .../java/org/apache/drill/test/ClusterFixture.java |   4 +-
 .../java/org/apache/drill/test/ClusterTest.java    |   2 +-
 .../java/org/apache/drill/test/TestBuilder.java    |  10 ++
 exec/java-exec/src/test/resources/rest/cust20.json |  44 ++---
 exec/java-exec/src/test/resources/rest/small.json  |  24 +--
 .../record/metadata/AbstractColumnMetadata.java    |   8 +-
 .../drill/exec/record/metadata/ColumnMetadata.java |   1 +
 .../drill/exec/record/metadata/MetadataUtils.java  |  21 ++-
 .../record/metadata/PrimitiveColumnMetadata.java   |  17 ++
 .../apache/drill/exec/vector/NullableVector.java   |   2 +-
 .../accessor/writer/AbstractTupleWriter.java       |   6 +-
 86 files changed, 971 insertions(+), 856 deletions(-)

diff --git a/contrib/format-esri/src/main/java/org/apache/drill/exec/store/esri/ShpFormatPlugin.java b/contrib/format-esri/src/main/java/org/apache/drill/exec/store/esri/ShpFormatPlugin.java
index 1d84491058..5023ff61ce 100644
--- a/contrib/format-esri/src/main/java/org/apache/drill/exec/store/esri/ShpFormatPlugin.java
+++ b/contrib/format-esri/src/main/java/org/apache/drill/exec/store/esri/ShpFormatPlugin.java
@@ -17,7 +17,6 @@
  */
 package org.apache.drill.exec.store.esri;
 
-import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.logical.StoragePluginConfig;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.Types;
@@ -27,7 +26,7 @@ import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchem
 import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileReaderFactory;
 import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
 import org.apache.hadoop.conf.Configuration;
@@ -55,12 +54,12 @@ public class ShpFormatPlugin extends EasyFormatPlugin<ShpFormatConfig> {
   }
 
   @Override
-  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(EasySubScan scan, OptionManager options) throws ExecutionSetupException {
+  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(EasySubScan scan, OptionSet options) {
     return new ShpBatchReader(scan.getMaxRecords());
   }
 
   @Override
-  protected FileScanFramework.FileScanBuilder frameworkBuilder(OptionManager options, EasySubScan scan) {
+  protected FileScanFramework.FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options) {
     FileScanFramework.FileScanBuilder builder = new FileScanFramework.FileScanBuilder();
     builder.setReaderFactory(new ShpReaderFactory(scan.getMaxRecords()));
     initScanBuilder(builder, scan);
diff --git a/contrib/format-excel/src/main/java/org/apache/drill/exec/store/excel/ExcelFormatPlugin.java b/contrib/format-excel/src/main/java/org/apache/drill/exec/store/excel/ExcelFormatPlugin.java
index 063f1f7a96..e37027bb61 100644
--- a/contrib/format-excel/src/main/java/org/apache/drill/exec/store/excel/ExcelFormatPlugin.java
+++ b/contrib/format-excel/src/main/java/org/apache/drill/exec/store/excel/ExcelFormatPlugin.java
@@ -28,7 +28,7 @@ import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchem
 
 import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
 import org.apache.hadoop.conf.Configuration;
@@ -80,12 +80,12 @@ public class ExcelFormatPlugin extends EasyFormatPlugin<ExcelFormatConfig> {
 
   @Override
   public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(
-    EasySubScan scan, OptionManager options) {
+    EasySubScan scan, OptionSet options) {
     return new ExcelBatchReader(formatConfig.getReaderConfig(this), scan.getMaxRecords());
   }
 
   @Override
-  protected FileScanBuilder frameworkBuilder(OptionManager options, EasySubScan scan) {
+  protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options) {
     FileScanBuilder builder = new FileScanBuilder();
     ExcelReaderConfig readerConfig = new ExcelReaderConfig(this);
 
diff --git a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/HDF5FormatPlugin.java b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/HDF5FormatPlugin.java
index 5551af50f7..d56ffa462b 100644
--- a/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/HDF5FormatPlugin.java
+++ b/contrib/format-hdf5/src/main/java/org/apache/drill/exec/store/hdf5/HDF5FormatPlugin.java
@@ -26,14 +26,13 @@ import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileScanB
 
 import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.hdf5.HDF5BatchReader.HDF5ReaderConfig;
 
 
-
 public class HDF5FormatPlugin extends EasyFormatPlugin<HDF5FormatConfig> {
 
   public static final String DEFAULT_NAME = "hdf5";
@@ -62,7 +61,7 @@ public class HDF5FormatPlugin extends EasyFormatPlugin<HDF5FormatConfig> {
   }
 
   @Override
-  protected FileScanBuilder frameworkBuilder(OptionManager options, EasySubScan scan) {
+  protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options) {
     FileScanBuilder builder = new FileScanBuilder();
 
     builder.setReaderFactory(new HDF5ReaderFactory(new HDF5BatchReader.HDF5ReaderConfig(this, formatConfig), scan.getMaxRecords()));
diff --git a/contrib/format-httpd/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogFormatPlugin.java b/contrib/format-httpd/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogFormatPlugin.java
index 5c04fa3247..282859a5e4 100644
--- a/contrib/format-httpd/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogFormatPlugin.java
+++ b/contrib/format-httpd/src/main/java/org/apache/drill/exec/store/httpd/HttpdLogFormatPlugin.java
@@ -29,7 +29,6 @@ import org.apache.drill.exec.physical.impl.scan.v3.file.FileScanLifecycleBuilder
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
-import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin.ScanFrameworkVersion;
 import org.apache.hadoop.conf.Configuration;
 
 public class HttpdLogFormatPlugin extends EasyFormatPlugin<HttpdLogFormatConfig> {
diff --git a/contrib/format-image/src/main/java/org/apache/drill/exec/store/image/ImageFormatPlugin.java b/contrib/format-image/src/main/java/org/apache/drill/exec/store/image/ImageFormatPlugin.java
index 977faecdba..f3bd1ec527 100644
--- a/contrib/format-image/src/main/java/org/apache/drill/exec/store/image/ImageFormatPlugin.java
+++ b/contrib/format-image/src/main/java/org/apache/drill/exec/store/image/ImageFormatPlugin.java
@@ -26,7 +26,7 @@ import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileScanB
 import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchemaNegotiator;
 import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin.ScanFrameworkVersion;
@@ -74,13 +74,13 @@ public class ImageFormatPlugin extends EasyFormatPlugin<ImageFormatConfig> {
   }
 
   @Override
-  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(EasySubScan scan, OptionManager options)
+  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(EasySubScan scan, OptionSet options)
       throws ExecutionSetupException {
     return new ImageBatchReader(formatConfig, scan);
   }
 
   @Override
-  protected FileScanBuilder frameworkBuilder(OptionManager options, EasySubScan scan)
+  protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options)
       throws ExecutionSetupException {
     FileScanBuilder builder = new FileScanBuilder();
     builder.setReaderFactory(new ImageReaderFactory(formatConfig, scan));
@@ -89,4 +89,4 @@ public class ImageFormatPlugin extends EasyFormatPlugin<ImageFormatConfig> {
     builder.nullType(Types.optional(MinorType.VARCHAR));
     return builder;
   }
-}
\ No newline at end of file
+}
diff --git a/contrib/format-pcapng/src/main/java/org/apache/drill/exec/store/pcap/plugin/BasePcapFormatPlugin.java b/contrib/format-pcapng/src/main/java/org/apache/drill/exec/store/pcap/plugin/BasePcapFormatPlugin.java
index 836c254ed9..87b7c9562f 100644
--- a/contrib/format-pcapng/src/main/java/org/apache/drill/exec/store/pcap/plugin/BasePcapFormatPlugin.java
+++ b/contrib/format-pcapng/src/main/java/org/apache/drill/exec/store/pcap/plugin/BasePcapFormatPlugin.java
@@ -27,7 +27,7 @@ import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchem
 import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.physical.impl.scan.framework.ManagedScanFramework;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
@@ -111,7 +111,7 @@ public abstract class BasePcapFormatPlugin<T extends PcapFormatConfig> extends E
   }
 
   @Override
-  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(EasySubScan scan, OptionManager options) {
+  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(EasySubScan scan, OptionSet options) {
     return createReader(scan, formatConfig);
   }
 
@@ -125,7 +125,7 @@ public abstract class BasePcapFormatPlugin<T extends PcapFormatConfig> extends E
   }
 
   @Override
-  protected FileScanBuilder frameworkBuilder(OptionManager options, EasySubScan scan) {
+  protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options) {
     FileScanBuilder builder = new FileScanBuilder();
     builder.setReaderFactory(new PcapReaderFactory(formatConfig, scan));
 
diff --git a/contrib/format-pdf/src/main/java/org/apache/drill/exec/store/pdf/PdfFormatPlugin.java b/contrib/format-pdf/src/main/java/org/apache/drill/exec/store/pdf/PdfFormatPlugin.java
index 53fb4870bc..380653d66d 100644
--- a/contrib/format-pdf/src/main/java/org/apache/drill/exec/store/pdf/PdfFormatPlugin.java
+++ b/contrib/format-pdf/src/main/java/org/apache/drill/exec/store/pdf/PdfFormatPlugin.java
@@ -26,7 +26,7 @@ import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileScanB
 import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchemaNegotiator;
 import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin.ScanFrameworkVersion;
@@ -74,13 +74,12 @@ public class PdfFormatPlugin extends EasyFormatPlugin<PdfFormatConfig> {
   }
 
   @Override
-  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(
-    EasySubScan scan, OptionManager options) {
+  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(EasySubScan scan, OptionSet options) {
     return new PdfBatchReader(formatConfig.getReaderConfig(this), scan.getMaxRecords());
   }
 
   @Override
-  protected FileScanBuilder frameworkBuilder(OptionManager options, EasySubScan scan) {
+  protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options) {
     FileScanBuilder builder = new FileScanBuilder();
     PdfBatchReader.PdfReaderConfig readerConfig = new PdfBatchReader.PdfReaderConfig(this);
     builder.setReaderFactory(new PdfReaderFactory(readerConfig, scan.getMaxRecords()));
diff --git a/contrib/format-sas/src/main/java/org/apache/drill/exec/store/sas/SasFormatPlugin.java b/contrib/format-sas/src/main/java/org/apache/drill/exec/store/sas/SasFormatPlugin.java
index da8bcbc607..b5a135d482 100644
--- a/contrib/format-sas/src/main/java/org/apache/drill/exec/store/sas/SasFormatPlugin.java
+++ b/contrib/format-sas/src/main/java/org/apache/drill/exec/store/sas/SasFormatPlugin.java
@@ -27,7 +27,7 @@ import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchem
 
 import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin.ScanFrameworkVersion;
@@ -75,13 +75,12 @@ public class SasFormatPlugin extends EasyFormatPlugin<SasFormatConfig> {
   }
 
   @Override
-  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(
-    EasySubScan scan, OptionManager options)  {
+  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(EasySubScan scan, OptionSet options)  {
     return new SasBatchReader(scan.getMaxRecords());
   }
 
   @Override
-  protected FileScanBuilder frameworkBuilder(OptionManager options, EasySubScan scan) {
+  protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options) {
     FileScanBuilder builder = new FileScanBuilder();
     builder.setReaderFactory(new SasReaderFactory(scan.getMaxRecords()));
 
diff --git a/contrib/format-spss/src/main/java/org/apache/drill/exec/store/spss/SpssFormatPlugin.java b/contrib/format-spss/src/main/java/org/apache/drill/exec/store/spss/SpssFormatPlugin.java
index e62699a273..35210e7928 100644
--- a/contrib/format-spss/src/main/java/org/apache/drill/exec/store/spss/SpssFormatPlugin.java
+++ b/contrib/format-spss/src/main/java/org/apache/drill/exec/store/spss/SpssFormatPlugin.java
@@ -27,7 +27,7 @@ import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchem
 
 import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin.ScanFrameworkVersion;
@@ -74,13 +74,12 @@ public class SpssFormatPlugin extends EasyFormatPlugin<SpssFormatConfig> {
   }
 
   @Override
-  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(
-    EasySubScan scan, OptionManager options)  {
+  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(EasySubScan scan, OptionSet options)  {
     return new SpssBatchReader(scan.getMaxRecords());
   }
 
   @Override
-  protected FileScanBuilder frameworkBuilder(OptionManager options, EasySubScan scan) {
+  protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options) {
     FileScanBuilder builder = new FileScanBuilder();
     builder.setReaderFactory(new SpssReaderFactory(scan.getMaxRecords()));
 
diff --git a/contrib/format-syslog/src/main/java/org/apache/drill/exec/store/syslog/SyslogFormatPlugin.java b/contrib/format-syslog/src/main/java/org/apache/drill/exec/store/syslog/SyslogFormatPlugin.java
index aec9369d18..c8af2325e7 100644
--- a/contrib/format-syslog/src/main/java/org/apache/drill/exec/store/syslog/SyslogFormatPlugin.java
+++ b/contrib/format-syslog/src/main/java/org/apache/drill/exec/store/syslog/SyslogFormatPlugin.java
@@ -26,7 +26,7 @@ import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileScanB
 import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchemaNegotiator;
 import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin.ScanFrameworkVersion;
@@ -76,13 +76,12 @@ public class SyslogFormatPlugin extends EasyFormatPlugin<SyslogFormatConfig> {
   }
 
   @Override
-  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(
-    EasySubScan scan, OptionManager options)  {
+  public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(EasySubScan scan, OptionSet options)  {
     return new SyslogBatchReader(scan.getMaxRecords(), formatConfig, scan);
   }
 
   @Override
-  protected FileScanBuilder frameworkBuilder(OptionManager options, EasySubScan scan) {
+  protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options) {
     FileScanBuilder builder = new FileScanBuilder();
     builder.setReaderFactory(new SyslogReaderFactory(scan.getMaxRecords(), formatConfig, scan));
 
diff --git a/docs/dev/JUnit.md b/docs/dev/JUnit4.md
similarity index 100%
rename from docs/dev/JUnit.md
rename to docs/dev/JUnit4.md
diff --git a/docs/dev/Testing.md b/docs/dev/Testing.md
index 337f932ba0..c465e814c2 100644
--- a/docs/dev/Testing.md
+++ b/docs/dev/Testing.md
@@ -8,7 +8,7 @@ Drill makes extensive use of [JUnit](http://junit.org/junit4/) and other librari
 
 * [Test Data Sets](TestDataSets.md)
 * [Temp Directory Utilities](TempDirectories.md)
-* [Testing with JUnit](JUnit.md)
+* [Testing with JUnit4](JUnit4.md)
 * [Test Logging](TestLogging.md)
 
 ## Deprecated Drill Testing Techniques
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
index 1e84608eea..372bbe666f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/DrillClient.java
@@ -894,7 +894,7 @@ public class DrillClient implements Closeable, ConnectionThrottle {
       results.add(result);
     }
 
-    public List<QueryDataBatch> getResults() throws RpcException{
+    public List<QueryDataBatch> getResults() throws RpcException {
       try {
         return future.get();
       } catch (Throwable t) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
index c93de9ef47..ab176905fd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/common/HashTableTemplate.java
@@ -981,11 +981,11 @@ public abstract class HashTableTemplate implements HashTable {
 
   @Override
   public void setTargetBatchRowCount(int batchRowCount) {
-    batchHolders.get(batchHolders.size()-1).targetBatchRowCount = batchRowCount;
+    batchHolders.get(batchHolders.size() - 1).targetBatchRowCount = batchRowCount;
   }
 
   @Override
   public int getTargetBatchRowCount() {
-    return batchHolders.get(batchHolders.size()-1).targetBatchRowCount;
+    return batchHolders.get(batchHolders.size() - 1).targetBatchRowCount;
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/ScanOperatorExec.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/ScanOperatorExec.java
index e1038d5600..c0ac30bc17 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/ScanOperatorExec.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/ScanOperatorExec.java
@@ -180,8 +180,7 @@ public class ScanOperatorExec implements OperatorExec {
   private int readerCount;
   private ReaderState readerState;
 
-  public ScanOperatorExec(ScanOperatorEvents factory,
-      boolean allowEmptyResult) {
+  public ScanOperatorExec(ScanOperatorEvents factory, boolean allowEmptyResult) {
     this.factory = factory;
     this.allowEmptyResult = allowEmptyResult;
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/file/FileMetadataColumnsParser.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/file/FileMetadataColumnsParser.java
index cb51c76247..7d61f44e82 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/file/FileMetadataColumnsParser.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/file/FileMetadataColumnsParser.java
@@ -85,10 +85,7 @@ public class FileMetadataColumnsParser implements ScanProjectionParser {
     // Partition column
     int partitionIndex = Integer.parseInt(m.group(1));
     if (! referencedPartitions.contains(partitionIndex)) {
-      builder.addMetadataColumn(
-          new PartitionColumn(
-            inCol.name(),
-            partitionIndex));
+      builder.addMetadataColumn(new PartitionColumn(inCol.name(), partitionIndex));
 
       // Remember the partition for later wildcard expansion
       referencedPartitions.add(partitionIndex);
@@ -97,8 +94,7 @@ public class FileMetadataColumnsParser implements ScanProjectionParser {
     return true;
   }
 
-  private boolean buildMetadataColumn(FileMetadataColumnDefn defn,
-      RequestedColumn inCol) {
+  private boolean buildMetadataColumn(FileMetadataColumnDefn defn, RequestedColumn inCol) {
 
     // If the projected column is a map or array, then it shadows the
     // metadata column. Example: filename.x, filename[2].
@@ -136,8 +132,7 @@ public class FileMetadataColumnsParser implements ScanProjectionParser {
       if (referencedPartitions.contains(i)) {
         continue;
       }
-      builder.addMetadataColumn(new PartitionColumn(
-          metadataManager.partitionName(i), i));
+      builder.addMetadataColumn(new PartitionColumn(metadataManager.partitionName(i), i));
       referencedPartitions.add(i);
     }
     hasImplicitCols = true;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java
index b287e6c822..7f1785da78 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java
@@ -26,6 +26,8 @@ import org.apache.drill.exec.physical.resultSet.project.RequestedColumn;
 import org.apache.drill.exec.physical.resultSet.project.RequestedTuple;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
+import org.apache.drill.exec.record.metadata.MetadataUtils;
+import org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.complex.DictVector;
 import org.slf4j.Logger;
@@ -58,8 +60,7 @@ public class ExplicitSchemaProjection extends ReaderLevelProjection {
     resolveRootTuple(rootTuple, readerSchema);
   }
 
-  private void resolveRootTuple(ResolvedTuple rootTuple,
-      TupleMetadata readerSchema) {
+  private void resolveRootTuple(ResolvedTuple rootTuple, TupleMetadata readerSchema) {
     for (ColumnProjection col : scanProj.columns()) {
       if (col instanceof UnresolvedColumn) {
         resolveColumn(rootTuple, ((UnresolvedColumn) col).element(), readerSchema);
@@ -69,15 +70,12 @@ public class ExplicitSchemaProjection extends ReaderLevelProjection {
     }
   }
 
-  private void resolveColumn(ResolvedTuple outputTuple,
-      RequestedColumn inputCol, TupleMetadata readerSchema) {
+  private void resolveColumn(ResolvedTuple outputTuple, RequestedColumn inputCol, TupleMetadata readerSchema) {
     int tableColIndex = readerSchema.index(inputCol.name());
     if (tableColIndex == -1) {
       resolveNullColumn(outputTuple, inputCol);
     } else {
-      resolveTableColumn(outputTuple, inputCol,
-          readerSchema.metadata(tableColIndex),
-          tableColIndex);
+      resolveTableColumn(outputTuple, inputCol, readerSchema.metadata(tableColIndex), tableColIndex);
     }
   }
 
@@ -87,16 +85,12 @@ public class ExplicitSchemaProjection extends ReaderLevelProjection {
     if (tableColIndex == -1) {
       resolveNullColumn(outputTuple, inputCol);
     } else {
-      resolveTableColumn(outputTuple, inputCol,
-          readerSchema.metadata(tableColIndex),
-          tableColIndex);
+      resolveTableColumn(outputTuple, inputCol, readerSchema.metadata(tableColIndex), tableColIndex);
     }
   }
 
   private void resolveTableColumn(ResolvedTuple outputTuple,
-      RequestedColumn requestedCol,
-      ColumnMetadata column, int sourceIndex) {
-
+      RequestedColumn requestedCol, ColumnMetadata column, int sourceIndex) {
     // Is the requested column implied to be a map?
     // A requested column is a map if the user requests x.y and we
     // are resolving column x. The presence of y as a member implies
@@ -128,30 +122,30 @@ public class ExplicitSchemaProjection extends ReaderLevelProjection {
     }
   }
 
-  private void resolveMap(ResolvedTuple outputTuple,
-      RequestedColumn requestedCol, ColumnMetadata column,
+  private void resolveMap(ResolvedTuple outputTuple, RequestedColumn requestedCol, ColumnMetadata column,
       int sourceIndex) {
 
-    // If the actual column isn't a map, then the request is invalid.
-
-    if (! column.isMap()) {
-      throw UserException
-        .validationError()
-        .message("Project list implies a map column, but actual column is not a map")
-        .addContext("Projected column:", requestedCol.fullName())
-        .addContext("Table column:", column.name())
-        .addContext("Type:", column.type().name())
-        .addContext(scanProj.context())
-        .build(logger);
+    // If the actual column isn't a map, try to change column datatype
+    if (!column.isMap()) {
+      if(column.isScalar() && ((PrimitiveColumnMetadata) column).isSchemaForUnknown()) {
+        column = MetadataUtils.newMap(column.name());
+      } else {
+        throw UserException
+          .validationError()
+          .message("Project list implies a map column, but actual column is not a map")
+          .addContext("Projected column:", requestedCol.fullName())
+          .addContext("Table column:", column.name())
+          .addContext("Type:", column.type().name())
+          .addContext(scanProj.context())
+          .build(logger);
+      }
     }
 
     // The requested column is implied to be a map because it lists
     // members to project. Project these.
 
-    ResolvedMapColumn mapCol = new ResolvedMapColumn(outputTuple,
-        column.schema(), sourceIndex);
-    resolveTuple(mapCol.members(), requestedCol.tuple(),
-        column.tupleSchema());
+    ResolvedMapColumn mapCol = new ResolvedMapColumn(outputTuple, column.schema(), sourceIndex);
+    resolveTuple(mapCol.members(), requestedCol.tuple(), column.tupleSchema());
 
     // If the projection is simple, then just project the map column
     // as is. A projection is simple if all map columns from the table
@@ -177,14 +171,15 @@ public class ExplicitSchemaProjection extends ReaderLevelProjection {
     }
   }
 
-  private void resolveDict(ResolvedTuple outputTuple,
-                          RequestedColumn requestedCol, ColumnMetadata column,
-                          int sourceIndex) {
-
-    // If the actual column isn't a dict, then the request is invalid.
+  private void resolveDict(ResolvedTuple outputTuple, RequestedColumn requestedCol, ColumnMetadata column,
+      int sourceIndex) {
 
+    // If the actual column isn't a dict, try to change column datatype
     if (!column.isDict()) {
-      throw UserException
+      if(column.isScalar() && ((PrimitiveColumnMetadata) column).isSchemaForUnknown()) {
+        column = MetadataUtils.newDict(column.name());
+      } else {
+        throw UserException
           .validationError()
           .message("Project list implies a dict column, but actual column is not a dict")
           .addContext("Projected column:", requestedCol.fullName())
@@ -192,6 +187,7 @@ public class ExplicitSchemaProjection extends ReaderLevelProjection {
           .addContext("Type:", column.type().name())
           .addContext(scanProj.context())
           .build(logger);
+      }
     }
 
     ResolvedDictColumn dictColumn = new ResolvedDictColumn(outputTuple, column.schema(), sourceIndex);
@@ -286,8 +282,7 @@ public class ExplicitSchemaProjection extends ReaderLevelProjection {
    *           column as requested in the project list
    */
 
-  private void resolveNullColumn(ResolvedTuple outputTuple,
-      RequestedColumn requestedCol) {
+  private void resolveNullColumn(ResolvedTuple outputTuple, RequestedColumn requestedCol) {
     ResolvedColumn nullCol;
     if (requestedCol.isTuple()) {
       nullCol = resolveMapMembers(outputTuple, requestedCol);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ReaderLevelProjection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ReaderLevelProjection.java
index ff14269929..2ecdcf55c5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ReaderLevelProjection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ReaderLevelProjection.java
@@ -83,8 +83,7 @@ public class ReaderLevelProjection {
     }
   }
 
-  protected void resolveSpecial(ResolvedTuple rootOutputTuple, ColumnProjection col,
-      TupleMetadata tableSchema) {
+  protected void resolveSpecial(ResolvedTuple rootOutputTuple, ColumnProjection col, TupleMetadata tableSchema) {
     for (ReaderProjectionResolver resolver : resolvers) {
       if (resolver.resolveColumn(col, rootOutputTuple, tableSchema)) {
         return;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ReaderSchemaOrchestrator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ReaderSchemaOrchestrator.java
index 6c9d3fe6e5..6e36cf56a4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ReaderSchemaOrchestrator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ReaderSchemaOrchestrator.java
@@ -272,14 +272,12 @@ public class ReaderSchemaOrchestrator implements VectorSource {
    */
   private void doWildcardProjection(TupleMetadata tableSchema) {
     rootTuple = newRootTuple();
-    new WildcardProjection(scanOrchestrator.scanProj,
-        tableSchema, rootTuple, scanOrchestrator.options.schemaResolvers);
+    new WildcardProjection(scanOrchestrator.scanProj, tableSchema, rootTuple, scanOrchestrator.options.schemaResolvers);
   }
 
   private void doStrictWildcardProjection(TupleMetadata tableSchema) {
     rootTuple = newRootTuple();
-    new WildcardSchemaProjection(scanOrchestrator.scanProj,
-        tableSchema, rootTuple, scanOrchestrator.options.schemaResolvers);
+    new WildcardSchemaProjection(scanOrchestrator.scanProj, tableSchema, rootTuple, scanOrchestrator.options.schemaResolvers);
   }
 
   private ResolvedRow newRootTuple() {
@@ -300,9 +298,8 @@ public class ReaderSchemaOrchestrator implements VectorSource {
    */
   private void doExplicitProjection(TupleMetadata tableSchema) {
     rootTuple = newRootTuple();
-    new ExplicitSchemaProjection(scanOrchestrator.scanProj,
-            tableSchema, rootTuple,
-            scanOrchestrator.options.schemaResolvers);
+    new ExplicitSchemaProjection(scanOrchestrator.scanProj, tableSchema, rootTuple,
+      scanOrchestrator.options.schemaResolvers);
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ResolvedTuple.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ResolvedTuple.java
index 5b8294c26c..ec079c7dfe 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ResolvedTuple.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ResolvedTuple.java
@@ -204,13 +204,13 @@ public abstract class ResolvedTuple implements VectorSource {
 
     public AbstractMapVector buildMap() {
       if (parentColumn.sourceIndex() != -1) {
-        ResolvedTuple parentTuple = parentColumn.parent();
-        inputMap = (AbstractMapVector) parentTuple.vector(parentColumn.sourceIndex());
+        ValueVector vector = parentColumn.parent().vector(parentColumn.sourceIndex());
+        if(vector instanceof AbstractMapVector) {
+          inputMap = (AbstractMapVector) vector;
+        }
       }
       MaterializedField colSchema = parentColumn.schema();
-      outputMap = createMap(inputMap,
-          MaterializedField.create(
-              colSchema.getName(), colSchema.getType()),
+      outputMap = createMap(inputMap, MaterializedField.create(colSchema.getName(), colSchema.getType()),
           parentColumn.parent().allocator());
       buildColumns();
       return outputMap;
@@ -237,8 +237,7 @@ public abstract class ResolvedTuple implements VectorSource {
     @Override
     protected AbstractMapVector createMap(AbstractMapVector inputMap,
         MaterializedField schema, BufferAllocator allocator) {
-      return new MapVector(schema,
-          allocator, null);
+      return new MapVector(schema, allocator, null);
     }
 
     @Override
@@ -280,8 +279,7 @@ public abstract class ResolvedTuple implements VectorSource {
       RepeatedMapVector source = (RepeatedMapVector) inputMap;
       UInt4Vector offsets = source.getOffsetVector();
       valueCount = offsets.getAccessor().getValueCount();
-      return new RepeatedMapVector(schema,
-          offsets, null);
+      return new RepeatedMapVector(schema, offsets, null);
     }
 
     @Override
@@ -336,8 +334,10 @@ public abstract class ResolvedTuple implements VectorSource {
     @Override
     public ValueVector buildVector() {
       if (parentColumn.sourceIndex() != -1) {
-        ResolvedTuple parentTuple = parentColumn.parent();
-        inputVector = (DictVector) parentTuple.vector(parentColumn.sourceIndex());
+        ValueVector vector = parentColumn.parent().vector(parentColumn.sourceIndex());
+        if(vector instanceof DictVector) {
+          inputVector = (DictVector) vector;
+        }
       }
       MaterializedField colSchema = parentColumn.schema();
       MaterializedField dictField = MaterializedField.create(colSchema.getName(), colSchema.getType());
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ScanLevelProjection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ScanLevelProjection.java
index 9c57918ec6..934f32f9b8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ScanLevelProjection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ScanLevelProjection.java
@@ -405,8 +405,7 @@ public class ScanLevelProjection {
       }
       rootProjection = Projections.build(outputProj);
     }
-    readerProjection = ProjectionFilter.providedSchemaFilter(
-        rootProjection, readerSchema, errorContext);
+    readerProjection = ProjectionFilter.providedSchemaFilter(rootProjection, readerSchema, errorContext);
   }
 
   /**
@@ -445,8 +444,7 @@ public class ScanLevelProjection {
     // If not consumed, put the wildcard column into the projection list as a
     // placeholder to be filled in later with actual table columns.
     if (expanded) {
-      projectionType =
-          readerSchema.booleanProperty(TupleMetadata.IS_STRICT_SCHEMA_PROP)
+      projectionType = readerSchema.booleanProperty(TupleMetadata.IS_STRICT_SCHEMA_PROP)
           ? ScanProjectionType.STRICT_SCHEMA_WILDCARD
           : ScanProjectionType.SCHEMA_WILDCARD;
     } else if (wildcardPosn != -1) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
index 3bf6e71705..922652d76b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
@@ -179,8 +179,8 @@ public class BatchValidator {
   public static boolean validate(RecordBatch batch) {
     // This is a handy place to trace batches as they flow up
     // the DAG. Works best for single-threaded runs with a few records.
-    // System.out.println(batch.getClass().getSimpleName());
-    // RowSetFormatter.print(batch);
+    // System.out.println(batch.getClass().getSimpleName());
+    // RowSetFormatter.print(RowSets.wrap(batch));
     ErrorReporter reporter = errorReporter(batch);
     int rowCount = batch.getRecordCount();
     int valueCount = rowCount;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/ProjectionFilter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/ProjectionFilter.java
index 9bcbe5bbf1..a53b740d5e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/ProjectionFilter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/ProjectionFilter.java
@@ -82,8 +82,8 @@ public interface ProjectionFilter {
     }
   }
 
-  static ProjectionFilter providedSchemaFilter(RequestedTuple tupleProj,
-      TupleMetadata providedSchema, CustomErrorContext errorContext) {
+  static ProjectionFilter providedSchemaFilter(RequestedTuple tupleProj, TupleMetadata providedSchema,
+                                               CustomErrorContext errorContext) {
     if (tupleProj.type() == TupleProjectionType.NONE) {
       return PROJECT_NONE;
     }
@@ -247,8 +247,8 @@ public interface ProjectionFilter {
       } else {
         validateColumn(providedCol, col);
         if (providedCol.isMap()) {
-          return new ProjResult(true, providedCol,
-              new TypeProjectionFilter(providedCol.tupleSchema(), errorContext));
+          return new ProjResult(true, providedCol, new TypeProjectionFilter(providedCol.tupleSchema(),
+            errorContext));
         } else {
           return new ProjResult(true, providedCol);
         }
@@ -279,8 +279,8 @@ public interface ProjectionFilter {
       } else {
         validateColumn(providedCol, col);
         if (providedCol.isMap()) {
-          return new ProjResult(true, providedCol,
-              new SchemaProjectionFilter(providedCol.tupleSchema(), errorContext));
+          return new ProjResult(true, providedCol, new SchemaProjectionFilter(providedCol.tupleSchema(),
+            errorContext));
         } else {
           return new ProjResult(true, providedCol);
         }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/ResultSetLoaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/ResultSetLoaderImpl.java
index 48e0f07275..5ec24ac871 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/ResultSetLoaderImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/ResultSetLoaderImpl.java
@@ -307,7 +307,7 @@ public class ResultSetLoaderImpl implements ResultSetLoader, LoaderInternals {
       // provided schema. The schema can be extended later, but normally
       // won't be if known up front.
 
-      logger.debug("Schema: " + options.schema.toString());
+      logger.debug("Schema: " + options.schema);
       BuildFromSchema.instance().buildTuple(rootWriter, options.schema);
     }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java
index 1723451413..bf471e0bed 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java
@@ -236,7 +236,7 @@ public abstract class SingleVectorState implements VectorState {
       sourceVector.getMutator().setValueCount(offsetLength );
 
       // Getting offsets right was a pain. If you modify this code,
-      // you'll likely relive that experience. Enabling the next two
+      // you'll likely relive that experience. Enabling the following
       // lines will help reveal some of the mystery around offsets and their
       // confusing off-by-one design.
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/project/ProjectionChecker.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/project/ProjectionChecker.java
index 3769f7298e..5480e6c8dd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/project/ProjectionChecker.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/project/ProjectionChecker.java
@@ -22,6 +22,7 @@ import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
+import org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -95,7 +96,9 @@ public class ProjectionChecker {
       return true;
     }
     if (colReq.isTuple() && !(readCol.isMap() || readCol.isDict() || readCol.isVariant())) {
-      return false;
+      if (!(readCol.isScalar() && ((PrimitiveColumnMetadata) readCol).isSchemaForUnknown())) { // allow unknown schema
+        return false;
+      }
     }
     if (colReq.isArray()) {
       if (colReq.arrayDims() == 1) {
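The extra condition above relaxes the tuple-projection check: a scalar read column no longer rejects a tuple-style projection when its schema was synthesized for an unknown (for example, all-null) JSON field. A self-contained sketch of the decision, with booleans standing in for Drill's metadata queries:

    class TupleProjectionCheckSketch {
      // Tuple projections are rejected for concrete scalar columns, but allowed
      // when the scalar schema is only a placeholder for an unknown type
      // (mirrors the isSchemaForUnknown() test above).
      static boolean tupleProjectionConsistent(boolean readColIsMapLike,
          boolean readColIsScalar, boolean schemaForUnknown) {
        return readColIsMapLike || (readColIsScalar && schemaForUnknown);
      }

      public static void main(String[] args) {
        System.out.println(tupleProjectionConsistent(true, false, false));  // true
        System.out.println(tupleProjectionConsistent(false, true, true));   // true
        System.out.println(tupleProjectionConsistent(false, true, false));  // false
      }
    }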
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java
index 082fe2783b..341a81760d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java
@@ -127,8 +127,7 @@ public class CreateTableHandler extends DefaultSqlHandler {
                                  String tableName,
                                  List<String> partitionColumns,
                                  RelDataType queryRowType,
-                                 StorageStrategy storageStrategy)
-      throws RelConversionException, SqlUnsupportedException {
+                                 StorageStrategy storageStrategy) throws SqlUnsupportedException {
     final DrillRel convertedRelNode = convertToRawDrel(relNode);
 
     // Put a non-trivial topProject to ensure the final output field name is preserved, when necessary.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorAccessibleUtilities.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorAccessibleUtilities.java
index f8fcb3f25d..160528edb5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorAccessibleUtilities.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorAccessibleUtilities.java
@@ -22,7 +22,7 @@ import org.apache.drill.exec.vector.ValueVector;
 
 /**
  * VectorAccessible is an interface. Yet, several operations are done
- * on VectorAccessible over and over gain. While Java 8 allows static
+ * on VectorAccessible over and over again. TODO: While Java 8 allows static
  * methods on an interface, Drill uses Java 7, which does not. This
  * class is a placeholder for common VectorAccessible methods that
  * can migrate into the interface when Drill upgrades to Java 8.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java
index 210b0a0e7c..c3024ce100 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordReader.java
@@ -29,6 +29,9 @@ import org.apache.drill.exec.planner.sql.handlers.FindLimit0Visitor;
 import org.apache.drill.exec.store.pojo.PojoRecordReader;
 import org.apache.drill.exec.vector.ValueVector;
 
+/**
+ * For new implementations, please use the new {@link org.apache.drill.exec.physical.impl.scan.framework.ManagedReader}.
+ */
 @JsonTypeInfo(
     use = JsonTypeInfo.Id.NAME,
     include = JsonTypeInfo.As.PROPERTY,
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
index 00f378c5a4..b117b43f35 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
@@ -67,12 +67,12 @@ import org.apache.drill.exec.record.metadata.schema.FsMetastoreSchemaProvider;
 import org.apache.drill.exec.store.AbstractSchema;
 import org.apache.drill.exec.store.PartitionNotFoundException;
 import org.apache.drill.exec.store.SchemaConfig;
+import org.apache.drill.exec.store.easy.json.JSONFormatPlugin;
 import org.apache.drill.exec.store.table.function.TableParamDef;
 import org.apache.drill.exec.store.table.function.TableSignature;
 import org.apache.drill.exec.store.table.function.WithOptionsTableMacro;
 import org.apache.drill.exec.util.DrillFileSystemUtil;
 import org.apache.drill.exec.store.StorageStrategy;
-import org.apache.drill.exec.store.easy.json.JSONFormatPlugin;
 import org.apache.drill.exec.util.ImpersonationUtil;
 import org.apache.drill.metastore.MetastoreRegistry;
 import org.apache.drill.metastore.components.tables.MetastoreTableInfo;
@@ -554,7 +554,7 @@ public class WorkspaceSchemaFactory {
     public CreateTableEntry createStatsTable(String tableName) {
       ensureNotStatsTable(tableName);
       final String statsTableName = getStatsTableName(tableName);
-      FormatPlugin formatPlugin = plugin.getFormatPlugin(JSONFormatPlugin.DEFAULT_NAME);
+      FormatPlugin formatPlugin = plugin.getFormatPlugin(JSONFormatPlugin.PLUGIN_NAME);
       return createOrAppendToTable(statsTableName, formatPlugin, Collections.emptyList(),
           StorageStrategy.DEFAULT);
     }
@@ -563,7 +563,7 @@ public class WorkspaceSchemaFactory {
     public CreateTableEntry appendToStatsTable(String tableName) {
       ensureNotStatsTable(tableName);
       final String statsTableName = getStatsTableName(tableName);
-      FormatPlugin formatPlugin = plugin.getFormatPlugin(JSONFormatPlugin.DEFAULT_NAME);
+      FormatPlugin formatPlugin = plugin.getFormatPlugin(JSONFormatPlugin.PLUGIN_NAME);
       return createOrAppendToTable(statsTableName, formatPlugin, Collections.emptyList(),
           StorageStrategy.DEFAULT);
     }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
index fad3634d27..681bf4ed06 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
@@ -49,7 +49,6 @@ import org.apache.drill.exec.record.CloseableRecordBatch;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.server.options.OptionSet;
-import org.apache.drill.exec.store.ColumnExplorer;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.store.RecordWriter;
 import org.apache.drill.exec.store.StatisticsRecordWriter;
@@ -65,7 +64,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-
 /**
  * Base class for file readers.
  * <p>
@@ -131,7 +129,7 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
     /**
      *  Choose whether to use the "traditional" or "enhanced" reader
      *  structure. Can also be selected at runtime by overriding
-     *  {@link #useEnhancedScan(OptionSet)}.
+     *  {@link #scanVersion(OptionSet)}.
      */
     private final ScanFrameworkVersion scanVersion;
 
@@ -546,15 +544,15 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
   /**
    * Initialize the scan framework builder with standard options.
    * Call this from the plugin-specific
-   * {@link #frameworkBuilder(OptionSet, EasySubScan)} method.
+   * {@link #frameworkBuilder(EasySubScan, OptionSet)} method.
    * The plugin can then customize/revise options as needed.
    * <p>
    * For EVF V1, to be removed.
    *
    * @param builder the scan framework builder you create in the
-   * {@link #frameworkBuilder(OptionSet, EasySubScan)} method
+   * {@link #frameworkBuilder(EasySubScan, OptionSet)} method
    * @param scan the physical scan operator definition passed to
-   * the {@link #frameworkBuilder(OptionSet, EasySubScan)} method
+   * the {@link #frameworkBuilder(EasySubScan, OptionSet)} method
    */
   protected void initScanBuilder(FileScanBuilder builder, EasySubScan scan) {
     EvfV1ScanBuilder.initScanBuilder(this, builder, scan);
@@ -583,8 +581,7 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
    * potentially many files
    * @throws ExecutionSetupException for all setup failures
    */
-  protected FileScanBuilder frameworkBuilder(
-      OptionSet options, EasySubScan scan) throws ExecutionSetupException {
+  protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options) throws ExecutionSetupException {
     throw new ExecutionSetupException("Must implement frameworkBuilder() if using the enhanced framework.");
   }
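With the reordered signature, a plugin that opts into the enhanced framework overrides this hook. A hedged sketch, modeled on the JSON plugin's implementation later in this patch and placed inside a hypothetical EasyFormatPlugin subclass; MyBatchReader is an assumed ManagedReader implementation, not a real class:

    @Override
    protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options)
        throws ExecutionSetupException {
      FileScanBuilder builder = new FileScanBuilder();
      initScanBuilder(builder, scan);  // standard options, as documented above
      builder.setReaderFactory(new FileReaderFactory() {
        @Override
        public ManagedReader<? extends FileSchemaNegotiator> newReader() {
          return new MyBatchReader();  // hypothetical reader
        }
      });
      // Type used for columns that are projected but absent in the file.
      builder.nullType(Types.optional(MinorType.VARCHAR));
      return builder;
    }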
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
index 3b3ea83cef..3c2708a83e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
@@ -81,7 +81,7 @@ public class EasyGroupScan extends AbstractGroupScanWithMetadata<TableMetadataPr
 
   private final EasyFormatPlugin<?> formatPlugin;
   private FileSelection selection;
-  private int partitionDepth;
+  private int partitionDepth = -1;
   private int maxWidth;
   private int minWidth = 1;
 
@@ -293,8 +293,8 @@ public class EasyGroupScan extends AbstractGroupScanWithMetadata<TableMetadataPr
     Preconditions.checkArgument(!filesForMinor.isEmpty(),
         String.format("MinorFragmentId %d has no read entries assigned", minorFragmentId));
 
-    EasySubScan subScan = new EasySubScan(getUserName(), convert(filesForMinor), formatPlugin,
-        columns, selectionRoot, partitionDepth, getSchema(), limit);
+    EasySubScan subScan = new EasySubScan(getUserName(), convert(filesForMinor), formatPlugin, columns, selectionRoot,
+      partitionDepth, getSchema(), limit);
     subScan.setOperatorId(getOperatorId());
     return subScan;
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EvfV1ScanBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EvfV1ScanBuilder.java
index b486d381d3..887d647eca 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EvfV1ScanBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EvfV1ScanBuilder.java
@@ -26,7 +26,7 @@ import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileScanB
 import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchemaNegotiator;
 import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.record.CloseableRecordBatch;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.hadoop.fs.Path;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -94,7 +94,7 @@ class EvfV1ScanBuilder {
    * vector and batch sizes. Use this for new format plugins.
    */
   public CloseableRecordBatch build() throws ExecutionSetupException {
-    final FileScanBuilder builder = plugin.frameworkBuilder(context.getOptions(), scan);
+    final FileScanBuilder builder = plugin.frameworkBuilder(scan, context.getOptions());
 
     // Add batch reader, if none specified
 
@@ -107,13 +107,13 @@ class EvfV1ScanBuilder {
   /**
    * Initialize the scan framework builder with standard options.
    * Call this from the plugin-specific
-   * {@link #frameworkBuilder(OptionManager, EasySubScan)} method.
+   * {@link EasyFormatPlugin#frameworkBuilder(EasySubScan, OptionSet)} method.
    * The plugin can then customize/revise options as needed.
    *
    * @param builder the scan framework builder you create in the
-   * {@link #frameworkBuilder(OptionManager, EasySubScan)} method
+   * {@link EasyFormatPlugin#frameworkBuilder(EasySubScan, OptionSet)} method
    * @param scan the physical scan operator definition passed to
-   * the {@link #frameworkBuilder(OptionManager, EasySubScan)} method
+   * the {@link EasyFormatPlugin#frameworkBuilder(EasySubScan, OptionSet)} method
    */
   protected static void initScanBuilder(EasyFormatPlugin<? extends FormatPluginConfig> plugin,
       FileScanBuilder builder, EasySubScan scan) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatConfig.java
new file mode 100644
index 0000000000..0ec66ce967
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatConfig.java
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.easy.json;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.common.logical.FormatPluginConfig;
+import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
+
+import java.util.List;
+import java.util.Objects;
+
+import static org.apache.drill.exec.store.easy.json.JSONFormatPlugin.PLUGIN_NAME;
+
+@JsonTypeName(PLUGIN_NAME)
+public class JSONFormatConfig implements FormatPluginConfig {
+  private static final List<String> DEFAULT_EXTS = ImmutableList.of("json");
+
+  private final List<String> extensions;
+  private final Boolean allTextMode;
+  private final Boolean readNumbersAsDouble;
+  private final Boolean skipMalformedJSONRecords;
+  private final Boolean escapeAnyChar;
+  private final Boolean nanInf;
+
+  @JsonCreator
+  public JSONFormatConfig(
+      @JsonProperty("extensions") List<String> extensions,
+      @JsonProperty("allTextMode") Boolean allTextMode,
+      @JsonProperty("readNumbersAsDouble") Boolean readNumbersAsDouble,
+      @JsonProperty("skipMalformedJSONRecords") Boolean skipMalformedJSONRecords,
+      @JsonProperty("escapeAnyChar") Boolean escapeAnyChar,
+      @JsonProperty("nanInf") Boolean nanInf) {
+    this.extensions = extensions == null ? DEFAULT_EXTS : ImmutableList.copyOf(extensions);
+    this.allTextMode = allTextMode;
+    this.readNumbersAsDouble = readNumbersAsDouble;
+    this.skipMalformedJSONRecords = skipMalformedJSONRecords;
+    this.escapeAnyChar = escapeAnyChar;
+    this.nanInf = nanInf;
+  }
+
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
+  public List<String> getExtensions() {
+    return extensions;
+  }
+
+  @JsonInclude(JsonInclude.Include.NON_ABSENT)
+  public Boolean getAllTextMode() {
+    return allTextMode;
+  }
+
+  @JsonInclude(JsonInclude.Include.NON_ABSENT)
+  public Boolean getReadNumbersAsDouble() {
+    return readNumbersAsDouble;
+  }
+
+  @JsonInclude(JsonInclude.Include.NON_ABSENT)
+  public Boolean getSkipMalformedJSONRecords() {
+    return skipMalformedJSONRecords;
+  }
+
+  @JsonInclude(JsonInclude.Include.NON_ABSENT)
+  public Boolean getEscapeAnyChar() {
+    return escapeAnyChar;
+  }
+
+  @JsonInclude(JsonInclude.Include.NON_ABSENT)
+  public Boolean getNanInf() {
+    return nanInf;
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(extensions, allTextMode, readNumbersAsDouble, skipMalformedJSONRecords, escapeAnyChar, nanInf);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null || getClass() != obj.getClass()) {
+      return false;
+    }
+    JSONFormatConfig other = (JSONFormatConfig) obj;
+    return Objects.deepEquals(extensions, other.extensions) &&
+      Objects.equals(allTextMode, other.allTextMode) &&
+      Objects.equals(readNumbersAsDouble, other.readNumbersAsDouble) &&
+      Objects.equals(skipMalformedJSONRecords, other.skipMalformedJSONRecords) &&
+      Objects.equals(escapeAnyChar, other.escapeAnyChar) &&
+      Objects.equals(nanInf, other.nanInf);
+  }
+
+  @Override
+  public String toString() {
+    return new PlanStringBuilder(this)
+      .field("extensions", extensions)
+      .field("allTextMode", allTextMode)
+      .field("readNumbersAsDouble", readNumbersAsDouble)
+      .field("skipMalformedRecords", skipMalformedJSONRecords)
+      .field("escapeAnyChar", escapeAnyChar)
+      .field("nanInf", nanInf)
+      .toString();
+  }
+}
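As a usage sketch, the config can also be built directly (for example in a test); a null Boolean argument intentionally means "defer to the corresponding session option", and the argument order follows the @JsonCreator above:

    JSONFormatConfig config = new JSONFormatConfig(
        null,   // extensions: null falls back to DEFAULT_EXTS, i.e. ["json"]
        true,   // allTextMode
        null,   // readNumbersAsDouble
        null,   // skipMalformedJSONRecords
        null,   // escapeAnyChar
        true);  // nanInf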
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
index f7a25d5301..52484309be 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
@@ -19,32 +19,35 @@ package org.apache.drill.exec.store.easy.json;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;
 
-import com.fasterxml.jackson.annotation.JsonInclude.Include;
-import org.apache.drill.common.PlanStringBuilder;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.QueryContext.SqlStatementType;
+import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileReaderFactory;
+import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileScanBuilder;
+import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchemaNegotiator;
+import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.planner.common.DrillStatsTable;
 import org.apache.drill.exec.planner.common.DrillStatsTable.TableStatistics;
 import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
 import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.store.RecordWriter;
 import org.apache.drill.exec.store.StatisticsRecordWriter;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
+import org.apache.drill.exec.store.dfs.easy.EasySubScan;
 import org.apache.drill.exec.store.dfs.easy.EasyWriter;
 import org.apache.drill.exec.store.dfs.easy.FileWork;
-import org.apache.drill.exec.store.easy.json.JSONFormatPlugin.JSONFormatConfig;
-import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
-import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -52,32 +55,44 @@ import org.apache.hadoop.fs.Path;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.annotation.JsonTypeName;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
-
   private static final Logger logger = LoggerFactory.getLogger(JSONFormatPlugin.class);
-  public static final String DEFAULT_NAME = "json";
-
+  public static final String PLUGIN_NAME = "json";
   private static final boolean IS_COMPRESSIBLE = true;
 
-  public static final String OPERATOR_TYPE = "JSON_SUB_SCAN";
+  public static final String READER_OPERATOR_TYPE = "JSON_SUB_SCAN";
+  public static final String WRITER_OPERATOR_TYPE = "JSON_WRITER";
 
   public JSONFormatPlugin(String name, DrillbitContext context,
       Configuration fsConf, StoragePluginConfig storageConfig) {
     this(name, context, fsConf, storageConfig, new JSONFormatConfig(null, null, null, null, null, null));
   }
 
-  public JSONFormatPlugin(String name, DrillbitContext context,
-      Configuration fsConf, StoragePluginConfig config, JSONFormatConfig formatPluginConfig) {
-    super(name, context, fsConf, config, formatPluginConfig, true,
-          false, false, IS_COMPRESSIBLE, formatPluginConfig.getExtensions(), DEFAULT_NAME);
+  public JSONFormatPlugin(String name, DrillbitContext context, Configuration fsConf,
+      StoragePluginConfig config, JSONFormatConfig formatPluginConfig) {
+    super(name, easyConfig(fsConf, formatPluginConfig), context, config, formatPluginConfig);
+  }
+
+  private static EasyFormatConfig easyConfig(Configuration fsConf, JSONFormatConfig pluginConfig) {
+    return EasyFormatConfig.builder()
+      .readable(true)
+      .writable(true)
+      .blockSplittable(false)
+      .compressible(IS_COMPRESSIBLE)
+      .supportsProjectPushdown(true)
+      .extensions(pluginConfig.getExtensions())
+      .fsConf(fsConf)
+      .defaultName(PLUGIN_NAME)
+      .readerOperatorType(READER_OPERATOR_TYPE)
+      .writerOperatorType(WRITER_OPERATOR_TYPE)
+      .scanVersion(ScanFrameworkVersion.EVF_V1)
+      .supportsLimitPushdown(true)
+      .supportsStatistics(true)
+      .build();
   }
 
   @Override
@@ -95,10 +110,10 @@ public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
   }
 
   @Override
-  public StatisticsRecordWriter getStatisticsRecordWriter(FragmentContext context, EasyWriter writer)
-      throws IOException {
+  public StatisticsRecordWriter getStatisticsRecordWriter(FragmentContext context, EasyWriter writer) {
     StatisticsRecordWriter recordWriter;
-    //ANALYZE statement requires the special statistics writer
+
+    // ANALYZE statement requires the special statistics writer
     if (!isStatisticsRecordWriter(context, writer)) {
       return null;
     }
@@ -118,18 +133,19 @@ public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
   }
 
   private Map<String, String> setupOptions(FragmentContext context, EasyWriter writer, boolean statsOptions) {
-    Map<String, String> options = Maps.newHashMap();
+    Map<String, String> options = new HashMap<>();
     options.put("location", writer.getLocation());
 
+    OptionSet optionMgr = context.getOptions();
     FragmentHandle handle = context.getHandle();
     String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId());
     options.put("prefix", fragmentId);
     options.put("separator", " ");
     options.put("extension", "json");
-    options.put("extended", Boolean.toString(context.getOptions().getOption(ExecConstants.JSON_EXTENDED_TYPES)));
-    options.put("uglify", Boolean.toString(context.getOptions().getOption(ExecConstants.JSON_WRITER_UGLIFY)));
-    options.put("skipnulls", Boolean.toString(context.getOptions().getOption(ExecConstants.JSON_WRITER_SKIPNULLFIELDS)));
-    options.put("enableNanInf", Boolean.toString(context.getOptions().getOption(ExecConstants.JSON_WRITER_NAN_INF_NUMBERS_VALIDATOR)));
+    options.put("extended", Boolean.toString(optionMgr.getBoolean(ExecConstants.JSON_EXTENDED_TYPES_KEY)));
+    options.put("uglify", Boolean.toString(optionMgr.getBoolean(ExecConstants.JSON_WRITER_UGLIFY_KEY)));
+    options.put("skipnulls", Boolean.toString(optionMgr.getBoolean(ExecConstants.JSON_WRITER_SKIP_NULL_FIELDS_KEY)));
+    options.put("enableNanInf", Boolean.toString(optionMgr.getBoolean(ExecConstants.JSON_WRITER_NAN_INF_NUMBERS)));
     if (statsOptions) {
       options.put("queryid", context.getQueryIdString());
     }
@@ -174,107 +190,44 @@ public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
     }
   }
 
-  @JsonTypeName("json")
-  public static class JSONFormatConfig implements FormatPluginConfig {
-    private static final List<String> DEFAULT_EXTS = ImmutableList.of("json");
-
-    private final List<String> extensions;
-    private final Boolean allTextMode;
-    private final Boolean readNumbersAsDouble;
-    private final Boolean skipMalformedJSONRecords;
-    private final Boolean escapeAnyChar;
-    private final Boolean nanInf;
-
-    @JsonCreator
-    public JSONFormatConfig(
-        @JsonProperty("extensions") List<String> extensions,
-        @JsonProperty("allTextMode") Boolean allTextMode,
-        @JsonProperty("readNumbersAsDouble") Boolean readNumbersAsDouble,
-        @JsonProperty("skipMalformedJSONRecords") Boolean skipMalformedJSONRecords,
-        @JsonProperty("escapeAnyChar") Boolean escapeAnyChar,
-        @JsonProperty("nanInf") Boolean nanInf) {
-      this.extensions = extensions == null ?
-          DEFAULT_EXTS : ImmutableList.copyOf(extensions);
-      this.allTextMode = allTextMode;
-      this.readNumbersAsDouble = readNumbersAsDouble;
-      this.skipMalformedJSONRecords = skipMalformedJSONRecords;
-      this.escapeAnyChar = escapeAnyChar;
-      this.nanInf = nanInf;
-    }
-
-    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
-    public List<String> getExtensions() {
-      return extensions;
-    }
-
-    @JsonInclude(JsonInclude.Include.NON_ABSENT)
-    public Boolean getAllTextMode() {
-      return allTextMode;
-    }
-
-    @JsonInclude(JsonInclude.Include.NON_ABSENT)
-    public Boolean getReadNumbersAsDouble() {
-      return readNumbersAsDouble;
-    }
-
-    @JsonInclude(JsonInclude.Include.NON_ABSENT)
-    public Boolean getSkipMalformedJSONRecords() {
-      return skipMalformedJSONRecords;
-    }
-
-    @JsonInclude(Include.NON_ABSENT)
-    public Boolean getEscapeAnyChar() {
-      return escapeAnyChar;
-    }
-
-    @JsonInclude(JsonInclude.Include.NON_ABSENT)
-    public Boolean getNanInf() {
-      return nanInf;
-    }
-
-    @Override
-    public int hashCode() {
-      return Objects.hash(extensions, allTextMode, readNumbersAsDouble, skipMalformedJSONRecords, escapeAnyChar, nanInf);
-    }
+  @Override
+  protected ScanFrameworkVersion scanVersion(OptionSet options) {
+    // Create the "legacy", "V1" reader or the new "V2" version based on
+    // the result set loader. The V2 version is a bit more robust, and
+    // supports the row set framework. However, V1 supports unions.
+    // This code should be temporary.
+    return options.getBoolean(ExecConstants.ENABLE_V2_JSON_READER_KEY)
+      ? ScanFrameworkVersion.EVF_V1
+      : ScanFrameworkVersion.CLASSIC;
+  }
 
-    @Override
-    public boolean equals(Object obj) {
-      if (this == obj) {
-        return true;
-      }
-      if (obj == null || getClass() != obj.getClass()) {
-        return false;
+  @Override
+  protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options) throws ExecutionSetupException {
+    FileScanBuilder builder = new FileScanBuilder();
+    initScanBuilder(builder, scan);
+    builder.setReaderFactory(new FileReaderFactory() {
+      @Override
+      public ManagedReader<? extends FileSchemaNegotiator> newReader() {
+        return new JsonBatchReader();
       }
-      JSONFormatConfig other = (JSONFormatConfig) obj;
-      return Objects.deepEquals(extensions, other.extensions) &&
-        Objects.equals(allTextMode, other.allTextMode) &&
-        Objects.equals(readNumbersAsDouble, other.readNumbersAsDouble) &&
-        Objects.equals(skipMalformedJSONRecords, other.skipMalformedJSONRecords) &&
-        Objects.equals(escapeAnyChar, other.escapeAnyChar) &&
-        Objects.equals(nanInf, other.nanInf);
-    }
+    });
 
-    @Override
-    public String toString() {
-      return new PlanStringBuilder(this)
-        .field("extensions", extensions)
-        .field("allTextMode", allTextMode)
-        .field("readNumbersAsDouble", readNumbersAsDouble)
-        .field("skipMalformedRecords", skipMalformedJSONRecords)
-        .field("escapeAnyChar", escapeAnyChar)
-        .field("nanInf", nanInf)
-        .toString();
-    }
+    // Project missing columns as Varchar, which is at least
+    // compatible with all-text mode. (JSON never returns a nullable
+    // int, so don't use the default.)
+    builder.nullType(Types.optional(MinorType.VARCHAR));
+
+    return builder;
   }
 
   @Override
   public String getReaderOperatorType() {
-    return OPERATOR_TYPE;
+    return READER_OPERATOR_TYPE;
   }
 
   @Override
   public String getWriterOperatorType() {
-     return "JSON_WRITER";
+     return WRITER_OPERATOR_TYPE;
   }
 
   @Override
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
index 7fe6ffaa55..535f24f243 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
@@ -34,7 +34,6 @@ import org.apache.drill.exec.server.options.OptionManager;
 import org.apache.drill.exec.server.options.OptionValue.OptionScope;
 import org.apache.drill.exec.store.AbstractRecordReader;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
-import org.apache.drill.exec.store.easy.json.JSONFormatPlugin.JSONFormatConfig;
 import org.apache.drill.exec.store.easy.json.JsonProcessor.ReadState;
 import org.apache.drill.exec.store.easy.json.reader.CountingJsonReader;
 import org.apache.drill.exec.vector.BaseValueVector;
@@ -54,8 +53,9 @@ import com.fasterxml.jackson.databind.JsonNode;
  * but is used by some "mini-plan" unit tests, and by the VALUES
  * reader. As a result, this reader cannot be removed and must be
  * maintained until the other uses are converted to the new-style
- * JSON reader.
+ * JSON reader - {@link JsonBatchReader}.
  */
+@Deprecated
 public class JSONRecordReader extends AbstractRecordReader {
   private static final Logger logger = LoggerFactory.getLogger(JSONRecordReader.class);
 
@@ -261,7 +261,7 @@ public class JSONRecordReader extends AbstractRecordReader {
             .build();
       }
       setupParser();
-    } catch (Exception e){
+    } catch (Exception e) {
       handleAndRaise("Failure reading JSON file", e);
     }
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonBatchReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonBatchReader.java
index 48d44f42a4..fae978ac8c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonBatchReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonBatchReader.java
@@ -32,6 +32,10 @@ import org.apache.hadoop.mapred.FileSplit;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * EVF-based reader, used by default to read JSON files (store.json.enable_v2_reader = true).
+ * The old, deprecated reader is {@link JSONRecordReader}.
+ */
 public class JsonBatchReader implements ManagedReader<FileSchemaNegotiator> {
   private static final Logger logger = LoggerFactory.getLogger(JsonBatchReader.class);
 
@@ -64,6 +68,7 @@ public class JsonBatchReader implements ManagedReader<FileSchemaNegotiator> {
     jsonLoader = new JsonLoaderBuilder()
         .resultSetLoader(negotiator.build())
         .standardOptions(negotiator.queryOptions())
+        .providedSchema(negotiator.providedSchema())
         .errorContext(errorContext)
         .fromStream(stream)
         .build();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java
index ad0cba9ffb..03dad7d25b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java
@@ -84,8 +84,7 @@ public abstract class BaseFieldFactory implements FieldFactory {
   }
 
   protected ElementParser scalarArrayParserFor(ValueParser element) {
-    return parserFactory().scalarArrayValueParser(
-        new SimpleArrayListener(), element);
+    return parserFactory().scalarArrayValueParser(new SimpleArrayListener(), element);
   }
 
   protected ElementParser scalarArrayParserFor(ArrayWriter writer) {
@@ -96,8 +95,7 @@ public abstract class BaseFieldFactory implements FieldFactory {
    * Create a repeated list listener for a scalar value.
    */
   protected ElementParser multiDimScalarArrayFor(ObjectWriter writer, int dims) {
-    return buildOuterArrays(writer, dims,
-        innerWriter -> scalarArrayParserFor(innerWriter.array()));
+    return buildOuterArrays(writer, dims, innerWriter -> scalarArrayParserFor(innerWriter.array()));
   }
 
   /**
@@ -112,11 +110,8 @@ public abstract class BaseFieldFactory implements FieldFactory {
    * Create a map column and its associated object value listener for the
    * given key and optional provided schema.
    */
-  protected ElementParser objectParserFor(FieldDefn fieldDefn,
-      ColumnMetadata colSchema, TupleMetadata providedSchema) {
-    return objectParserFor(
-            fieldDefn.fieldWriterFor(colSchema).tuple(),
-            providedSchema);
+  protected ElementParser objectParserFor(FieldDefn fieldDefn, ColumnMetadata colSchema, TupleMetadata providedSchema) {
+    return objectParserFor(fieldDefn.fieldWriterFor(colSchema).tuple(), providedSchema);
   }
 
   /**
@@ -129,24 +124,19 @@ public abstract class BaseFieldFactory implements FieldFactory {
   }
 
   protected ElementParser objectArrayParserFor(ArrayWriter arrayWriter, TupleMetadata providedSchema) {
-    return parserFactory().arrayValueParser(
-        new StructureArrayListener(arrayWriter),
-        objectParserFor(arrayWriter.tuple(), providedSchema));
+    return parserFactory().arrayValueParser(new StructureArrayListener(arrayWriter),
+      objectParserFor(arrayWriter.tuple(), providedSchema));
   }
 
   protected ElementParser objectParserFor(TupleWriter writer, TupleMetadata providedSchema) {
-    return parserFactory().objectValueParser(
-        new TupleParser(loader, writer, providedSchema));
+    return parserFactory().objectValueParser(new TupleParser(loader, writer, providedSchema));
   }
 
   /**
    * Create a repeated list listener for a Map.
    */
-  public ElementParser multiDimObjectArrayFor(
-      ObjectWriter writer, int dims, TupleMetadata providedSchema) {
-    return buildOuterArrays(writer, dims,
-        innerWriter ->
-          objectArrayParserFor(innerWriter.array(), providedSchema));
+  public ElementParser multiDimObjectArrayFor(ObjectWriter writer, int dims, TupleMetadata providedSchema) {
+    return buildOuterArrays(writer, dims, innerWriter -> objectArrayParserFor(innerWriter.array(), providedSchema));
   }
 
   /**
@@ -162,19 +152,15 @@ public abstract class BaseFieldFactory implements FieldFactory {
    * a column schema.
    */
   protected ElementParser variantArrayParserFor(ArrayWriter arrayWriter) {
-    return parserFactory().arrayValueParser(
-        new ListArrayListener(arrayWriter),
-        variantParserFor(arrayWriter.variant()));
+    return parserFactory().arrayValueParser(new ListArrayListener(arrayWriter), variantParserFor(arrayWriter.variant()));
   }
 
   /**
    * Create a repeated list listener for a variant. Here, the inner
    * array is provided by a List (which is a repeated Union.)
    */
-  protected ElementParser multiDimVariantArrayParserFor(
-      ObjectWriter writer, int dims) {
-    return buildOuterArrays(writer, dims,
-        innerWriter -> variantArrayParserFor(innerWriter.array()));
+  protected ElementParser multiDimVariantArrayParserFor(ObjectWriter writer, int dims) {
+    return buildOuterArrays(writer, dims, innerWriter -> variantArrayParserFor(innerWriter.array()));
   }
 
   /**
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/FieldDefn.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/FieldDefn.java
index e1d3238c0a..945496129b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/FieldDefn.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/FieldDefn.java
@@ -112,7 +112,11 @@ public class FieldDefn {
   }
 
   public ColumnMetadata schemaFor(MinorType type, boolean isArray) {
-    return MetadataUtils.newScalar(key, type, mode(isArray));
+    return schemaFor(type, isArray, false);
+  }
+
+  public ColumnMetadata schemaFor(MinorType type, boolean isArray, boolean forUnknownSchema) {
+    return MetadataUtils.newScalar(key, type, mode(isArray), forUnknownSchema);
   }
 
   public DataMode mode(boolean isArray) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/InferredFieldFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/InferredFieldFactory.java
index 2f224e547c..6a9da7235f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/InferredFieldFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/InferredFieldFactory.java
@@ -105,25 +105,19 @@ public class InferredFieldFactory extends BaseFieldFactory {
   }
 
   private ValueParser forceResolution(FieldDefn fieldDefn, boolean isArray) {
-    return unknownParserFor(
-        fieldDefn.scalarWriterFor(
-            schemaForUnknown(fieldDefn, isArray)));
+    return unknownParserFor(fieldDefn.scalarWriterFor(schemaForUnknown(fieldDefn, isArray)));
   }
 
   private ColumnMetadata schemaForUnknown(FieldDefn fieldDefn, boolean isArray) {
-    if (loader.options().unknownsAsJson) {
-      return fieldDefn.schemaFor(MinorType.VARCHAR, isArray);
-    } else {
-      return fieldDefn.schemaFor(loader.options().nullType, isArray);
-    }
+    return loader.options().unknownsAsJson
+      ? fieldDefn.schemaFor(MinorType.VARCHAR, isArray, true)
+      : fieldDefn.schemaFor(loader.options().nullType, isArray, true);
   }
 
   private ValueParser unknownParserFor(ScalarWriter writer) {
-    if (loader.options().unknownsAsJson) {
-      return parserFactory().jsonTextParser(new VarCharListener(loader, writer));
-    } else {
-      return parserFactory().simpleValueParser(scalarListenerFor(writer));
-    }
+    return loader.options().unknownsAsJson
+      ? parserFactory().jsonTextParser(new VarCharListener(loader, writer))
+      : parserFactory().simpleValueParser(scalarListenerFor(writer));
   }
 
   private ElementParser resolveField(FieldDefn fieldDefn) {
@@ -153,11 +147,9 @@ public class InferredFieldFactory extends BaseFieldFactory {
   public ValueParser scalarParserFor(FieldDefn fieldDefn, boolean isArray) {
     if (loader.options().allTextMode) {
       return parserFactory().textValueParser(
-          new VarCharListener(loader,
-              fieldDefn.scalarWriterFor(MinorType.VARCHAR, isArray)));
+        new VarCharListener(loader, fieldDefn.scalarWriterFor(MinorType.VARCHAR, isArray)));
     } else {
-      return scalarParserFor(fieldDefn,
-              fieldDefn.schemaFor(scalarTypeFor(fieldDefn), isArray));
+      return scalarParserFor(fieldDefn, fieldDefn.schemaFor(scalarTypeFor(fieldDefn), isArray));
     }
   }
 
@@ -215,8 +207,7 @@ public class InferredFieldFactory extends BaseFieldFactory {
   private MinorType scalarTypeFor(FieldDefn fieldDefn) {
     MinorType colType = drillTypeFor(fieldDefn.lookahead().type());
     if (colType == null) {
-      throw loader().unsupportedJsonTypeException(
-          fieldDefn.key(), fieldDefn.lookahead().type());
+      throw loader().unsupportedJsonTypeException(fieldDefn.key(), fieldDefn.lookahead().type());
     }
     return colType;
   }
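For context, forceResolution() above fires for a field whose values never reveal a concrete type, such as a column that is null in every record seen so far (illustrative input):

    {"a": null}
    {"a": null}

With unknownsAsJson set, the column materializes as VARCHAR holding raw JSON text; otherwise it takes options().nullType. Either way, the new three-argument schemaFor(..., true) marks the resulting schema as a placeholder, which is what ProjectionChecker's relaxed check earlier in this patch keys on.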
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/JsonLoaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/JsonLoaderImpl.java
index edc687f4aa..e5755cb07b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/JsonLoaderImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/JsonLoaderImpl.java
@@ -46,7 +46,6 @@ import org.apache.drill.exec.vector.accessor.UnsupportedConversionError;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.esri.core.geometry.JsonReader;
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.JsonToken;
 
@@ -99,7 +98,7 @@ import com.fasterxml.jackson.core.JsonToken;
  *
  * <h4>Comparison to Original JSON Reader</h4>
  *
- * This class replaces the {@link JsonReader} class used in Drill versions 1.17
+ * This class replaces the {@link org.apache.drill.exec.vector.complex.fn.JsonReader} class used in Drill versions 1.17
  * and before. Compared with the previous version, this implementation:
  * <ul>
  * <li>Materializes parse states as classes rather than as methods and
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/TupleParser.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/TupleParser.java
index af492c6c9d..100ddc39a2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/TupleParser.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/TupleParser.java
@@ -143,17 +143,14 @@ public class TupleParser extends ObjectParser {
   }
 
   public ElementParser resolveArray(String key, TokenIterator tokenizer) {
-    return replaceFieldParser(key,
-        fieldFactory().fieldParser(new FieldDefn(this, key, tokenizer, true)));
+    return replaceFieldParser(key, fieldFactory().fieldParser(new FieldDefn(this, key, tokenizer, true)));
   }
 
   public void forceNullResolution(String key) {
-    replaceFieldParser(key,
-        fieldFactory().forceNullResolution(new FieldDefn(this, key, null)));
+    replaceFieldParser(key, fieldFactory().forceNullResolution(new FieldDefn(this, key, null)));
   }
 
   public void forceEmptyArrayResolution(String key) {
-    replaceFieldParser(key,
-        fieldFactory().forceArrayResolution(new FieldDefn(this, key, null)));
+    replaceFieldParser(key, fieldFactory().forceArrayResolution(new FieldDefn(this, key, null)));
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/JsonStructureParser.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/JsonStructureParser.java
index 2f71aa8101..98163fd053 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/JsonStructureParser.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/JsonStructureParser.java
@@ -202,25 +202,25 @@ public class JsonStructureParser {
     }
     switch (token) {
 
-      // File contains an array of records.
-      case START_ARRAY:
-        if (options.skipOuterList) {
-          return new RootArrayParser(this);
-        } else {
-          throw errorFactory().structureError(
-              "JSON includes an outer array, but outer array support is not enabled");
-        }
+    // File contains an array of records.
+    case START_ARRAY:
+      if (options.skipOuterList) {
+        return new RootArrayParser(this);
+      } else {
+        throw errorFactory().structureError(
+            "JSON includes an outer array, but outer array support is not enabled");
+      }
 
-      // File contains a sequence of one or more records,
-      // presumably sequentially.
-      case START_OBJECT:
-        tokenizer.unget(token);
-        return new RootObjectParser(this);
+    // File contains a sequence of one or more records,
+    // presumably sequentially.
+    case START_OBJECT:
+      tokenizer.unget(token);
+      return new RootObjectParser(this);
 
-      // Not a valid JSON file for Drill.
-      // Won't get here because the Jackson parser catches errors.
-      default:
-        throw errorFactory().syntaxError(token);
+    // Not a valid JSON file for Drill.
+    // Won't get here because the Jackson parser catches errors.
+    default:
+      throw errorFactory().syntaxError(token);
     }
   }
 
@@ -254,6 +254,7 @@ public class JsonStructureParser {
     }
     while (true) {
       try {
+        // System.out.println(tokenizer.stringValue());
         return rootState.parseRoot(tokenizer);
       } catch (RecoverableJsonException e) {
         if (! recover()) {
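For reference, the two valid root layouts that the switch above distinguishes look like this (illustrative records):

    [ {"a": 1}, {"a": 2} ]    <- START_ARRAY: requires skipOuterList
    {"a": 1}
    {"a": 2}                  <- START_OBJECT: sequence of objects, the default

The first form raises the structureError above unless outer-array support is enabled; the second is the layout Drill normally expects.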
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/JsonValueParser.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/JsonValueParser.java
index a8622bb22f..b6c6708a0e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/JsonValueParser.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/JsonValueParser.java
@@ -56,9 +56,9 @@ public class JsonValueParser extends ValueParser {
         break;
 
       case VALUE_STRING:
-        json.append("\"");
+        // json.append("\"");
         json.append(textValue);
-        json.append("\"");
+        // json.append("\"");
         break;
 
       default:
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/ObjectValueParser.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/ObjectValueParser.java
index 975e0be865..5a76f0ebb1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/ObjectValueParser.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/parser/ObjectValueParser.java
@@ -39,6 +39,7 @@ public class ObjectValueParser extends AbstractElementParser {
         objectParser.parse(tokenizer);
         break;
       case VALUE_NULL:
+      case VALUE_STRING:
         // Silently ignore, treat as a missing field
         break;
       default:
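Illustrative input for the new VALUE_STRING case above: once a field has resolved as an object, a later bare-string value for that field is now skipped like a null rather than failing the parse:

    {"rec": {"a": 1}}
    {"rec": "not an object"}    <- now treated as a missing field
    {"rec": {"a": 2}}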
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/values/DateValueListener.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/values/DateValueListener.java
index 8609a1e66c..3d221453a8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/values/DateValueListener.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/values/DateValueListener.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.store.easy.json.values;
 
 import java.time.Duration;
 import java.time.LocalDate;
+import java.time.format.DateTimeFormatter;
 
 import org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl;
 import org.apache.drill.exec.store.easy.json.parser.TokenIterator;
@@ -56,7 +57,10 @@ public class DateValueListener extends ScalarListener {
           // want to copy the offset since the epoch from UTC to our local
           // time, so that we retain the date, even if the span of the date
           // is different locally than UTC. A mess.
-          LocalDate localDate = LocalDate.parse(tokenizer.stringValue());
+          final String formatValue = schema().format();
+          DateTimeFormatter dateTimeFormatter = formatValue == null
+            ? DateTimeFormatter.ISO_LOCAL_DATE : DateTimeFormatter.ofPattern(formatValue);
+          LocalDate localDate = LocalDate.parse(tokenizer.stringValue(), dateTimeFormatter);
           writer.setLong(Duration.between(TimestampValueListener.LOCAL_EPOCH,
               localDate.atStartOfDay()).toMillis());
         } catch (Exception e) {
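A minimal, runnable sketch of the format-aware parsing introduced above: when the column schema carries a format property, it replaces the default ISO-8601 date parsing (the pattern and inputs here are illustrative):

    import java.time.LocalDate;
    import java.time.format.DateTimeFormatter;

    class DateFormatSketch {
      static LocalDate parseDate(String value, String formatValue) {
        DateTimeFormatter formatter = formatValue == null
            ? DateTimeFormatter.ISO_LOCAL_DATE
            : DateTimeFormatter.ofPattern(formatValue);
        return LocalDate.parse(value, formatter);
      }

      public static void main(String[] args) {
        System.out.println(parseDate("2022-04-27", null));         // ISO default
        System.out.println(parseDate("27.04.2022", "dd.MM.yyyy")); // schema format
      }
    }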
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java
index 43812a22b2..b80bed4052 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java
@@ -143,8 +143,7 @@ public class LogFormatPlugin extends EasyFormatPlugin<LogFormatConfig> {
    * </ul>
    */
   @Override
-  protected FileScanBuilder frameworkBuilder(
-      OptionSet options, EasySubScan scan) throws ExecutionSetupException {
+  protected FileScanBuilder frameworkBuilder(EasySubScan scan, OptionSet options) throws ExecutionSetupException {
 
     // Pattern and schema identical across readers; define
     // up front.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
index 1bbcb97551..222906224c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
@@ -42,9 +42,14 @@ import com.fasterxml.jackson.databind.JsonNode;
 
 import io.netty.buffer.DrillBuf;
 
+/**
+ * This is used by the old-style {@link org.apache.drill.exec.store.easy.json.JSONRecordReader}.
+ * Please use the new {@link org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl} along with
+ * {@link org.apache.drill.exec.store.easy.json.JsonBatchReader} instead of this reader.
+ */
+@Deprecated
 public class JsonReader extends BaseJsonReader {
-  private static final Logger logger =
-      LoggerFactory.getLogger(JsonReader.class);
+  private static final Logger logger = LoggerFactory.getLogger(JsonReader.class);
   public final static int MAX_RECORD_SIZE = 128 * 1024;
 
   private final WorkingBuffer workingBuffer;
@@ -380,8 +385,7 @@ public class JsonReader extends BaseJsonReader {
    * @return
    * @throws IOException
    */
-  private boolean writeMapDataIfTyped(MapWriter writer, String fieldName)
-      throws IOException {
+  private boolean writeMapDataIfTyped(MapWriter writer, String fieldName) throws IOException {
     if (extended) {
       return mapOutput.run(writer, fieldName);
     } else {
@@ -426,7 +430,7 @@ public class JsonReader extends BaseJsonReader {
         workingBuffer.getBuf());
   }
 
-  private void writeData(ListWriter list) throws IOException {
+  private void writeData(ListWriter list) {
     list.startList();
     outside: while (true) {
       try {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
index ea3bfd4150..040679a2c3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
@@ -88,7 +88,7 @@ abstract class VectorOutput {
     this.parser = parser;
   }
 
-  protected boolean innerRun() throws IOException{
+  protected boolean innerRun() throws IOException {
     JsonToken t = parser.nextToken();
     if (t != JsonToken.FIELD_NAME) {
       return false;
@@ -155,25 +155,24 @@ abstract class VectorOutput {
     return checkToken(parser.getCurrentToken(), expected1, expected2);
   }
 
-  boolean hasType() throws JsonParseException, IOException {
+  boolean hasType() throws IOException {
     JsonToken token = parser.nextToken();
     return token == JsonToken.FIELD_NAME && parser.getText().equals(ExtendedTypeName.TYPE);
   }
 
-  boolean hasBinary() throws JsonParseException, IOException {
+  boolean hasBinary() throws IOException {
     JsonToken token = parser.nextToken();
     return token == JsonToken.FIELD_NAME && parser.getText().equals(ExtendedTypeName.BINARY);
   }
 
-  long getType() throws JsonParseException, IOException {
+  long getType() throws IOException {
     if (!checkNextToken(JsonToken.VALUE_NUMBER_INT, JsonToken.VALUE_STRING)) {
       long type = parser.getValueAsLong();
       //Advancing the token, as checking current token in binary
       parser.nextToken();
       return type;
     }
-    throw new JsonParseException("Failure while reading $type value. Expected a NUMBER or STRING",
-        parser.getCurrentLocation());
+    throw new JsonParseException(parser, "Failure while reading $type value. Expected a NUMBER or STRING");
   }
 
   public boolean checkToken(final JsonToken t, final JsonToken expected1, final JsonToken expected2) throws IOException{
@@ -184,8 +183,8 @@ abstract class VectorOutput {
     } else if (t == expected2) {
       return false;
     } else {
-      throw new JsonParseException(String.format("Failure while reading ExtendedJSON typed value. Expected a %s but "
-          + "received a token of type %s", expected1, t), parser.getCurrentLocation());
+      throw new JsonParseException(parser, String.format("Failure while reading ExtendedJSON typed value. Expected a %s but "
+          + "received a token of type %s", expected1, t));
     }
   }
 
@@ -197,7 +196,7 @@ abstract class VectorOutput {
   public abstract void writeInteger(boolean isNull) throws IOException;
   public abstract void writeDecimal(boolean isNull) throws IOException;
 
-  static class ListVectorOutput extends VectorOutput{
+  static class ListVectorOutput extends VectorOutput {
     private ListWriter writer;
 
     public ListVectorOutput(WorkingBuffer work) {
@@ -262,7 +261,7 @@ abstract class VectorOutput {
           // 1. https://docs.mongodb.com/manual/reference/mongodb-extended-json
           // 2. org.apache.drill.exec.store.easy.json.values.UtcTimestampValueListener
           Instant instant = isoDateTimeFormatter.parse(parser.getValueAsString(), Instant::from);
-          long offset = ZoneId.systemDefault().getRules().getOffset(instant).getTotalSeconds() * 1000;
+          long offset = ZoneId.systemDefault().getRules().getOffset(instant).getTotalSeconds() * 1000L;
           ts.writeTimeStamp(instant.toEpochMilli() + offset);
           break;
         default:
@@ -295,7 +294,7 @@ abstract class VectorOutput {
 
     @Override
     public void writeDecimal(boolean isNull) throws IOException {
-      throw new JsonParseException("Decimal Extended types not yet supported.", parser.getCurrentLocation());
+      throw new JsonParseException(parser, "Decimal Extended types not yet supported");
     }
   }
 
@@ -368,7 +367,7 @@ abstract class VectorOutput {
           // 1. https://docs.mongodb.com/manual/reference/mongodb-extended-json
           // 2. org.apache.drill.exec.store.easy.json.values.UtcTimestampValueListener
           Instant instant = isoDateTimeFormatter.parse(parser.getValueAsString(), Instant::from);
-          long offset = ZoneId.systemDefault().getRules().getOffset(instant).getTotalSeconds() * 1000;
+          long offset = ZoneId.systemDefault().getRules().getOffset(instant).getTotalSeconds() * 1000L;
           ts.writeTimeStamp(instant.toEpochMilli() + offset);
           break;
         default:
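Both edits in this file follow one pattern: Jackson's deprecated JsonParseException(String, JsonLocation) constructor is replaced by the parser-based overload, and the offset arithmetic is widened to long before the multiply rather than after it. A self-contained sketch (the JSON snippet and message text are made up):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParseException;
    import com.fasterxml.jackson.core.JsonParser;
    import java.time.Instant;
    import java.time.ZoneId;

    public class VectorOutputSketch {
      public static void main(String[] args) throws Exception {
        JsonParser parser = new JsonFactory().createParser("{\"$type\": true}");
        parser.nextToken();
        // Preferred constructor: the parser supplies the error location itself.
        JsonParseException e = new JsonParseException(parser, "example message");
        System.out.println(e.getLocation());

        // Widening one operand to long keeps the multiply in 64-bit arithmetic
        // instead of computing an int product and widening afterwards.
        Instant instant = Instant.now();
        long offset = ZoneId.systemDefault().getRules()
            .getOffset(instant).getTotalSeconds() * 1000L;
        System.out.println(instant.toEpochMilli() + offset);
      }
    }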
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java b/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java
index 5ff50b9610..1acfecdd26 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java
@@ -36,6 +36,7 @@ import org.apache.commons.lang3.tuple.Pair;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import org.apache.drill.test.BaseTestQuery;
@@ -212,23 +213,28 @@ public class TestFrameworkTest extends BaseTestQuery {
     LocalDateTime localDT = LocalDateTime.of(2019, 9, 30, 20, 47, 43, 123);
     Instant instant = localDT.atZone(ZoneId.systemDefault()).toInstant();
     long ts = instant.toEpochMilli() + instant.getNano();
-    ts = ts + ZoneId.systemDefault().getRules().getOffset(instant).getTotalSeconds() * 1000;
-    testBuilder()
-        .sqlQuery("select * from cp.`jsoninput/input2.json` limit 1")
-        .ordered()
-        .baselineColumns("integer", "float", "x", "z", "l", "rl", "`date`")
-        .baselineValues(2010l,
-                        17.4,
-                        mapOf("y", "kevin",
-                            "z", "paul"),
-                        listOf(mapOf("orange", "yellow",
-                                "pink", "red"),
-                            mapOf("pink", "purple")),
-                        listOf(4l, 2l),
-                        listOf(listOf(2l, 1l),
-                            listOf(4l, 6l)),
-                        LocalDateTime.ofInstant(Instant.ofEpochMilli(ts), ZoneId.systemDefault()))
-        .build().run();
+    ts = ts + ZoneId.systemDefault().getRules().getOffset(instant).getTotalSeconds() * 1000L;
+    try {
+      testBuilder()
+          .ordered()
+          .enableSessionOption(ExecConstants.JSON_EXTENDED_TYPES_KEY)
+          .sqlQuery("select * from cp.`jsoninput/input2.json` limit 1")
+          .baselineColumns("integer", "float", "x", "z", "l", "rl", "`date`")
+          .baselineValues(2010L,
+                          17.4,
+                          mapOf("y", "kevin",
+                              "z", "paul"),
+                          listOf(mapOf("orange", "yellow",
+                                  "pink", "red"),
+                              mapOf("pink", "purple")),
+                          listOf(4L, 2L),
+                          listOf(listOf(2L, 1L),
+                              listOf(4L, 6L)),
+                          LocalDateTime.ofInstant(Instant.ofEpochMilli(ts), ZoneId.systemDefault()))
+          .build().run();
+    } finally {
+      resetSessionOption(ExecConstants.JSON_EXTENDED_TYPES_KEY);
+    }
   }
 
   @Test
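The try/finally wrapper introduced above is the pattern this commit applies throughout the tests: set a session option for the query under test, then restore the default even when an assertion fails. A toy, framework-free sketch of the shape (the map stands in for Drill's real session-option store; the tests use enableSessionOption/resetSessionOption on the fixture):

    import java.util.HashMap;
    import java.util.Map;

    public class OptionGuardSketch {
      // Stand-in for the session option store.
      static final Map<String, Object> OPTIONS = new HashMap<>();

      public static void main(String[] args) {
        OPTIONS.put("store.json.extended_types", true);
        try {
          // ... run the query that needs the option ...
          System.out.println("option = " + OPTIONS.get("store.json.extended_types"));
        } finally {
          OPTIONS.remove("store.json.extended_types"); // back to the default
        }
      }
    }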
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
index df83dbc159..4c042d04b4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
@@ -221,7 +221,6 @@ public class TestStarQueries extends BaseTestQuery {
         .sqlQuery("select *, n_nationkey as key2 from cp.`tpch/nation.parquet` order by n_name limit 2")
         .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey, n_nationkey as key2 from cp.`tpch/nation.parquet` order by n_name limit 2")
         .build().run();
-
   }
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java
index 3870c9d657..ebeb3c0dd8 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java
@@ -17,9 +17,10 @@
  */
 package org.apache.drill.exec.expr.fn.impl;
 
+import static org.apache.drill.exec.ExecConstants.ENABLE_UNION_TYPE_KEY;
+import static org.apache.drill.exec.ExecConstants.ENABLE_V2_JSON_READER_KEY;
 import static org.junit.Assert.assertEquals;
 
-import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.rpc.RpcException;
 import org.apache.drill.test.ClusterFixture;
@@ -313,7 +314,7 @@ public class TestTypeFns extends ClusterTest {
   @Test
   public void testTypeOfWithFileV1() throws Exception {
     try {
-      enableV2Reader(false);
+      client.alterSession(ENABLE_V2_JSON_READER_KEY, false);
       // Column `x` does not actually appear in the file.
       String sql ="SELECT typeof(bi) AS bi_t, typeof(fl) AS fl_t, typeof(st) AS st_t,\n" +
                   "       typeof(mp) AS mp_t, typeof(ar) AS ar_t, typeof(nu) AS nu_t,\n" +
@@ -326,7 +327,7 @@ public class TestTypeFns extends ClusterTest {
         .baselineValues( "BIGINT", "FLOAT8", "VARCHAR", "MAP",  "BIGINT", "NULL", "NULL")
         .go();
     } finally {
-      resetV2Reader();
+      client.resetSession(ENABLE_V2_JSON_READER_KEY);
     }
   }
 
@@ -335,31 +336,26 @@ public class TestTypeFns extends ClusterTest {
    */
   @Test
   public void testTypeOfWithFileV2() throws Exception {
-    try {
-      enableV2Reader(true);
-      // Column `x` does not actually appear in the file.
-      String sql ="SELECT typeof(bi) AS bi_t, typeof(fl) AS fl_t, typeof(st) AS st_t,\n" +
-                  "       typeof(mp) AS mp_t, typeof(ar) AS ar_t, typeof(nu) AS nu_t,\n" +
-                  "       typeof(x) AS x_t\n" +
-                  "FROM cp.`jsoninput/allTypes.json`";
-       testBuilder()
-        .sqlQuery(sql)
-        .ordered()
-        .baselineColumns("bi_t",   "fl_t",   "st_t",    "mp_t", "ar_t",   "nu_t",    "x_t")
-        .baselineValues( "BIGINT", "FLOAT8", "VARCHAR", "MAP",  "BIGINT", "VARCHAR", "VARCHAR")
-        .go();
-    } finally {
-      resetV2Reader();
-    }
+    // Column `x` does not actually appear in the file.
+    String sql ="SELECT typeof(bi) AS bi_t, typeof(fl) AS fl_t, typeof(st) AS st_t,\n" +
+                "       typeof(mp) AS mp_t, typeof(ar) AS ar_t, typeof(nu) AS nu_t,\n" +
+                "       typeof(x) AS x_t\n" +
+                "FROM cp.`jsoninput/allTypes.json`";
+     testBuilder()
+      .sqlQuery(sql)
+      .ordered()
+      .baselineColumns("bi_t",   "fl_t",   "st_t",    "mp_t", "ar_t",   "nu_t",    "x_t")
+      .baselineValues( "BIGINT", "FLOAT8", "VARCHAR", "MAP",  "BIGINT", "VARCHAR", "VARCHAR")
+      .go();
   }
 
   @Test
   public void testUnionType() throws Exception {
-    String sql ="SELECT typeof(a) AS t, modeof(a) AS m, drilltypeof(a) AS dt\n" +
-                "FROM cp.`jsoninput/union/c.json`";
+    String sql ="SELECT typeof(a) AS t, modeof(a) AS m, drilltypeof(a) AS dt FROM cp.`jsoninput/union/c.json`";
     try {
       testBuilder()
-        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+        .enableSessionOption(ENABLE_UNION_TYPE_KEY)
+        .disableSessionOption(ENABLE_V2_JSON_READER_KEY)
         .sqlQuery(sql)
         .ordered()
         .baselineColumns("t",       "m",        "dt")
@@ -371,17 +367,9 @@ public class TestTypeFns extends ClusterTest {
         .baselineValues( "LIST",    "NULLABLE", "UNION")
         .baselineValues( "NULL",    "NULLABLE", "UNION")
         .go();
+    } finally {
+      client.resetSession(ENABLE_UNION_TYPE_KEY);
+      client.resetSession(ENABLE_V2_JSON_READER_KEY);
     }
-    finally {
-      client.resetSession(ExecConstants.ENABLE_UNION_TYPE_KEY);
-    }
-  }
-
-  private void enableV2Reader(boolean enable) throws Exception {
-    client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
-  }
-
-  private void resetV2Reader() throws Exception {
-    client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
   }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/metastore/TestMetastoreWithEasyFormatPlugin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/metastore/TestMetastoreWithEasyFormatPlugin.java
index ec88737e3e..d0cb8954a1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/metastore/TestMetastoreWithEasyFormatPlugin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/metastore/TestMetastoreWithEasyFormatPlugin.java
@@ -610,38 +610,12 @@ public class TestMetastoreWithEasyFormatPlugin extends ClusterTest {
 
     Path tablePath = new Path(table.toURI().getPath());
 
-    TupleMetadata schema = new SchemaBuilder()
-        .addNullable("dir0", TypeProtos.MinorType.VARCHAR)
-        .addNullable("dir1", TypeProtos.MinorType.VARCHAR)
-        .addNullable("o_orderkey", TypeProtos.MinorType.BIGINT)
-        .addNullable("o_custkey", TypeProtos.MinorType.BIGINT)
-        .addNullable("o_orderstatus", TypeProtos.MinorType.VARCHAR)
-        .addNullable("o_totalprice", TypeProtos.MinorType.FLOAT8)
-        .addNullable("o_orderdate", TypeProtos.MinorType.VARCHAR)
-        .addNullable("o_orderpriority", TypeProtos.MinorType.VARCHAR)
-        .addNullable("o_clerk", TypeProtos.MinorType.VARCHAR)
-        .addNullable("o_shippriority", TypeProtos.MinorType.BIGINT)
-        .addNullable("o_comment", TypeProtos.MinorType.VARCHAR)
-        .build();
-
     Map<SchemaPath, ColumnStatistics<?>> tableColumnStatistics = new HashMap<>(TABLE_COLUMN_STATISTICS);
-    tableColumnStatistics.put(SchemaPath.getSimplePath("o_custkey"),
-        getColumnStatistics(25L,
-            1498L, 120L, TypeProtos.MinorType.BIGINT));
-    tableColumnStatistics.put(SchemaPath.getSimplePath("o_orderdate"),
-        getColumnStatistics("1994-01-01T00:00:00.000-08:00",
-            "1996-12-19T00:00:00.000-08:00", 120L, TypeProtos.MinorType.VARCHAR));
-    tableColumnStatistics.put(SchemaPath.getSimplePath("o_orderkey"),
-        getColumnStatistics(1L,
-            1319L, 120L, TypeProtos.MinorType.BIGINT));
-    tableColumnStatistics.put(SchemaPath.getSimplePath("o_shippriority"),
-        getColumnStatistics(0L,
-            0L, 120L, TypeProtos.MinorType.BIGINT));
 
     BaseTableMetadata expectedTableMetadata = BaseTableMetadata.builder()
         .tableInfo(tableInfo)
         .metadataInfo(TABLE_META_INFO)
-        .schema(schema)
+        .schema(SCHEMA)
         .location(new Path(table.toURI().getPath()))
         .columnsStatistics(tableColumnStatistics)
         .metadataStatistics(Arrays.asList(new StatisticsHolder<>(120L, TableStatisticsKind.ROW_COUNT),
@@ -657,18 +631,6 @@ public class TestMetastoreWithEasyFormatPlugin extends ClusterTest {
         .build();
 
     Map<SchemaPath, ColumnStatistics<?>> dir0CSVStats = new HashMap<>(DIR0_1994_SEGMENT_COLUMN_STATISTICS);
-    dir0CSVStats.put(SchemaPath.getSimplePath("o_custkey"),
-        getColumnStatistics(25L,
-            1469L, 40L, TypeProtos.MinorType.BIGINT));
-    dir0CSVStats.put(SchemaPath.getSimplePath("o_orderdate"),
-        getColumnStatistics("1994-01-01T00:00:00.000-08:00",
-            "1994-12-23T00:00:00.000-08:00", 40L, TypeProtos.MinorType.VARCHAR));
-    dir0CSVStats.put(SchemaPath.getSimplePath("o_orderkey"),
-        getColumnStatistics(5L,
-            1031L, 40L, TypeProtos.MinorType.BIGINT));
-    dir0CSVStats.put(SchemaPath.getSimplePath("o_shippriority"),
-        getColumnStatistics(0L,
-            0L, 40L, TypeProtos.MinorType.BIGINT));
 
     SegmentMetadata dir0 = SegmentMetadata.builder()
         .tableInfo(baseTableInfo)
@@ -678,7 +640,7 @@ public class TestMetastoreWithEasyFormatPlugin extends ClusterTest {
             .key("1994")
             .build())
         .path(new Path(tablePath, "1994"))
-        .schema(schema)
+        .schema(SCHEMA)
         .lastModifiedTime(getMaxLastModified(new File(table, "1994")))
         .column(SchemaPath.getSimplePath("dir0"))
         .columnsStatistics(dir0CSVStats)
@@ -720,19 +682,6 @@ public class TestMetastoreWithEasyFormatPlugin extends ClusterTest {
     expectedSegmentFilesLocations.add(segmentFiles);
 
     Map<SchemaPath, ColumnStatistics<?>> dir0q1Stats = new HashMap<>(DIR0_1994_Q1_SEGMENT_COLUMN_STATISTICS);
-    dir0q1Stats.put(SchemaPath.getSimplePath("o_custkey"),
-        getColumnStatistics(392L,
-            1411L, 10L, TypeProtos.MinorType.BIGINT));
-    dir0q1Stats.put(SchemaPath.getSimplePath("o_orderdate"),
-        getColumnStatistics("1994-01-01T00:00:00.000-08:00",
-            "1994-03-26T00:00:00.000-08:00", 10L, TypeProtos.MinorType.VARCHAR));
-    dir0q1Stats.put(SchemaPath.getSimplePath("o_orderkey"),
-        getColumnStatistics(66L,
-            833L, 10L, TypeProtos.MinorType.BIGINT));
-    dir0q1Stats.put(SchemaPath.getSimplePath("o_shippriority"),
-        getColumnStatistics(0L,
-            0L, 10L, TypeProtos.MinorType.BIGINT));
-
     long dir0q1lastModified = new File(new File(new File(table, "1994"), "Q1"), "orders_94_q1.json").lastModified();
     FileMetadata dir01994q1File = FileMetadata.builder()
         .tableInfo(baseTableInfo)
@@ -741,7 +690,7 @@ public class TestMetastoreWithEasyFormatPlugin extends ClusterTest {
             .identifier("1994/Q1/orders_94_q1.json")
             .key("1994")
             .build())
-        .schema(schema)
+        .schema(SCHEMA)
         .lastModifiedTime(dir0q1lastModified)
         .columnsStatistics(dir0q1Stats)
         .metadataStatistics(Collections.singletonList(new StatisticsHolder<>(10L, TableStatisticsKind.ROW_COUNT)))
@@ -785,9 +734,7 @@ public class TestMetastoreWithEasyFormatPlugin extends ClusterTest {
           .collect(Collectors.toSet());
 
       // verify top segments locations
-      assertEquals(
-          expectedTopLevelSegmentLocations,
-          topLevelSegmentLocations);
+      assertEquals(expectedTopLevelSegmentLocations, topLevelSegmentLocations);
 
       Set<Set<Path>> segmentFilesLocations = topSegmentMetadata.stream()
           .map(SegmentMetadata::getLocations)
@@ -815,7 +762,7 @@ public class TestMetastoreWithEasyFormatPlugin extends ClusterTest {
               .key("1994")
               .build())
           .path(new Path(new Path(tablePath, "1994"), "Q1"))
-          .schema(schema)
+          .schema(SCHEMA)
           .lastModifiedTime(getMaxLastModified(new File(new File(table, "1994"), "Q1")))
           .column(SchemaPath.getSimplePath("dir1"))
           .columnsStatistics(dir0q1Stats)
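The per-test schema built inline has been hoisted into a shared SCHEMA constant. A sketch of the hoisted form, using the same builder calls as the removed code (only a subset of the columns is shown; the constant name matches the diff, the class name is illustrative):

    import org.apache.drill.common.types.TypeProtos;
    import org.apache.drill.exec.record.metadata.SchemaBuilder;
    import org.apache.drill.exec.record.metadata.TupleMetadata;

    public class SchemaConstantSketch {
      // One shared constant replaces the per-test copies removed above.
      static final TupleMetadata SCHEMA = new SchemaBuilder()
          .addNullable("dir0", TypeProtos.MinorType.VARCHAR)
          .addNullable("o_orderkey", TypeProtos.MinorType.BIGINT)
          .addNullable("o_comment", TypeProtos.MinorType.VARCHAR)
          .build();
    }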
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestTopNSchemaChanges.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestTopNSchemaChanges.java
index 4aab0a4275..a91cb6d88f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestTopNSchemaChanges.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestTopNSchemaChanges.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill.exec.physical.impl.TopN;
 
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.test.TestBuilder;
@@ -145,21 +146,27 @@ public class TestTopNSchemaChanges extends BaseTestQuery {
     }
     writer.close();
 
-    TestBuilder builder = testBuilder()
-      .sqlQuery("select * from dfs.`%s` order by kl limit 8", TABLE)
-      .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
-      .ordered()
-      .baselineColumns("kl", "vl");
-
-    builder.baselineValues(0l, 0l);
-    builder.baselineValues(1.0d, 1.0d);
-    builder.baselineValues("2", "2");
-    builder.baselineValues(3l, 3l);
-    builder.baselineValues(4.0d, 4.0d);
-    builder.baselineValues("5", "5");
-    builder.baselineValues(6l, 6l);
-    builder.baselineValues(7.0d, 7.0d);
-    builder.go();
+    try {
+      TestBuilder builder = testBuilder()
+        .sqlQuery("select * from dfs.`%s` order by kl limit 8", TABLE)
+        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
+        .ordered()
+        .baselineColumns("kl", "vl");
+
+      builder.baselineValues(0L, 0L);
+      builder.baselineValues(1.0d, 1.0d);
+      builder.baselineValues("2", "2");
+      builder.baselineValues(3L, 3L);
+      builder.baselineValues(4.0d, 4.0d);
+      builder.baselineValues("5", "5");
+      builder.baselineValues(6L, 6L);
+      builder.baselineValues(7.0d, 7.0d);
+      builder.go();
+    } finally {
+      resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+    }
   }
 
   @Test
@@ -181,38 +188,46 @@ public class TestTopNSchemaChanges extends BaseTestQuery {
     }
     writer.close();
 
-    TestBuilder builder = testBuilder()
-      .sqlQuery("select kl, vl, kl1, vl1, kl2, vl2 from dfs.`%s` order by kl limit 3", TABLE)
-      .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
-      .ordered()
-      .baselineColumns("kl", "vl", "kl1", "vl1", "kl2", "vl2")
-      .baselineValues(100.0d, 100.0d, null, null, null, null)
-      .baselineValues(101.0d, 101.0d, null, null, null, null)
-      .baselineValues(102.0d, 102.0d, null, null, null, null);
-    builder.go();
-
-    builder = testBuilder()
-      .sqlQuery("select kl, vl, kl1, vl1, kl2, vl2  from dfs.`%s` order by kl1 limit 3", TABLE)
-      .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
-      .ordered()
-      .baselineColumns("kl", "vl", "kl1", "vl1", "kl2", "vl2")
-      .baselineValues(null, null, 0l, 0l, null, null)
-      .baselineValues(null, null, 1l, 1l, null, null)
-      .baselineValues(null, null, 2l, 2l, null, null);
-    builder.go();
-
-    builder = testBuilder()
-      .sqlQuery("select kl, vl, kl1, vl1, kl2, vl2 from dfs.`%s` order by kl2 limit 3", TABLE)
-      .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
-      .ordered()
-      .baselineColumns("kl", "vl", "kl1", "vl1", "kl2", "vl2")
-      .baselineValues(null, null, null, null, "200", "200")
-      .baselineValues(null, null, null, null, "201", "201")
-      .baselineValues(null, null, null, null, "202", "202");
-    builder.go();
-
-    // Since client can't handle new columns which are not in first batch, we won't test output of query.
-    // Query should run w/o any errors.
-    test("select * from dfs.`%s` order by kl limit 3", TABLE);
+    try {
+      TestBuilder builder = testBuilder()
+        .sqlQuery("select kl, vl, kl1, vl1, kl2, vl2 from dfs.`%s` order by kl limit 3", TABLE)
+        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
+        .ordered()
+        .baselineColumns("kl", "vl", "kl1", "vl1", "kl2", "vl2")
+        .baselineValues(100.0d, 100.0d, null, null, null, null)
+        .baselineValues(101.0d, 101.0d, null, null, null, null)
+        .baselineValues(102.0d, 102.0d, null, null, null, null);
+      builder.go();
+
+      builder = testBuilder()
+        .sqlQuery("select kl, vl, kl1, vl1, kl2, vl2  from dfs.`%s` order by kl1 limit 3", TABLE)
+        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
+        .ordered()
+        .baselineColumns("kl", "vl", "kl1", "vl1", "kl2", "vl2")
+        .baselineValues(null, null, 0L, 0L, null, null)
+        .baselineValues(null, null, 1L, 1L, null, null)
+        .baselineValues(null, null, 2L, 2L, null, null);
+      builder.go();
+
+      builder = testBuilder()
+        .sqlQuery("select kl, vl, kl1, vl1, kl2, vl2 from dfs.`%s` order by kl2 limit 3", TABLE)
+        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
+        .ordered()
+        .baselineColumns("kl", "vl", "kl1", "vl1", "kl2", "vl2")
+        .baselineValues(null, null, null, null, "200", "200")
+        .baselineValues(null, null, null, null, "201", "201")
+        .baselineValues(null, null, null, null, "202", "202");
+      builder.go();
+
+      // Since client can't handle new columns which are not in first batch, we won't test output of query.
+      // Query should run w/o any errors.
+      test("select * from dfs.`%s` order by kl limit 3", TABLE);
+    } finally {
+      resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+    }
   }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java
index e109557d55..3eba1f47a4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java
@@ -32,6 +32,10 @@ import java.io.FileWriter;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 
+import static org.apache.drill.exec.ExecConstants.ENABLE_UNION_TYPE_KEY;
+import static org.apache.drill.exec.ExecConstants.ENABLE_V2_JSON_READER_KEY;
+import static org.apache.drill.exec.planner.physical.PlannerSettings.ENABLE_HASH_JOIN_OPTION;
+
 @Category(OperatorTest.class)
 public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
   public static final Path LEFT_DIR = Paths.get("mergejoin-schemachanges-left");
@@ -263,7 +267,9 @@ public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
 
     TestBuilder builder = testBuilder()
       .sqlQuery(query)
-      .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
+      .disableSessionOption(ENABLE_HASH_JOIN_OPTION)
+      .enableSessionOption(ENABLE_UNION_TYPE_KEY)
+      .disableSessionOption(ENABLE_V2_JSON_READER_KEY)
       .unOrdered()
       .baselineColumns("kl", "vl", "kr", "vr", "kl1", "vl1", "kl2", "vl2", "kr1", "vr1", "kr2", "vr2");
 
@@ -278,7 +284,9 @@ public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
 
     builder = testBuilder()
       .sqlQuery(query)
-      .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
+      .disableSessionOption(ENABLE_HASH_JOIN_OPTION)
+      .enableSessionOption(ENABLE_UNION_TYPE_KEY)
+      .disableSessionOption(ENABLE_V2_JSON_READER_KEY)
       .unOrdered()
       .baselineColumns("kl", "vl", "kr", "vr", "kl1", "vl1", "kl2", "vl2", "kr1", "vr1", "kr2", "vr2");
 
@@ -299,7 +307,9 @@ public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
 
     builder = testBuilder()
       .sqlQuery(query)
-      .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
+      .disableSessionOption(ENABLE_HASH_JOIN_OPTION)
+      .enableSessionOption(ENABLE_UNION_TYPE_KEY)
+      .disableSessionOption(ENABLE_V2_JSON_READER_KEY)
       .unOrdered()
       .baselineColumns("kl", "vl", "kr", "vr", "kl1", "vl1", "kl2", "vl2", "kr1", "vr1", "kr2", "vr2");
 
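The typed enableSessionOption/disableSessionOption setters replace the stringly-typed ALTER SESSION statements, so option keys come from ExecConstants/PlannerSettings constants instead of hand-written SQL. A toy builder showing why the fluent form reads better (names and behavior here are illustrative, not the Drill test framework):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class OptionBuilderSketch {
      private final Map<String, Object> options = new LinkedHashMap<>();

      // Typed setters: the key is a compile-checked constant, not SQL text.
      OptionBuilderSketch enableSessionOption(String key)  { options.put(key, true);  return this; }
      OptionBuilderSketch disableSessionOption(String key) { options.put(key, false); return this; }

      public static void main(String[] args) {
        OptionBuilderSketch b = new OptionBuilderSketch()
            .disableSessionOption("planner.enable_hashjoin")
            .enableSessionOption("exec.enable_union_type")
            .disableSessionOption("store.json.enable_v2_reader");
        System.out.println(b.options);
      }
    }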
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/lateraljoin/TestE2EUnnestAndLateral.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/lateraljoin/TestE2EUnnestAndLateral.java
index 7b29091636..e277022072 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/lateraljoin/TestE2EUnnestAndLateral.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/lateraljoin/TestE2EUnnestAndLateral.java
@@ -17,13 +17,21 @@
  */
 package org.apache.drill.exec.physical.impl.lateraljoin;
 
+import ch.qos.logback.classic.Level;
 import org.apache.drill.categories.OperatorTest;
+import org.apache.drill.exec.physical.impl.ScanBatch;
+import org.apache.drill.exec.physical.impl.aggregate.HashAggBatch;
+import org.apache.drill.exec.physical.impl.aggregate.HashAggTemplate;
+import org.apache.drill.exec.physical.impl.join.LateralJoinBatch;
+import org.apache.drill.exec.physical.impl.protocol.OperatorRecordBatch;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterFixtureBuilder;
 import org.apache.drill.test.ClusterTest;
+import org.apache.drill.test.LogFixture;
 import org.apache.drill.test.TestBuilder;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -33,6 +41,8 @@ import static junit.framework.TestCase.fail;
 
 @Category(OperatorTest.class)
 public class TestE2EUnnestAndLateral extends ClusterTest {
+  private static LogFixture logFixture;
+  private static final Level CURRENT_LOG_LEVEL = Level.INFO;
 
   private static final String regularTestFile_1 = "cust_order_10_1.json";
   private static final String regularTestFile_2 = "cust_order_10_2.json";
@@ -48,6 +58,14 @@ public class TestE2EUnnestAndLateral extends ClusterTest {
         .sessionOption(PlannerSettings.ENABLE_UNNEST_LATERAL_KEY, true)
         .maxParallelization(1);
     startCluster(builder);
+    logFixture = LogFixture.builder()
+      .toConsole()
+      .logger(HashAggBatch.class, CURRENT_LOG_LEVEL)
+      .logger(HashAggTemplate.class, CURRENT_LOG_LEVEL)
+      .logger(ScanBatch.class, CURRENT_LOG_LEVEL)
+      .logger(OperatorRecordBatch.class, CURRENT_LOG_LEVEL)
+      .logger(LateralJoinBatch.class, CURRENT_LOG_LEVEL)
+      .build();
   }
 
   /***********************************************************************************************
@@ -370,7 +388,6 @@ public class TestE2EUnnestAndLateral extends ClusterTest {
       "FROM dfs.`lateraljoin/multipleFiles` customer, LATERAL " +
       "(SELECT t.ord.o_orderkey as o_orderkey, t.ord.o_totalprice as o_totalprice FROM UNNEST(customer.c_orders) t(ord)" +
       " ORDER BY o_totalprice DESC) orders WHERE customer.c_custkey = '7180' LIMIT 1";
-
     testBuilder()
       .sqlQuery(sql)
       .ordered()
@@ -499,7 +516,7 @@ public class TestE2EUnnestAndLateral extends ClusterTest {
    *****************************************************************************************/
 
   @Test
-  public void testMultipleBatchesLateral_WithLimitInParent() throws Exception {
+  public void testMultipleBatchesLateral_WithLimitInParent() {
     String sql = "SELECT customer.c_name, customer.c_address, orders.o_orderkey, orders.o_totalprice " +
       "FROM dfs.`lateraljoin/multipleFiles` customer, LATERAL " +
       "(SELECT t.ord.o_orderkey as o_orderkey, t.ord.o_totalprice  as o_totalprice FROM UNNEST(customer.c_orders) t(ord) WHERE t.ord.o_totalprice > 100000 LIMIT 2) " +
@@ -508,7 +525,7 @@ public class TestE2EUnnestAndLateral extends ClusterTest {
   }
 
   @Test
-  public void testMultipleBatchesLateral_WithFilterInParent() throws Exception {
+  public void testMultipleBatchesLateral_WithFilterInParent() {
     String sql = "SELECT customer.c_name, customer.c_address, orders.o_orderkey, orders.o_totalprice " +
       "FROM dfs.`lateraljoin/multipleFiles` customer, LATERAL " +
       "(SELECT t.ord.o_orderkey as o_orderkey, t.ord.o_totalprice as o_totalprice FROM UNNEST(customer.c_orders) t(ord) WHERE t.ord.o_totalprice > 100000 LIMIT 2) " +
@@ -517,7 +534,8 @@ public class TestE2EUnnestAndLateral extends ClusterTest {
   }
 
   @Test
-  public void testMultipleBatchesLateral_WithGroupByInParent() throws Exception {
+  @Ignore("Disable until SchemaChange in HashAgg fixed")
+  public void testMultipleBatchesLateral_WithGroupByInParent() {
     String sql = "SELECT customer.c_name, avg(orders.o_totalprice) AS avgPrice " +
       "FROM dfs.`lateraljoin/multipleFiles` customer, LATERAL " +
       "(SELECT t.ord.o_totalprice as o_totalprice FROM UNNEST(customer.c_orders) t(ord) WHERE t.ord.o_totalprice > 100000 LIMIT 2) " +
@@ -526,7 +544,7 @@ public class TestE2EUnnestAndLateral extends ClusterTest {
   }
 
   @Test
-  public void testMultipleBatchesLateral_WithOrderByInParent() throws Exception {
+  public void testMultipleBatchesLateral_WithOrderByInParent() {
     String sql = "SELECT customer.c_name, customer.c_address, orders.o_orderkey, orders.o_totalprice " +
       "FROM dfs.`lateraljoin/multipleFiles` customer, LATERAL " +
       "(SELECT t.ord.o_orderkey as o_orderkey, t.ord.o_totalprice as o_totalprice FROM UNNEST(customer.c_orders) t(ord)) orders " +
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java
index e0cccb0b71..5c91df6b21 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java
@@ -503,9 +503,7 @@ public class TestReaderLevelProjection extends SubOperatorTest {
     final NullColumnBuilder builder = new NullBuilderBuilder().build();
     final ResolvedRow rootTuple = new ResolvedRow(builder);
     try {
-      new ExplicitSchemaProjection(
-          scanProj, tableSchema, rootTuple,
-          ScanTestUtils.resolvers());
+      new ExplicitSchemaProjection(scanProj, tableSchema, rootTuple, ScanTestUtils.resolvers());
       fail();
     } catch (final UserException e) {
       // Expected
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java
index 32cddc0680..e74af23f9b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java
@@ -35,6 +35,9 @@ import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import static org.apache.drill.exec.ExecConstants.ENABLE_UNION_TYPE_KEY;
+import static org.apache.drill.exec.ExecConstants.ENABLE_V2_JSON_READER_KEY;
+
 @Category({SlowTest.class, OperatorTest.class})
 public class TestExternalSort extends BaseTestQuery {
 
@@ -156,7 +159,7 @@ public class TestExternalSort extends BaseTestQuery {
     builder.go();
   }
 
-  @Test
+  @Test // V2_UNION
   public void testNewColumns() throws Exception {
     final int record_count = 10000;
     final String tableDirName = "newColumns";
@@ -194,24 +197,29 @@ public class TestExternalSort extends BaseTestQuery {
       new JsonFileBuilder(rowSet).build(tableFile);
       rowSet.clear();
     }
-
-    // Test framework currently doesn't handle changing schema (i.e. new
-    // columns) on the client side
-    TestBuilder builder = testBuilder()
-        .sqlQuery("select a, b, c from dfs.`%s` order by a desc", tableDirName)
-        .ordered()
-        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
-        .baselineColumns("a", "b", "c");
-    for (int i = record_count; i >= 0;) {
-      builder.baselineValues((long) i, (long) i--, null);
-      if (i >= 0) {
-        builder.baselineValues((long) i, null, (long) i--);
+    try {
+      // Test framework currently doesn't handle changing schema (i.e. new
+      // columns) on the client side
+      TestBuilder builder = testBuilder()
+          .sqlQuery("select a, b, c from dfs.`%s` order by a desc", tableDirName)
+          .ordered()
+          .enableSessionOption(ENABLE_UNION_TYPE_KEY)
+          .disableSessionOption(ENABLE_V2_JSON_READER_KEY)
+          .baselineColumns("a", "b", "c");
+      for (int i = record_count; i >= 0;) {
+        builder.baselineValues((long) i, (long) i--, null);
+        if (i >= 0) {
+          builder.baselineValues((long) i, null, (long) i--);
+        }
       }
-    }
-    builder.go();
+      builder.go();
 
-    // TODO: Useless test: just dumps to console
-    test("select * from dfs.`%s` order by a desc", tableDirName);
+      // TODO: Useless test: just dumps to console
+      test("select * from dfs.`%s` order by a desc", tableDirName);
+    } finally {
+      resetSessionOption(ENABLE_UNION_TYPE_KEY);
+      resetSessionOption(ENABLE_V2_JSON_READER_KEY);
+    }
   }
 
   private File createTableFile(final String tableDirName, final String fileName) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java
index 14d5bc6e16..645cdc54c0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java
@@ -52,9 +52,6 @@ import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.apache.drill.exec.vector.accessor.TupleWriter.UndefinedColumnException;
 import org.apache.drill.test.SubOperatorTest;
-import org.apache.drill.exec.physical.rowSet.RowSet;
-import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
-import org.apache.drill.exec.physical.rowSet.RowSetReader;
 import org.apache.drill.test.rowSet.RowSetUtilities;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/TestRestJson.java b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/TestRestJson.java
index b4d25a0c5f..5449a3c42e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/TestRestJson.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/server/rest/TestRestJson.java
@@ -182,7 +182,7 @@ public class TestRestJson extends ClusterTest {
       System.out.println(
           client.queryBuilder().sql(sql).singletonLong());
       long end = System.currentTimeMillis();
-      System.out.println(String.format("COUNT(*) - Elapsed: %d ms", end - start));
+      System.out.printf("COUNT(*) - Elapsed: %d ms%n", end - start);
     }
 
     // Run the query and dump to a file to do a rough check
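System.out.printf is the direct replacement for println(String.format(...)); %n also emits the platform line separator. For reference, both forms print the same text:

    public class PrintfSketch {
      public static void main(String[] args) {
        long elapsed = 42;
        // Equivalent output; printf skips building the intermediate String.
        System.out.println(String.format("COUNT(*) - Elapsed: %d ms", elapsed));
        System.out.printf("COUNT(*) - Elapsed: %d ms%n", elapsed);
      }
    }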
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestMetastoreCommands.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestMetastoreCommands.java
index 75f90868e4..5172e47fc9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestMetastoreCommands.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestMetastoreCommands.java
@@ -64,10 +64,12 @@ import java.nio.file.Paths;
 import java.time.Instant;
 import java.time.LocalDateTime;
 import java.time.ZoneId;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -100,85 +102,87 @@ public class TestMetastoreCommands extends ClusterTest {
       .build();
 
   public static final Map<SchemaPath, ColumnStatistics<?>> TABLE_COLUMN_STATISTICS =
-      ImmutableMap.<SchemaPath, ColumnStatistics<?>>builder()
-      .put(SchemaPath.getSimplePath("o_shippriority"),
-          getColumnStatistics(0, 0, 120L, TypeProtos.MinorType.INT))
-      .put(SchemaPath.getSimplePath("o_orderstatus"),
-          getColumnStatistics("F", "P", 120L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_orderpriority"),
-          getColumnStatistics("1-URGENT", "5-LOW", 120L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_orderkey"),
-          getColumnStatistics(1, 1319, 120L, TypeProtos.MinorType.INT))
-      .put(SchemaPath.getSimplePath("o_clerk"),
-          getColumnStatistics("Clerk#000000004", "Clerk#000000995", 120L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_totalprice"),
-          getColumnStatistics(3266.69, 350110.21, 120L, TypeProtos.MinorType.FLOAT8))
-      .put(SchemaPath.getSimplePath("o_comment"),
+    new LinkedHashMap<SchemaPath, ColumnStatistics<?>>()
+    {{
+      put(SchemaPath.getSimplePath("o_shippriority"),
+          getColumnStatistics(0, 0, 120L, TypeProtos.MinorType.INT));
+      put(SchemaPath.getSimplePath("o_orderstatus"),
+          getColumnStatistics("F", "P", 120L, TypeProtos.MinorType.VARCHAR));
+      put(SchemaPath.getSimplePath("o_orderpriority"),
+          getColumnStatistics("1-URGENT", "5-LOW", 120L, TypeProtos.MinorType.VARCHAR));
+      put(SchemaPath.getSimplePath("o_orderkey"),
+          getColumnStatistics(1, 1319, 120L, TypeProtos.MinorType.INT));
+      put(SchemaPath.getSimplePath("o_clerk"),
+          getColumnStatistics("Clerk#000000004", "Clerk#000000995", 120L, TypeProtos.MinorType.VARCHAR));
+      put(SchemaPath.getSimplePath("o_totalprice"),
+          getColumnStatistics(3266.69, 350110.21, 120L, TypeProtos.MinorType.FLOAT8));
+      put(SchemaPath.getSimplePath("o_comment"),
           getColumnStatistics(" about the final platelets. dependen",
-              "zzle. carefully enticing deposits nag furio", 120L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_custkey"),
-          getColumnStatistics(25, 1498, 120L, TypeProtos.MinorType.INT))
-      .put(SchemaPath.getSimplePath("dir0"),
-          getColumnStatistics("1994", "1996", 120L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("dir1"),
-          getColumnStatistics("Q1", "Q4", 120L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_orderdate"),
-          getColumnStatistics(757382400000L, 850953600000L, 120L, TypeProtos.MinorType.DATE))
-      .build();
+              "zzle. carefully enticing deposits nag furio", 120L, TypeProtos.MinorType.VARCHAR));
+      put(SchemaPath.getSimplePath("o_custkey"),
+          getColumnStatistics(25, 1498, 120L, TypeProtos.MinorType.INT));
+      put(SchemaPath.getSimplePath("dir0"),
+          getColumnStatistics("1994", "1996", 120L, TypeProtos.MinorType.VARCHAR));
+      put(SchemaPath.getSimplePath("dir1"),
+          getColumnStatistics("Q1", "Q4", 120L, TypeProtos.MinorType.VARCHAR));
+      put(SchemaPath.getSimplePath("o_orderdate"),
+          getColumnStatistics(757382400000L, 850953600000L, 120L, TypeProtos.MinorType.DATE));
+    }};
 
   public static final Map<SchemaPath, ColumnStatistics<?>> DIR0_1994_SEGMENT_COLUMN_STATISTICS =
-      ImmutableMap.<SchemaPath, ColumnStatistics<?>>builder()
-      .put(SchemaPath.getSimplePath("o_shippriority"),
-          getColumnStatistics(0, 0, 40L, TypeProtos.MinorType.INT))
-      .put(SchemaPath.getSimplePath("o_orderstatus"),
-          getColumnStatistics("F", "F", 40L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_orderpriority"),
-          getColumnStatistics("1-URGENT", "5-LOW", 40L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_orderkey"),
-          getColumnStatistics(5, 1031, 40L, TypeProtos.MinorType.INT))
-      .put(SchemaPath.getSimplePath("o_clerk"),
-          getColumnStatistics("Clerk#000000004", "Clerk#000000973", 40L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_totalprice"),
-          getColumnStatistics(3266.69, 350110.21, 40L, TypeProtos.MinorType.FLOAT8))
-      .put(SchemaPath.getSimplePath("o_comment"),
+    new LinkedHashMap<SchemaPath, ColumnStatistics<?>>()
+    {{
+        put(SchemaPath.getSimplePath("o_shippriority"),
+          getColumnStatistics(0, 0, 40L, TypeProtos.MinorType.INT));
+        put(SchemaPath.getSimplePath("o_orderstatus"),
+          getColumnStatistics("F", "F", 40L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("o_orderpriority"),
+          getColumnStatistics("1-URGENT", "5-LOW", 40L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("o_orderkey"),
+          getColumnStatistics(5, 1031, 40L, TypeProtos.MinorType.INT));
+        put(SchemaPath.getSimplePath("o_clerk"),
+          getColumnStatistics("Clerk#000000004", "Clerk#000000973", 40L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("o_totalprice"),
+          getColumnStatistics(3266.69, 350110.21, 40L, TypeProtos.MinorType.FLOAT8));
+        put(SchemaPath.getSimplePath("o_comment"),
           getColumnStatistics(" accounts nag slyly. ironic, ironic accounts wake blithel",
-              "yly final requests over the furiously regula", 40L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_custkey"),
-          getColumnStatistics(25, 1469, 40L, TypeProtos.MinorType.INT))
-      .put(SchemaPath.getSimplePath("dir0"),
-          getColumnStatistics("1994", "1994", 40L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("dir1"),
-          getColumnStatistics("Q1", "Q4", 40L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_orderdate"),
-          getColumnStatistics(757382400000L, 788140800000L, 40L, TypeProtos.MinorType.DATE))
-      .build();
+            "yly final requests over the furiously regula", 40L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("o_custkey"),
+          getColumnStatistics(25, 1469, 40L, TypeProtos.MinorType.INT));
+        put(SchemaPath.getSimplePath("dir0"),
+          getColumnStatistics("1994", "1994", 40L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("dir1"),
+          getColumnStatistics("Q1", "Q4", 40L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("o_orderdate"),
+          getColumnStatistics(757382400000L, 788140800000L, 40L, TypeProtos.MinorType.DATE));
+    }};
 
   public static final Map<SchemaPath, ColumnStatistics<?>> DIR0_1994_Q1_SEGMENT_COLUMN_STATISTICS =
-      ImmutableMap.<SchemaPath, ColumnStatistics<?>>builder()
-      .put(SchemaPath.getSimplePath("o_shippriority"),
-          getColumnStatistics(0, 0, 10L, TypeProtos.MinorType.INT))
-      .put(SchemaPath.getSimplePath("o_orderstatus"),
-          getColumnStatistics("F", "F", 10L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_orderpriority"),
-          getColumnStatistics("1-URGENT", "5-LOW", 10L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_orderkey"),
-          getColumnStatistics(66, 833, 10L, TypeProtos.MinorType.INT))
-      .put(SchemaPath.getSimplePath("o_clerk"),
-          getColumnStatistics("Clerk#000000062", "Clerk#000000973", 10L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_totalprice"),
-          getColumnStatistics(3266.69, 132531.73, 10L, TypeProtos.MinorType.FLOAT8))
-      .put(SchemaPath.getSimplePath("o_comment"),
+    new LinkedHashMap<SchemaPath, ColumnStatistics<?>>() {{
+        put(SchemaPath.getSimplePath("o_shippriority"),
+          getColumnStatistics(0, 0, 10L, TypeProtos.MinorType.INT));
+        put(SchemaPath.getSimplePath("o_orderstatus"),
+          getColumnStatistics("F", "F", 10L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("o_orderpriority"),
+          getColumnStatistics("1-URGENT", "5-LOW", 10L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("o_orderkey"),
+          getColumnStatistics(66, 833, 10L, TypeProtos.MinorType.INT));
+        put(SchemaPath.getSimplePath("o_clerk"),
+          getColumnStatistics("Clerk#000000062", "Clerk#000000973", 10L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("o_totalprice"),
+          getColumnStatistics(3266.69, 132531.73, 10L, TypeProtos.MinorType.FLOAT8));
+        put(SchemaPath.getSimplePath("o_comment"),
           getColumnStatistics(" special pinto beans use quickly furiously even depende",
-              "y pending requests integrate", 10L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_custkey"),
-          getColumnStatistics(392, 1411, 10L, TypeProtos.MinorType.INT))
-      .put(SchemaPath.getSimplePath("dir0"),
-          getColumnStatistics("1994", "1994", 10L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("dir1"),
-          getColumnStatistics("Q1", "Q1", 10L, TypeProtos.MinorType.VARCHAR))
-      .put(SchemaPath.getSimplePath("o_orderdate"),
-          getColumnStatistics(757382400000L, 764640000000L, 10L, TypeProtos.MinorType.DATE))
-      .build();
+            "y pending requests integrate", 10L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("o_custkey"),
+          getColumnStatistics(392, 1411, 10L, TypeProtos.MinorType.INT));
+        put(SchemaPath.getSimplePath("dir0"),
+          getColumnStatistics("1994", "1994", 10L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("dir1"),
+          getColumnStatistics("Q1", "Q1", 10L, TypeProtos.MinorType.VARCHAR));
+        put(SchemaPath.getSimplePath("o_orderdate"),
+          getColumnStatistics(757382400000L, 764640000000L, 10L, TypeProtos.MinorType.DATE));
+    }};
 
   public static final MetadataInfo TABLE_META_INFO = MetadataInfo.builder()
       .type(MetadataType.TABLE)
@@ -3564,15 +3568,16 @@ public class TestMetastoreCommands extends ClusterTest {
     }
   }
 
-  public static <T> ColumnStatistics<T> getColumnStatistics(T minValue, T maxValue,
-      long rowCount, TypeProtos.MinorType minorType) {
+  public static <T> ColumnStatistics<T> getColumnStatistics(T minValue, T maxValue, long rowCount,
+                                                            TypeProtos.MinorType minorType) {
     return new ColumnStatistics<>(
-        Arrays.asList(
-            new StatisticsHolder<>(minValue, ColumnStatisticsKind.MIN_VALUE),
-            new StatisticsHolder<>(maxValue, ColumnStatisticsKind.MAX_VALUE),
-            new StatisticsHolder<>(rowCount, TableStatisticsKind.ROW_COUNT),
-            new StatisticsHolder<>(rowCount, ColumnStatisticsKind.NON_NULL_VALUES_COUNT),
-            new StatisticsHolder<>(0L, ColumnStatisticsKind.NULLS_COUNT)),
+      new ArrayList<StatisticsHolder<?>>() {{
+          add(new StatisticsHolder<>(minValue, ColumnStatisticsKind.MIN_VALUE));
+          add(new StatisticsHolder<>(maxValue, ColumnStatisticsKind.MAX_VALUE));
+          add(new StatisticsHolder<>(rowCount, TableStatisticsKind.ROW_COUNT));
+          add(new StatisticsHolder<>(rowCount, ColumnStatisticsKind.NON_NULL_VALUES_COUNT));
+          add(new StatisticsHolder<>(0L, ColumnStatisticsKind.NULLS_COUNT));
+        }},
         minorType);
   }
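The statistics maps and the statistics list above now use double-brace initialization: the outer braces declare an anonymous LinkedHashMap/ArrayList subclass and the inner braces are an instance initializer, which preserves insertion order and allows multi-statement construction at the cost of one extra class per usage site. A minimal illustration (values are made up):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class DoubleBraceSketch {
      public static void main(String[] args) {
        // Outer braces: anonymous LinkedHashMap subclass.
        // Inner braces: instance initializer running the put() calls.
        Map<String, Integer> stats = new LinkedHashMap<String, Integer>() {{
          put("rowCount", 120);
          put("nullsCount", 0);
        }};
        System.out.println(stats); // insertion order is preserved
      }
    }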
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/DropboxFileSystemTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/DropboxFileSystemTest.java
index b48f29fc22..ae198ad731 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/DropboxFileSystemTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/DropboxFileSystemTest.java
@@ -27,7 +27,7 @@ import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.store.dfs.FileSystemConfig;
 import org.apache.drill.exec.store.dfs.WorkspaceConfig;
-import org.apache.drill.exec.store.easy.json.JSONFormatPlugin.JSONFormatConfig;
+import org.apache.drill.exec.store.easy.json.JSONFormatConfig;
 import org.apache.drill.exec.store.easy.text.TextFormatConfig;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterTest;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
index 923f12d05c..d533c7d268 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
@@ -164,9 +164,9 @@ public class TestImplicitFileColumns extends BaseTestQuery {
   @Test
   public void testStarColumnJson() throws Exception {
     SchemaBuilder schemaBuilder = new SchemaBuilder()
-        .addNullable("dir0", TypeProtos.MinorType.VARCHAR)
         .addNullable("id", TypeProtos.MinorType.BIGINT)
-        .addNullable("name", TypeProtos.MinorType.VARCHAR);
+        .addNullable("name", TypeProtos.MinorType.VARCHAR)
+        .addNullable("dir0", TypeProtos.MinorType.VARCHAR);
     final BatchSchema expectedSchema = new BatchSchemaBuilder()
         .withSchemaBuilder(schemaBuilder)
         .build();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/json/loader/TestRepeatedList.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/json/loader/TestRepeatedList.java
index 997655eaa7..631b17b59d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/json/loader/TestRepeatedList.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/json/loader/TestRepeatedList.java
@@ -445,7 +445,7 @@ public class TestRepeatedList extends BaseJsonLoaderTest {
         .addSingleCol(objArray())
         .addSingleCol(objArray())
         .addSingleCol(singleObjArray(strArray()))
-        .addSingleCol(objArray(strArray("\"foo\""), strArray("20")))
+        .addSingleCol(objArray(strArray("foo"), strArray("20")))
         .build();
     RowSetUtilities.verify(expected, results);
     assertNull(loader.next());
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/json/loader/TestUnknowns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/json/loader/TestUnknowns.java
index 8f8ad2425f..1d227c08cc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/json/loader/TestUnknowns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/json/loader/TestUnknowns.java
@@ -246,7 +246,7 @@ public class TestUnknowns extends BaseJsonLoaderTest {
     RowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addSingleCol(strArray())
         .addSingleCol(strArray("null"))
-        .addSingleCol(strArray("\"foo\""))
+        .addSingleCol(strArray("foo"))
         .addSingleCol(strArray("10", "[20]", "{\"b\": 30}"))
         .build();
     RowSetUtilities.verify(expected, results);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonEscapeAnyChar.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonEscapeAnyChar.java
similarity index 92%
rename from exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonEscapeAnyChar.java
rename to exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonEscapeAnyChar.java
index dcd65a6f92..cb148b8807 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonEscapeAnyChar.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonEscapeAnyChar.java
@@ -15,12 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.vector.complex.writer;
+package org.apache.drill.exec.store.json;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.vector.complex.writer.TestJsonReader.TestWrapper;
+import org.apache.drill.exec.store.json.TestJsonReader.TestWrapper;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterTest;
 import org.junit.After;
@@ -59,7 +59,7 @@ public class TestJsonEscapeAnyChar extends ClusterTest {
 
   @Test
   public void testwithOptionEnabled() throws Exception {
-    runBoth(() -> doTestWithOptionEnabled());
+    runBoth(this::doTestWithOptionEnabled);
   }
 
   private void doTestWithOptionEnabled() throws Exception {
@@ -76,14 +76,16 @@ public class TestJsonEscapeAnyChar extends ClusterTest {
       resetJsonReaderEscapeAnyChar();
     }
   }
+
   @Test
   public void testwithOptionDisabled() throws Exception {
-    runBoth(() -> doTestWithOptionDisabled());
+    runBoth(this::doTestWithOptionDisabled);
   }
 
   private void doTestWithOptionDisabled() throws Exception {
     try {
-      queryBuilder().sql(QUERY)
+      queryBuilder()
+        .sql(QUERY)
         .run();
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString("DATA_READ ERROR: Error parsing JSON - Unrecognized character escape"));
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java
index 19b0b7be72..d6ce06496e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java
@@ -22,6 +22,7 @@ import org.apache.drill.categories.RowSetTest;
 
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.physical.rowSet.DirectRowSet;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -30,6 +31,7 @@ import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterTest;
 import org.apache.drill.test.rowSet.RowSetComparison;
+import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -45,6 +47,7 @@ public class TestJsonModes extends ClusterTest {
   @BeforeClass
   public static void setup() throws Exception {
     ClusterTest.startCluster(ClusterFixture.builder(dirTestWatcher));
+    client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, false);
   }
 
   @Test
@@ -159,4 +162,9 @@ public class TestJsonModes extends ClusterTest {
     long cnt = queryBuilder().physical(plan).singletonLong();
     assertEquals("Counts should match", 4L, cnt);
   }
+
+  @AfterClass
+  public static void resetOptions() {
+    client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
 }
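TestJsonModes now pins the legacy reader once for the whole class instead of per test: the option is set in @BeforeClass and restored in the new @AfterClass hook. The pin-and-restore shape, shown standalone as a sketch for any ClusterTest subclass:

    @BeforeClass
    public static void setup() throws Exception {
      ClusterTest.startCluster(ClusterFixture.builder(dirTestWatcher));
      client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, false);
    }

    @AfterClass
    public static void resetOptions() {
      client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
    }

Resetting in @AfterClass matters because session options set through the shared test client would otherwise leak into whichever test class runs next on the same cluster fixture.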
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonNanInf.java
similarity index 87%
rename from exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
rename to exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonNanInf.java
index 5b440a740b..e556ec16ea 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonNanInf.java
@@ -15,13 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.vector.complex.writer;
+package org.apache.drill.exec.store.json;
 
 import static org.apache.drill.test.TestBuilder.mapOf;
 import static org.hamcrest.CoreMatchers.containsString;
-import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertEquals;
 import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.fail;
 
 import java.io.File;
@@ -36,12 +36,11 @@ import org.apache.drill.exec.record.RecordBatchLoader;
 import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.vector.VarCharVector;
-import org.apache.drill.exec.vector.complex.writer.TestJsonReader.TestWrapper;
+import org.apache.drill.exec.store.json.TestJsonReader.TestWrapper;
 import org.apache.drill.test.BaseTestQuery;
 import org.junit.Ignore;
 import org.junit.Test;
 
-// TODO: Move to JSON reader package after code review
 // TODO: Split or rename: this tests more than NanInf
 public class TestJsonNanInf extends BaseTestQuery {
 
@@ -58,7 +57,7 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testNanInfSelect() throws Exception {
-    runBoth(() -> doTestNanInfSelect());
+    runBoth(this::doTestNanInfSelect);
   }
 
   private void doTestNanInfSelect() throws Exception {
@@ -73,8 +72,7 @@ public class TestJsonNanInf extends BaseTestQuery {
         .unOrdered()
         .baselineColumns("nan_col", "inf_col")
         .baselineValues(Double.NaN, Double.POSITIVE_INFINITY)
-        .build()
-        .run();
+        .go();
     } finally {
       FileUtils.deleteQuietly(file);
     }
@@ -83,7 +81,7 @@ public class TestJsonNanInf extends BaseTestQuery {
   @Test
   @Ignore // see DRILL-6018
   public void testExcludePositiveInfinity() throws Exception {
-    runBoth(() -> doTestExcludePositiveInfinity());
+    runBoth(this::doTestExcludePositiveInfinity);
   }
 
   private void doTestExcludePositiveInfinity() throws Exception {
@@ -99,8 +97,7 @@ public class TestJsonNanInf extends BaseTestQuery {
         .unOrdered()
         .baselineColumns("inf_col")
         .baselineValues(5.0)
-        .build()
-        .run();
+        .go();
     } finally {
       FileUtils.deleteQuietly(file);
     }
@@ -109,7 +106,7 @@ public class TestJsonNanInf extends BaseTestQuery {
   @Test
   @Ignore // see DRILL-6018
   public void testExcludeNegativeInfinity() throws Exception {
-    runBoth(() -> doTestExcludeNegativeInfinity());
+    runBoth(this::doTestExcludeNegativeInfinity);
   }
 
   private void doTestExcludeNegativeInfinity() throws Exception {
@@ -125,8 +122,7 @@ public class TestJsonNanInf extends BaseTestQuery {
         .unOrdered()
         .baselineColumns("inf_col")
         .baselineValues(5.0)
-        .build()
-        .run();
+        .go();
     } finally {
       FileUtils.deleteQuietly(file);
     }
@@ -135,7 +131,7 @@ public class TestJsonNanInf extends BaseTestQuery {
   @Test
   @Ignore // see DRILL-6018
   public void testIncludePositiveInfinity() throws Exception {
-    runBoth(() -> doTestIncludePositiveInfinity());
+    runBoth(this::doTestIncludePositiveInfinity);
   }
 
   private void doTestIncludePositiveInfinity() throws Exception {
@@ -160,7 +156,7 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testExcludeNan() throws Exception {
-    runBoth(() -> doTestExcludeNan());
+    runBoth(this::doTestExcludeNan);
   }
 
   private void doTestExcludeNan() throws Exception {
@@ -176,8 +172,7 @@ public class TestJsonNanInf extends BaseTestQuery {
           .unOrdered()
           .baselineColumns("nan_col")
           .baselineValues(5.0)
-          .build()
-          .run();
+          .go();
     } finally {
       FileUtils.deleteQuietly(file);
     }
@@ -185,7 +180,7 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testIncludeNan() throws Exception {
-    runBoth(() -> doTestIncludeNan());
+    runBoth(this::doTestIncludeNan);
   }
 
   private void doTestIncludeNan() throws Exception {
@@ -201,8 +196,7 @@ public class TestJsonNanInf extends BaseTestQuery {
           .unOrdered()
           .baselineColumns("nan_col")
           .baselineValues(Double.NaN)
-          .build()
-          .run();
+          .go();
     } finally {
       FileUtils.deleteQuietly(file);
     }
@@ -210,7 +204,7 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testNanInfFailure() throws Exception {
-    runBoth(() -> doTestNanInfFailure());
+    runBoth(this::doTestNanInfFailure);
   }
 
   private void doTestNanInfFailure() throws Exception {
@@ -232,7 +226,7 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testCreateTableNanInf() throws Exception {
-    runBoth(() -> doTestCreateTableNanInf());
+    runBoth(this::doTestCreateTableNanInf);
   }
 
   private void doTestCreateTableNanInf() throws Exception {
@@ -249,11 +243,11 @@ public class TestJsonNanInf extends BaseTestQuery {
       File resultFile = new File(new File(file.getParent(), newTable),"0_0_0.json");
       String resultJson = FileUtils.readFileToString(resultFile);
       int nanIndex = resultJson.indexOf("NaN");
-      assertFalse("`NaN` must not be enclosed with \"\" ", resultJson.charAt(nanIndex - 1) == '"');
-      assertFalse("`NaN` must not be enclosed with \"\" ", resultJson.charAt(nanIndex + "NaN".length()) == '"');
+      assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex - 1));
+      assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex + "NaN".length()));
       int infIndex = resultJson.indexOf("Infinity");
-      assertFalse("`Infinity` must not be enclosed with \"\" ", resultJson.charAt(infIndex - 1) == '"');
-      assertFalse("`Infinity` must not be enclosed with \"\" ", resultJson.charAt(infIndex + "Infinity".length()) == '"');
+      assertNotEquals("`Infinity` must not be enclosed with \"\" ", '"', resultJson.charAt(infIndex - 1));
+      assertNotEquals("`Infinity` must not be enclosed with \"\" ", '"', resultJson.charAt(infIndex + "Infinity".length()));
     } finally {
       test("drop table if exists dfs.`%s`", newTable);
       FileUtils.deleteQuietly(file);
@@ -262,7 +256,7 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testConvertFromJsonFunction() throws Exception {
-    runBoth(() -> doTestConvertFromJsonFunction());
+    runBoth(this::doTestConvertFromJsonFunction);
   }
 
   private void doTestConvertFromJsonFunction() throws Exception {
@@ -276,8 +270,7 @@ public class TestJsonNanInf extends BaseTestQuery {
           .unOrdered()
           .baselineColumns("col")
           .baselineValues(mapOf("nan_col", Double.NaN))
-          .build()
-          .run();
+          .go();
     } finally {
       FileUtils.deleteQuietly(file);
     }
@@ -313,7 +306,7 @@ public class TestJsonNanInf extends BaseTestQuery {
       FileUtils.writeStringToFile(file, csv);
       List<QueryDataBatch> results = testSqlWithResults(query);
       RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
-      assertTrue("Query result must contain 1 row", results.size() == 1);
+      assertEquals("Query result must contain 1 row", 1, results.size());
       QueryDataBatch batch = results.get(0);
 
       batchLoader.load(batch.getHeader().getDef(), batch.getData());
@@ -321,8 +314,8 @@ public class TestJsonNanInf extends BaseTestQuery {
       // ensuring that `NaN` token ARE NOT enclosed with double quotes
       String resultJson = vw.getValueVector().getAccessor().getObject(0).toString();
       int nanIndex = resultJson.indexOf("NaN");
-      assertFalse("`NaN` must not be enclosed with \"\" ", resultJson.charAt(nanIndex - 1) == '"');
-      assertFalse("`NaN` must not be enclosed with \"\" ", resultJson.charAt(nanIndex + "NaN".length()) == '"');
+      assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex - 1));
+      assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex + "NaN".length()));
       batch.release();
       batchLoader.clear();
     } finally {
@@ -339,13 +332,12 @@ public class TestJsonNanInf extends BaseTestQuery {
           .unOrdered()
           .baselineColumns("sin_col", "sum_col")
           .baselineValues(Double.NaN, Double.POSITIVE_INFINITY)
-          .build()
-          .run();
+          .go();
   }
 
   @Test
   public void testOrderByWithNaN() throws Exception {
-    runBoth(() -> doTestOrderByWithNaN());
+    runBoth(this::doTestOrderByWithNaN);
   }
 
   private void doTestOrderByWithNaN() throws Exception {
@@ -368,8 +360,7 @@ public class TestJsonNanInf extends BaseTestQuery {
           .baselineValues("obj1", Double.NaN)
           .baselineValues("obj2", Double.NEGATIVE_INFINITY)
           .baselineValues("obj2", Double.NaN)
-          .build()
-          .run();
+          .go();
     } finally {
       test("alter session set `%s` = false", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
       FileUtils.deleteQuietly(file);
@@ -378,7 +369,7 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testNestedLoopJoinWithNaN() throws Exception {
-    runBoth(() -> doTestNestedLoopJoinWithNaN());
+    runBoth(this::doTestNestedLoopJoinWithNaN);
   }
 
   private void doTestNestedLoopJoinWithNaN() throws Exception {
@@ -410,8 +401,7 @@ public class TestJsonNanInf extends BaseTestQuery {
           .baselineValues("object2")
           .baselineValues("object3")
           .baselineValues("object4")
-          .build()
-          .run();
+          .go();
     } finally {
       test("alter session set `%s` = false", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
       JoinTestBase.resetJoinOptions();
@@ -421,7 +411,7 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testHashJoinWithNaN() throws Exception {
-    runBoth(() -> doTestHashJoinWithNaN());
+    runBoth(this::doTestHashJoinWithNaN);
   }
 
   private void doTestHashJoinWithNaN() throws Exception {
@@ -439,13 +429,12 @@ public class TestJsonNanInf extends BaseTestQuery {
       FileUtils.writeStringToFile(file, json);
       test("alter session set `%s` = true", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
       testBuilder()
-              .sqlQuery(query)
-              .ordered()
-              .baselineColumns("name")
-              .baselineValues("obj1")
-              .baselineValues("obj2")
-              .build()
-              .run();
+          .sqlQuery(query)
+          .ordered()
+          .baselineColumns("name")
+          .baselineValues("obj1")
+          .baselineValues("obj2")
+          .go();
     } finally {
       test("alter session set `%s` = false", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
       JoinTestBase.resetJoinOptions();
@@ -455,7 +444,7 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testMergeJoinWithNaN() throws Exception {
-    runBoth(() -> doTestMergeJoinWithNaN());
+    runBoth(this::doTestMergeJoinWithNaN);
   }
 
   private void doTestMergeJoinWithNaN() throws Exception {
@@ -473,13 +462,12 @@ public class TestJsonNanInf extends BaseTestQuery {
       FileUtils.writeStringToFile(file, json);
       test("alter session set `%s` = true", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
       testBuilder()
-              .sqlQuery(query)
-              .ordered()
-              .baselineColumns("name")
-              .baselineValues("obj1")
-              .baselineValues("obj2")
-              .build()
-              .run();
+          .sqlQuery(query)
+          .ordered()
+          .baselineColumns("name")
+          .baselineValues("obj1")
+          .baselineValues("obj2")
+          .go();
     } finally {
       test("alter session set `%s` = false", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
       JoinTestBase.resetJoinOptions();
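Most of the churn in TestJsonNanInf is mechanical: lambdas such as () -> doTestX() become method references passed to runBoth(), which executes each test body under both JSON readers. The helper itself is not part of this hunk; a plausible sketch of what these tests rely on, assuming the option key shown elsewhere in this commit:

    // Sketch only: run one test body under the V1 reader, then under the
    // V2 reader, and always restore the session default afterwards.
    public interface TestWrapper {
      void apply() throws Exception;
    }

    protected void runBoth(TestWrapper wrapper) throws Exception {
      try {
        alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, false);
        wrapper.apply();
        alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, true);
        wrapper.apply();
      } finally {
        resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
      }
    }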
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReader.java
similarity index 89%
rename from exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
rename to exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReader.java
index bd2517ceea..4ec019b5b6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReader.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.vector.complex.writer;
+package org.apache.drill.exec.store.json;
 
 import static org.apache.drill.test.TestBuilder.listOf;
 import static org.apache.drill.test.TestBuilder.mapOf;
@@ -53,7 +53,6 @@ import org.slf4j.LoggerFactory;
  * <li><tt>TestJsonReaderQuery</tt></li>
  * </ul>
  */
-//TODO: Move to JSON reader package after code review
 @Category(RowSetTest.class)
 public class TestJsonReader extends BaseTestQuery {
   private static final Logger logger = LoggerFactory.getLogger(TestJsonReader.class);
@@ -64,11 +63,11 @@ public class TestJsonReader extends BaseTestQuery {
     dirTestWatcher.copyResourceToRoot(Paths.get("vector", "complex", "writer"));
   }
 
-  private void enableV2Reader(boolean enable) throws Exception {
+  private void enableV2Reader(boolean enable) {
     alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
   }
 
-  private void resetV2Reader() throws Exception {
+  private void resetV2Reader() {
     resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
   }
 
@@ -89,7 +88,7 @@ public class TestJsonReader extends BaseTestQuery {
 
    @Test
   public void schemaChange() throws Exception {
-    runBoth(() -> doSchemaChange());
+    runBoth(this::doSchemaChange);
   }
 
   private void doSchemaChange() throws Exception {
@@ -98,7 +97,7 @@ public class TestJsonReader extends BaseTestQuery {
 
   @Test
   public void testSplitAndTransferFailure() throws Exception {
-    runBoth(() -> doTestSplitAndTransferFailure());
+    runBoth(this::doTestSplitAndTransferFailure);
   }
 
   private void doTestSplitAndTransferFailure() throws Exception {
@@ -131,7 +130,7 @@ public class TestJsonReader extends BaseTestQuery {
 
   @Test // DRILL-1824
   public void schemaChangeValidate() throws Exception {
-    runBoth(() -> doSchemaChangeValidate());
+    runBoth(this::doSchemaChangeValidate);
   }
 
   private void doSchemaChangeValidate() throws Exception {
@@ -179,6 +178,7 @@ public class TestJsonReader extends BaseTestQuery {
               .sqlQuery("select * from cp.`jsoninput/union/a.json`")
               .ordered()
               .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+              .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
               .baselineColumns("field1", "field2")
               .baselineValues(
                       1L, 1.2
@@ -221,6 +221,7 @@ public class TestJsonReader extends BaseTestQuery {
               ).go();
     } finally {
       resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
     }
   }
 
@@ -235,11 +236,13 @@ public class TestJsonReader extends BaseTestQuery {
                 "from cp.`jsoninput/union/a.json`) where a is not null")
               .ordered()
               .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+              .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
               .baselineColumns("a", "type")
               .baselineValues(13L, "BIGINT")
               .go();
     } finally {
       resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
     }
   }
 
@@ -254,6 +257,7 @@ public class TestJsonReader extends BaseTestQuery {
                 "when is_map(field1) then t.field1.inner1 end f1 from cp.`jsoninput/union/a.json` t")
               .ordered()
               .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+              .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
               .baselineColumns("f1")
               .baselineValues(1L)
               .baselineValues(2L)
@@ -262,6 +266,7 @@ public class TestJsonReader extends BaseTestQuery {
               .go();
     } finally {
       resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
     }
   }
 
@@ -271,17 +276,19 @@ public class TestJsonReader extends BaseTestQuery {
   public void testSumWithTypeCase() throws Exception {
     try {
       testBuilder()
-              .sqlQuery("select sum(cast(f1 as bigint)) sum_f1 from " +
-                "(select case when is_bigint(field1) then field1 " +
-                "when is_list(field1) then field1[0] when is_map(field1) then t.field1.inner1 end f1 " +
-                "from cp.`jsoninput/union/a.json` t)")
-              .ordered()
-              .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
-              .baselineColumns("sum_f1")
-              .baselineValues(9L)
-              .go();
+          .sqlQuery("select sum(cast(f1 as bigint)) sum_f1 from " +
+            "(select case when is_bigint(field1) then field1 " +
+            "when is_list(field1) then field1[0] when is_map(field1) then t.field1.inner1 end f1 " +
+            "from cp.`jsoninput/union/a.json` t)")
+          .ordered()
+          .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+          .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
+          .baselineColumns("sum_f1")
+          .baselineValues(9L)
+          .go();
     } finally {
       resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
     }
   }
 
@@ -294,6 +301,7 @@ public class TestJsonReader extends BaseTestQuery {
               .sqlQuery("select a + b c from cp.`jsoninput/union/b.json`")
               .ordered()
               .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+              .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
               .baselineColumns("c")
               .baselineValues(3L)
               .baselineValues(7.0)
@@ -301,6 +309,7 @@ public class TestJsonReader extends BaseTestQuery {
               .go();
     } finally {
       resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
     }
   }
 
@@ -322,11 +331,13 @@ public class TestJsonReader extends BaseTestQuery {
               .sqlQuery("select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs.tmp.multi_batch t")
               .ordered()
               .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+              .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
               .baselineColumns("sum")
               .baselineValues(20000L)
               .go();
     } finally {
       resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
     }
   }
 
@@ -353,15 +364,17 @@ public class TestJsonReader extends BaseTestQuery {
               .sqlQuery("select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs.tmp.multi_file t")
               .ordered()
               .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+              .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
               .baselineColumns("sum")
               .baselineValues(20000L)
               .go();
     } finally {
       resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
     }
   }
 
-  // V1 version of the test. See TsetJsonReaderQueries for the V2 version.
+  // V1 version of the test. See TestJsonReaderQueries for the V2 version.
 
   @Test
   public void drill_4032() throws Exception {
@@ -377,10 +390,10 @@ public class TestJsonReader extends BaseTestQuery {
     os.write("{\"col1\": \"val1\",\"col2\": null}".getBytes());
     os.flush();
     os.close();
-    testNoResult("select t.col2.col3 from dfs.tmp.drill_4032 t");
+    runBoth(() -> testNoResult("select t.col2.col3 from dfs.tmp.drill_4032 t"));
   }
 
-  @Test
+  @Test // TODO: move this setup into @BeforeClass and split doDrill_4479 into three tests
   public void drill_4479() throws Exception {
     File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4479"));
     table_dir.mkdir();
@@ -394,7 +407,7 @@ public class TestJsonReader extends BaseTestQuery {
     os.flush();
     os.close();
 
-    runBoth(() -> doDrill_4479());
+    runBoth(this::doDrill_4479);
   }
 
   private void doDrill_4479() throws Exception {
@@ -436,7 +449,7 @@ public class TestJsonReader extends BaseTestQuery {
       writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
     }
 
-    runBoth(() -> doTestFlattenEmptyArrayWithAllTextMode());
+    runBoth(this::doTestFlattenEmptyArrayWithAllTextMode);
   }
 
   private void doTestFlattenEmptyArrayWithAllTextMode() throws Exception {
@@ -468,7 +481,7 @@ public class TestJsonReader extends BaseTestQuery {
       writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
     }
 
-    runBoth(() -> doTestFlattenEmptyArrayWithUnionType());
+    runBoth(this::doTestFlattenEmptyArrayWithUnionType);
   }
 
   private void doTestFlattenEmptyArrayWithUnionType() throws Exception {
@@ -554,6 +567,7 @@ public class TestJsonReader extends BaseTestQuery {
         .sqlQuery("select t.rk.a as a from dfs.`%s` t", fileName)
         .ordered()
         .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type`=true")
+        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
         .baselineColumns("a")
         .baselineValues(map)
         .baselineValues("2")
@@ -561,6 +575,7 @@ public class TestJsonReader extends BaseTestQuery {
 
     } finally {
       resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
     }
   }
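A recurring edit throughout TestJsonReader above: every union-type test gains a second option-setting query that forces the V1 reader, plus a matching reset in the finally block, presumably because these UNION vector cases are not handled by the V2 reader. The consolidated pattern, as a sketch:

    try {
      testBuilder()
        .sqlQuery("select * from cp.`jsoninput/union/a.json`")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
        .baselineColumns("field1", "field2")
        .baselineValues(1L, 1.2)
        .go();
    } finally {
      resetSessionOption(ExecConstants.ENABLE_UNION_TYPE_KEY);
      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
    }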
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderFns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderFns.java
index 5b2fb24741..000dbaac39 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderFns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderFns.java
@@ -24,7 +24,6 @@ import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
-import org.apache.drill.exec.vector.complex.writer.TestJsonReader;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.rowSet.RowSetComparison;
 import org.junit.BeforeClass;
@@ -56,7 +55,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testEmptyList() throws Exception {
-    runBoth(() -> doTestEmptyList());
+    runBoth(this::doTestEmptyList);
   }
 
   private void doTestEmptyList() throws Exception {
@@ -77,7 +76,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testRepeatedCountStr() throws Exception {
-    runBoth(() -> doTestRepeatedCountStr());
+    runBoth(this::doTestRepeatedCountStr);
   }
 
   private void doTestRepeatedCountStr() throws Exception {
@@ -93,7 +92,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testRepeatedCountInt() throws Exception {
-    runBoth(() -> doTestRepeatedCountInt());
+    runBoth(this::doTestRepeatedCountInt);
   }
 
   private void doTestRepeatedCountInt() throws Exception {
@@ -109,7 +108,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testRepeatedCountFloat4() throws Exception {
-    runBoth(() -> doTestRepeatedCountFloat4());
+    runBoth(this::doTestRepeatedCountFloat4);
   }
 
   private void doTestRepeatedCountFloat4() throws Exception {
@@ -125,7 +124,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testRepeatedCountVarchar() throws Exception {
-    runBoth(() -> doTestRepeatedCountVarchar());
+    runBoth(this::doTestRepeatedCountVarchar);
   }
 
   private void doTestRepeatedCountVarchar() throws Exception {
@@ -141,7 +140,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testRepeatedCountBit() throws Exception {
-    runBoth(() -> doTestRepeatedCountBit());
+    runBoth(this::doTestRepeatedCountBit);
   }
 
   private void doTestRepeatedCountBit() throws Exception {
@@ -167,7 +166,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testRepeatedContainsStr() throws Exception {
-    runBoth(() -> doTestRepeatedContainsStr());
+    runBoth(this::doTestRepeatedContainsStr);
   }
 
   private void doTestRepeatedContainsStr() throws Exception {
@@ -183,7 +182,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testRepeatedContainsInt() throws Exception {
-    runBoth(() -> doTestRepeatedContainsInt());
+    runBoth(this::doTestRepeatedContainsInt);
   }
 
   private void doTestRepeatedContainsInt() throws Exception {
@@ -199,7 +198,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testRepeatedContainsFloat4() throws Exception {
-    runBoth(() -> doTestRepeatedContainsFloat4());
+    runBoth(this::doTestRepeatedContainsFloat4);
   }
 
   private void doTestRepeatedContainsFloat4() throws Exception {
@@ -215,7 +214,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testRepeatedContainsVarchar() throws Exception {
-    runBoth(() -> doTestRepeatedContainsVarchar());
+    runBoth(this::doTestRepeatedContainsVarchar);
   }
 
   private void doTestRepeatedContainsVarchar() throws Exception {
@@ -231,7 +230,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testRepeatedContainsBitTrue() throws Exception {
-    runBoth(() -> doTestRepeatedContainsBitTrue());
+    runBoth(this::doTestRepeatedContainsBitTrue);
   }
 
   private void doTestRepeatedContainsBitTrue() throws Exception {
@@ -247,7 +246,7 @@ public class TestJsonReaderFns extends BaseTestJsonReader {
 
   @Test
   public void testRepeatedContainsBitFalse() throws Exception {
-    runBoth(() -> doTestRepeatedContainsBitFalse());
+    runBoth(this::doTestRepeatedContainsBitFalse);
   }
 
   private void doTestRepeatedContainsBitFalse() throws Exception {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderQueries.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderQueries.java
index 3fcad38e00..f8dff11f54 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderQueries.java
@@ -184,7 +184,7 @@ public class TestJsonReaderQueries extends BaseTestJsonReader {
   @Test
   @Ignore("broken")
   public void testFieldSelectionBug() throws Exception {
-    runBoth(() -> doTestFieldSelectionBug());
+    runBoth(this::doTestFieldSelectionBug);
   }
 
   private void doTestFieldSelectionBug() throws Exception {
@@ -221,7 +221,7 @@ public class TestJsonReaderQueries extends BaseTestJsonReader {
 
   @Test
   public void testReadCompressed() throws Exception {
-    runBoth(() -> doTestReadCompressed());
+    runBoth(this::doTestReadCompressed);
   }
 
   private void doTestReadCompressed() throws Exception {
@@ -269,7 +269,7 @@ public class TestJsonReaderQueries extends BaseTestJsonReader {
 
   @Test
   public void testDrill_1419() throws Exception {
-    runBoth(() -> doTestDrill_1419());
+    runBoth(this::doTestDrill_1419);
   }
 
   private void doTestDrill_1419() throws Exception {
@@ -293,7 +293,7 @@ public class TestJsonReaderQueries extends BaseTestJsonReader {
 
   @Test
   public void testSingleColumnRead_vector_fill_bug() throws Exception {
-    runBoth(() -> doTestSingleColumnRead_vector_fill_bug());
+    runBoth(this::doTestSingleColumnRead_vector_fill_bug);
   }
 
   private void doTestSingleColumnRead_vector_fill_bug() throws Exception {
@@ -304,7 +304,7 @@ public class TestJsonReaderQueries extends BaseTestJsonReader {
 
   @Test
   public void testNonExistentColumnReadAlone() throws Exception {
-    runBoth(() -> doTestNonExistentColumnReadAlone());
+    runBoth(this::doTestNonExistentColumnReadAlone);
   }
 
   private void doTestNonExistentColumnReadAlone() throws Exception {
@@ -315,7 +315,7 @@ public class TestJsonReaderQueries extends BaseTestJsonReader {
 
   @Test
   public void testAllTextMode() throws Exception {
-    runBoth(() -> doTestAllTextMode());
+    runBoth(this::doTestAllTextMode);
   }
 
   private void doTestAllTextMode() throws Exception {
@@ -583,7 +583,6 @@ public class TestJsonReaderQueries extends BaseTestJsonReader {
         os.write("{\"col1\": \"val4\", \"col2\": null}");
       }
       String sql = "select t.col1, t.col2.col3 from dfs.tmp.drill_4032 t order by col1";
-//      String sql = "select t.col1, t.col2.col3 from dfs.tmp.drill_4032 t";
       RowSet results = runTest(sql);
       results.print();
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderWithSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderWithSchema.java
index c0d4a4b80f..b2c26a6824 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderWithSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderWithSchema.java
@@ -1,10 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.drill.exec.store.json;
 
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.test.ClusterFixture;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestJsonReaderWithSchema extends BaseTestJsonReader {
 
+  @BeforeClass
+  public static void setup() throws Exception {
+    startCluster(ClusterFixture.builder(dirTestWatcher));
+  }
+
   @Test
   public void testSelectFromListWithCase() throws Exception {
     try {
@@ -14,6 +38,7 @@ public class TestJsonReaderWithSchema extends BaseTestJsonReader {
                 "from cp.`jsoninput/union/a.json`) where a is not null")
               .ordered()
               .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+              .optionSettingQueriesForTestQuery("alter session set `store.json.enable_v2_reader` = false")
               .baselineColumns("a", "type")
               .baselineValues(13L, "BIGINT")
               .go();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java
index 7b0a61c496..aa55f791e0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java
@@ -17,6 +17,9 @@
  */
 package org.apache.drill.exec.store.json;
 
+import static org.apache.drill.exec.ExecConstants.ENABLE_V2_JSON_READER_KEY;
+import static org.apache.drill.exec.ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG;
+import static org.apache.drill.exec.ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -45,12 +48,12 @@ public class TestJsonRecordReader extends BaseTestQuery {
     dirTestWatcher.copyResourceToRoot(Paths.get("jsoninput/drill_3353"));
   }
 
-  private void enableV2Reader(boolean enable) throws Exception {
-    alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  private void enableV2Reader(boolean enable) {
+    alterSession(ENABLE_V2_JSON_READER_KEY, enable);
   }
 
-  private void resetV2Reader() throws Exception {
-    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  private void resetV2Reader() {
+    resetSessionOption(ENABLE_V2_JSON_READER_KEY);
   }
 
   public interface TestWrapper {
@@ -79,7 +82,12 @@ public class TestJsonRecordReader extends BaseTestQuery {
 
   @Test
   public void testDateJsonInput() throws Exception {
-    test("select `date`, AGE(`date`, CAST('2019-09-30 20:47:43' as timestamp)) from cp.`jsoninput/input2.json` limit 10 ");
+    try {
+      alterSession(ExecConstants.JSON_EXTENDED_TYPES_KEY, true);
+      test("select `date`, AGE(`date`, CAST('2019-09-30 20:47:43' as timestamp)) from cp.`jsoninput/input2.json` limit 10 ");
+    } finally {
+      resetSessionOption(ExecConstants.JSON_EXTENDED_TYPES_KEY);
+    }
   }
 
   @Test
@@ -115,7 +123,7 @@ public class TestJsonRecordReader extends BaseTestQuery {
   // DRILL-1634 : retrieve an element in a nested array in a repeated map.
   // RepeatedMap (Repeated List (Repeated varchar))
   public void testNestedArrayInRepeatedMap() throws Exception {
-    runBoth(() -> doTestNestedArrayInRepeatedMap());
+    runBoth(this::doTestNestedArrayInRepeatedMap);
   }
 
   private void doTestNestedArrayInRepeatedMap() throws Exception {
@@ -126,7 +134,7 @@ public class TestJsonRecordReader extends BaseTestQuery {
 
   @Test
   public void testEmptyMapDoesNotFailValueCapacityCheck() throws Exception {
-    runBoth(() -> doTestEmptyMapDoesNotFailValueCapacityCheck());
+    runBoth(this::doTestEmptyMapDoesNotFailValueCapacityCheck);
   }
 
   private void doTestEmptyMapDoesNotFailValueCapacityCheck() throws Exception {
@@ -136,13 +144,16 @@ public class TestJsonRecordReader extends BaseTestQuery {
 
   @Test
   public void testEnableAllTextMode() throws Exception {
-    runBoth(() -> doTestEnableAllTextMode());
+    runBoth(this::doTestEnableAllTextMode);
   }
 
   private void doTestEnableAllTextMode() throws Exception {
-    alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
-    test("select * from cp.`jsoninput/big_numeric.json`");
-    resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
+    try {
+      alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
+      test("select * from cp.`jsoninput/big_numeric.json`");
+    } finally {
+      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
+    }
   }
 
   @Test
@@ -168,26 +179,32 @@ public class TestJsonRecordReader extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   // DRILL-1832
   public void testJsonWithNulls1() throws Exception {
-    runBoth(() -> doTestJsonWithNulls1());
+    runBoth(this::doTestJsonWithNulls1);
   }
 
   private void doTestJsonWithNulls1() throws Exception {
     final String query = "select * from cp.`jsoninput/twitter_43.json`";
-    testBuilder().sqlQuery(query).unOrdered()
-        .jsonBaselineFile("jsoninput/drill-1832-1-result.json").go();
+    testBuilder()
+      .sqlQuery(query)
+      .unOrdered()
+      .jsonBaselineFile("jsoninput/drill-1832-1-result.json")
+      .go();
   }
 
   @Test
   @Category(UnlikelyTest.class)
   // DRILL-1832
   public void testJsonWithNulls2() throws Exception {
-    runBoth(() -> doTestJsonWithNulls2());
+    runBoth(this::doTestJsonWithNulls2);
   }
 
   private void doTestJsonWithNulls2() throws Exception {
     final String query = "select SUM(1) as `sum_Number_of_Records_ok` from cp.`jsoninput/twitter_43.json` having (COUNT(1) > 0)";
-    testBuilder().sqlQuery(query).unOrdered()
-        .jsonBaselineFile("jsoninput/drill-1832-2-result.json").go();
+    testBuilder()
+      .sqlQuery(query)
+      .unOrdered()
+      .jsonBaselineFile("jsoninput/drill-1832-2-result.json")
+      .go();
   }
 
   // V1-only test. In V2, this works. See TestJsonReaderQueries.
@@ -197,16 +214,14 @@ public class TestJsonRecordReader extends BaseTestQuery {
     try {
       enableV2Reader(false);
       testBuilder()
-          .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
-          .unOrdered().jsonBaselineFile("jsoninput/mixed_number_types.json")
-          .build().run();
+        .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
+        .unOrdered().jsonBaselineFile("jsoninput/mixed_number_types.json")
+        .go();
       fail("Mixed number types verification failed, expected failure on conflicting number types.");
     } catch (Exception ex) {
       // this indicates successful completion of the test
-      assertTrue(ex
-          .getMessage()
-          .contains(
-              "You tried to write a BigInt type when you are using a ValueWriter of type NullableFloat8WriterImpl."));
+      assertTrue(ex.getMessage()
+        .contains("You tried to write a BigInt type when you are using a ValueWriter of type NullableFloat8WriterImpl."));
     } finally {
       resetV2Reader();
     }
@@ -214,16 +229,19 @@ public class TestJsonRecordReader extends BaseTestQuery {
 
   @Test
   public void testMixedNumberTypesInAllTextMode() throws Exception {
-    runBoth(() -> doTestMixedNumberTypesInAllTextMode());
+    runBoth(this::doTestMixedNumberTypesInAllTextMode);
   }
 
   private void doTestMixedNumberTypesInAllTextMode() throws Exception {
     try {
       alterSession("store.json.all_text_mode", true);
       testBuilder()
-          .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
-          .unOrdered().baselineColumns("a").baselineValues("5.2")
-          .baselineValues("6").build().run();
+        .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
+        .unOrdered()
+        .baselineColumns("a")
+        .baselineValues("5.2")
+        .baselineValues("6")
+        .go();
     } finally {
       resetSessionOption("store.json.all_text_mode");
     }
@@ -234,9 +252,12 @@ public class TestJsonRecordReader extends BaseTestQuery {
     try {
       alterSession(ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE, true);
       testBuilder()
-          .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
-          .unOrdered().baselineColumns("a").baselineValues(5.2D)
-          .baselineValues(6D).build().run();
+        .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
+        .unOrdered()
+        .baselineColumns("a")
+        .baselineValues(5.2D)
+        .baselineValues(6D)
+        .go();
     } finally {
       resetSessionOption(ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
     }
@@ -246,7 +267,7 @@ public class TestJsonRecordReader extends BaseTestQuery {
   public void drill_3353() throws Exception {
     try {
       alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
-       test("create table dfs.tmp.drill_3353 as select a from dfs.`jsoninput/drill_3353` where e = true");
+      test("create table dfs.tmp.drill_3353 as select a from dfs.`jsoninput/drill_3353` where e = true");
       runBoth(this::doDrill_3353);
     } finally {
       resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
@@ -274,8 +295,11 @@ public class TestJsonRecordReader extends BaseTestQuery {
   private void doTestNestedFilter() throws Exception {
     String query = "select a from cp.`jsoninput/nestedFilter.json` t where t.a.b = 1";
     String baselineQuery = "select * from cp.`jsoninput/nestedFilter.json` t where t.a.b = 1";
-    testBuilder().sqlQuery(query).unOrdered().sqlBaselineQuery(baselineQuery)
-        .go();
+    testBuilder()
+      .sqlQuery(query)
+      .unOrdered()
+      .sqlBaselineQuery(baselineQuery)
+      .go();
   }
 
   @Test
@@ -284,24 +308,20 @@ public class TestJsonRecordReader extends BaseTestQuery {
   /* Test for CountingJSONReader */
   public void testCountingQuerySkippingInvalidJSONRecords() throws Exception {
     try {
-      String set = "alter session set `"
-        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = true";
-      String set1 = "alter session set `"
-        + ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG
-        + "` = true";
       String query = "select count(*) from cp.`jsoninput/drill4653/file.json`";
 
-      testNoResult(set);
-      testNoResult(set1);
       testBuilder()
         .unOrdered()
+        .disableSessionOption(ENABLE_V2_JSON_READER_KEY)
+        .enableSessionOption(JSON_READER_SKIP_INVALID_RECORDS_FLAG)
+        .enableSessionOption(JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG)
         .sqlQuery(query)
         .sqlBaselineQuery(query)
         .go();
     } finally {
-      String set = "alter session set `"
-        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = false";
-      testNoResult(set);
+      resetSessionOption(ENABLE_V2_JSON_READER_KEY);
+      resetSessionOption(JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG);
+      resetSessionOption(JSON_READER_SKIP_INVALID_RECORDS_FLAG);
     }
   }
 
@@ -316,8 +336,11 @@ public class TestJsonRecordReader extends BaseTestQuery {
   private void doTestCountingQueryNotSkippingInvalidJSONRecords() throws Exception {
     try {
       String query = "select count(*) from cp.`jsoninput/drill4653/file.json`";
-      testBuilder().unOrdered().sqlQuery(query).sqlBaselineQuery(query).build()
-          .run();
+      testBuilder()
+        .unOrdered()
+        .sqlQuery(query)
+        .sqlBaselineQuery(query)
+        .go();
     } catch (Exception ex) {
      // do nothing, just return
      return;
@@ -331,24 +354,20 @@ public class TestJsonRecordReader extends BaseTestQuery {
   /* Test for JSONReader */
   public void testNotCountingQuerySkippingInvalidJSONRecords() throws Exception {
     try {
-      String set = "alter session set `"
-        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = true";
-      String set1 = "alter session set `"
-        + ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG
-        + "` = true";
       String query = "select sum(balance) from cp.`jsoninput/drill4653/file.json`";
-      testNoResult(set);
-      testNoResult(set1);
       testBuilder()
         .unOrdered()
+        .disableSessionOption(ENABLE_V2_JSON_READER_KEY)
+        .enableSessionOption(JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG)
+        .enableSessionOption(JSON_READER_SKIP_INVALID_RECORDS_FLAG)
         .sqlQuery(query)
         .sqlBaselineQuery(query)
         .go();
     }
     finally {
-      String set = "alter session set `"
-        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = false";
-      testNoResult(set);
+      resetSessionOption(ENABLE_V2_JSON_READER_KEY);
+      resetSessionOption(JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG);
+      resetSessionOption(JSON_READER_SKIP_INVALID_RECORDS_FLAG);
     }
   }
 
@@ -356,16 +375,18 @@ public class TestJsonRecordReader extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   // See DRILL-4653
   /* Test for JSONReader */
-  public void testNotCountingQueryNotSkippingInvalidJSONRecords()
-      throws Exception {
+  public void testNotCountingQueryNotSkippingInvalidJSONRecords() throws Exception {
     runBoth(this::doTestNotCountingQueryNotSkippingInvalidJSONRecords);
   }
 
   private void doTestNotCountingQueryNotSkippingInvalidJSONRecords() throws Exception {
     try {
       String query = "select sum(balance) from cp.`jsoninput/drill4653/file.json`";
-      testBuilder().unOrdered().sqlQuery(query).sqlBaselineQuery(query).build()
-          .run();
+      testBuilder()
+        .unOrdered()
+        .sqlQuery(query)
+        .sqlBaselineQuery(query)
+        .go();
     } catch (Exception ex) {
      // do nothing, just return
      return;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestVarlenDecimal.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestVarlenDecimal.java
index 8cae6689ab..822497cd8a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestVarlenDecimal.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestVarlenDecimal.java
@@ -49,10 +49,6 @@ public class TestVarlenDecimal extends ClusterTest {
   @BeforeClass
   public static void setUp() throws Exception {
     startCluster(ClusterFixture.builder(dirTestWatcher));
-  }
-
-  @BeforeClass
-  public static void enableDecimalDataType() {
     client.alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
   }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
index cb205453b1..2bc80d3b84 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
@@ -40,7 +40,7 @@ public class TestExtendedTypes extends BaseTestQuery {
 
   @Test
   public void checkReadWriteExtended() throws Exception {
-    runBoth(() -> doCheckReadWriteExtended());
+    runBoth(this::doCheckReadWriteExtended);
   }
 
   private void doCheckReadWriteExtended() throws Exception {
@@ -73,7 +73,7 @@ public class TestExtendedTypes extends BaseTestQuery {
 
   @Test
   public void testMongoExtendedTypes() throws Exception {
-    runBoth(() -> doTestMongoExtendedTypes());
+    runBoth(this::doTestMongoExtendedTypes);
   }
 
   private void doTestMongoExtendedTypes() throws Exception {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
index 16f302e750..3c94f4f4ea 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
@@ -196,7 +196,7 @@ public class ClusterFixture extends BaseFixture implements AutoCloseable {
     }
   }
 
-  private void createConfig() throws Exception {
+  private void createConfig() {
 
     // Create a config
     // Because of the way DrillConfig works, we can set the ZK
@@ -278,7 +278,7 @@ public class ClusterFixture extends BaseFixture implements AutoCloseable {
     pluginRegistry.put(MockStorageEngineConfig.NAME, config);
   }
 
-  private void applyOptions() throws Exception {
+  private void applyOptions() {
 
     // Apply system options
     if (builder.systemOptions != null) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
index 43f1396070..1fb916b5f7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
@@ -106,7 +106,7 @@ public class ClusterTest extends DrillTest {
     return ClusterFixture.getResource(resource);
   }
 
-  public void runAndLog(String sqlQuery) throws Exception {
+  public void runAndLog(String sqlQuery) {
     client.runQueriesAndLog(sqlQuery);
   }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/TestBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/TestBuilder.java
index fecb540c1e..80ee9ac602 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/TestBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/TestBuilder.java
@@ -220,6 +220,16 @@ public class TestBuilder {
     return this;
   }
 
+  public TestBuilder enableSessionOption(String option) {
+    optionSettingQueriesForTestQuery("ALTER SESSION SET `" + option + "` = 'true'");
+    return this;
+  }
+
+  public TestBuilder disableSessionOption(String option) {
+    optionSettingQueriesForTestQuery("ALTER SESSION SET `" + option + "` = 'false'");
+    return this;
+  }
+
   public TestBuilder approximateEquality() {
     return approximateEquality(0.1);
   }
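The two new TestBuilder helpers replace the ALTER SESSION statements that tests previously assembled by string concatenation. At a call site they read as in the TestJsonRecordReader changes above:

    String query = "select count(*) from cp.`jsoninput/drill4653/file.json`";
    try {
      testBuilder()
        .unOrdered()
        .disableSessionOption(ENABLE_V2_JSON_READER_KEY)
        .enableSessionOption(JSON_READER_SKIP_INVALID_RECORDS_FLAG)
        .enableSessionOption(JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG)
        .sqlQuery(query)
        .sqlBaselineQuery(query)
        .go();
    } finally {
      // The builder only issues ALTER SESSION before the test query runs,
      // so each test still resets the options it touched.
      resetSessionOption(ENABLE_V2_JSON_READER_KEY);
      resetSessionOption(JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG);
      resetSessionOption(JSON_READER_SKIP_INVALID_RECORDS_FLAG);
    }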
diff --git a/exec/java-exec/src/test/resources/rest/cust20.json b/exec/java-exec/src/test/resources/rest/cust20.json
index eb0e55dd14..f7ec9ca404 100644
--- a/exec/java-exec/src/test/resources/rest/cust20.json
+++ b/exec/java-exec/src/test/resources/rest/cust20.json
@@ -1,28 +1,28 @@
 !\{"queryId":"[^"]+"
-,"columns":["employee_id","full_name","first_name","last_name","position_id","position_title","store_id","department_id","birth_date","hire_date","salary","supervisor_id","education_level","marital_status","gender","management_role"]
-,"metadata":["BIGINT","VARCHAR","VARCHAR","VARCHAR","BIGINT","VARCHAR","BIGINT","BIGINT","VARCHAR","VARCHAR","FLOAT8","BIGINT","VARCHAR","VARCHAR","VARCHAR","VARCHAR"]
+,"columns":["employee_id","full_name","first_name","last_name","position_id","position_title","store_id","department_id","birth_date","hire_date","salary","supervisor_id","education_level","marital_status","gender","management_role","end_date"]
+,"metadata":["BIGINT","VARCHAR","VARCHAR","VARCHAR","BIGINT","VARCHAR","BIGINT","BIGINT","VARCHAR","VARCHAR","FLOAT8","BIGINT","VARCHAR","VARCHAR","VARCHAR","VARCHAR","VARCHAR"]
 ,"attemptedAutoLimit":0
 ,"rows":[
-{"employee_id":1,"full_name":"Sheri Nowmer","first_name":"Sheri","last_name":"Nowmer","position_id":1,"position_title":"President","store_id":0,"department_id":1,"birth_date":"1961-08-26","hire_date":"1994-12-01 00:00:00.0","salary":80000.0,"supervisor_id":0,"education_level":"Graduate Degree","marital_status":"S","gender":"F","management_role":"Senior Management"}
-,{"employee_id":2,"full_name":"Derrick Whelply","first_name":"Derrick","last_name":"Whelply","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1915-07-03","hire_date":"1994-12-01 00:00:00.0","salary":40000.0,"supervisor_id":1,"education_level":"Graduate Degree","marital_status":"M","gender":"M","management_role":"Senior Management"}
-,{"employee_id":4,"full_name":"Michael Spence","first_name":"Michael","last_name":"Spence","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1969-06-20","hire_date":"1998-01-01 00:00:00.0","salary":40000.0,"supervisor_id":1,"education_level":"Graduate Degree","marital_status":"S","gender":"M","management_role":"Senior Management"}
-,{"employee_id":5,"full_name":"Maya Gutierrez","first_name":"Maya","last_name":"Gutierrez","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1951-05-10","hire_date":"1998-01-01 00:00:00.0","salary":35000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management"}
-,{"employee_id":6,"full_name":"Roberta Damstra","first_name":"Roberta","last_name":"Damstra","position_id":3,"position_title":"VP Information Systems","store_id":0,"department_id":2,"birth_date":"1942-10-08","hire_date":"1994-12-01 00:00:00.0","salary":25000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management"}
-,{"employee_id":7,"full_name":"Rebecca Kanagaki","first_name":"Rebecca","last_name":"Kanagaki","position_id":4,"position_title":"VP Human Resources","store_id":0,"department_id":3,"birth_date":"1949-03-27","hire_date":"1994-12-01 00:00:00.0","salary":15000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management"}
-,{"employee_id":8,"full_name":"Kim Brunner","first_name":"Kim","last_name":"Brunner","position_id":11,"position_title":"Store Manager","store_id":9,"department_id":11,"birth_date":"1922-08-10","hire_date":"1998-01-01 00:00:00.0","salary":10000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"S","gender":"F","management_role":"Store Management"}
-,{"employee_id":9,"full_name":"Brenda Blumberg","first_name":"Brenda","last_name":"Blumberg","position_id":11,"position_title":"Store Manager","store_id":21,"department_id":11,"birth_date":"1979-06-23","hire_date":"1998-01-01 00:00:00.0","salary":17000.0,"supervisor_id":5,"education_level":"Graduate Degree","marital_status":"M","gender":"F","management_role":"Store Management"}
-,{"employee_id":10,"full_name":"Darren Stanz","first_name":"Darren","last_name":"Stanz","position_id":5,"position_title":"VP Finance","store_id":0,"department_id":5,"birth_date":"1949-08-26","hire_date":"1994-12-01 00:00:00.0","salary":50000.0,"supervisor_id":1,"education_level":"Partial College","marital_status":"M","gender":"M","management_role":"Senior Management"}
-,{"employee_id":11,"full_name":"Jonathan Murraiin","first_name":"Jonathan","last_name":"Murraiin","position_id":11,"position_title":"Store Manager","store_id":1,"department_id":11,"birth_date":"1967-06-20","hire_date":"1998-01-01 00:00:00.0","salary":15000.0,"supervisor_id":5,"education_level":"Graduate Degree","marital_status":"S","gender":"M","management_role":"Store Management"}
-,{"employee_id":12,"full_name":"Jewel Creek","first_name":"Jewel","last_name":"Creek","position_id":11,"position_title":"Store Manager","store_id":5,"department_id":11,"birth_date":"1971-10-18","hire_date":"1998-01-01 00:00:00.0","salary":8500.0,"supervisor_id":5,"education_level":"Graduate Degree","marital_status":"S","gender":"F","management_role":"Store Management"}
-,{"employee_id":13,"full_name":"Peggy Medina","first_name":"Peggy","last_name":"Medina","position_id":11,"position_title":"Store Manager","store_id":10,"department_id":11,"birth_date":"1975-10-12","hire_date":"1998-01-01 00:00:00.0","salary":15000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"S","gender":"F","management_role":"Store Management"}
-,{"employee_id":14,"full_name":"Bryan Rutledge","first_name":"Bryan","last_name":"Rutledge","position_id":11,"position_title":"Store Manager","store_id":8,"department_id":11,"birth_date":"1912-07-09","hire_date":"1998-01-01 00:00:00.0","salary":17000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"M","gender":"M","management_role":"Store Management"}
-,{"employee_id":15,"full_name":"Walter Cavestany","first_name":"Walter","last_name":"Cavestany","position_id":11,"position_title":"Store Manager","store_id":4,"department_id":11,"birth_date":"1941-11-05","hire_date":"1998-01-01 00:00:00.0","salary":12000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"M","gender":"M","management_role":"Store Management"}
-,{"employee_id":16,"full_name":"Peggy Planck","first_name":"Peggy","last_name":"Planck","position_id":11,"position_title":"Store Manager","store_id":12,"department_id":11,"birth_date":"1919-06-02","hire_date":"1998-01-01 00:00:00.0","salary":17000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"S","gender":"F","management_role":"Store Management"}
-,{"employee_id":17,"full_name":"Brenda Marshall","first_name":"Brenda","last_name":"Marshall","position_id":11,"position_title":"Store Manager","store_id":18,"department_id":11,"birth_date":"1928-03-20","hire_date":"1998-01-01 00:00:00.0","salary":10000.0,"supervisor_id":5,"education_level":"Partial College","marital_status":"S","gender":"F","management_role":"Store Management"}
-,{"employee_id":18,"full_name":"Daniel Wolter","first_name":"Daniel","last_name":"Wolter","position_id":11,"position_title":"Store Manager","store_id":19,"department_id":11,"birth_date":"1914-09-21","hire_date":"1998-01-01 00:00:00.0","salary":17000.0,"supervisor_id":4,"education_level":"Partial College","marital_status":"S","gender":"M","management_role":"Store Management"}
-,{"employee_id":19,"full_name":"Dianne Collins","first_name":"Dianne","last_name":"Collins","position_id":11,"position_title":"Store Manager","store_id":20,"department_id":11,"birth_date":"1953-07-20","hire_date":"1998-01-01 00:00:00.0","salary":10000.0,"supervisor_id":4,"education_level":"Bachelors Degree","marital_status":"S","gender":"F","management_role":"Store Management"}
-,{"employee_id":20,"full_name":"Beverly Baker","first_name":"Beverly","last_name":"Baker","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1974-04-16","hire_date":"1994-12-01 00:00:00.0","salary":30000.0,"supervisor_id":2,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management"}
-,{"employee_id":21,"full_name":"Pedro Castillo","first_name":"Pedro","last_name":"Castillo","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1918-11-04","hire_date":"1994-12-01 00:00:00.0","salary":35000.0,"supervisor_id":2,"education_level":"Bachelors Degree","marital_status":"M","gender":"M","management_role":"Senior Management"}
+{"employee_id":1,"full_name":"Sheri Nowmer","first_name":"Sheri","last_name":"Nowmer","position_id":1,"position_title":"President","store_id":0,"department_id":1,"birth_date":"1961-08-26","hire_date":"1994-12-01 00:00:00.0","salary":80000.0,"supervisor_id":0,"education_level":"Graduate Degree","marital_status":"S","gender":"F","management_role":"Senior Management","end_date":null}
+,{"employee_id":2,"full_name":"Derrick Whelply","first_name":"Derrick","last_name":"Whelply","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1915-07-03","hire_date":"1994-12-01 00:00:00.0","salary":40000.0,"supervisor_id":1,"education_level":"Graduate Degree","marital_status":"M","gender":"M","management_role":"Senior Management","end_date":null}
+,{"employee_id":4,"full_name":"Michael Spence","first_name":"Michael","last_name":"Spence","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1969-06-20","hire_date":"1998-01-01 00:00:00.0","salary":40000.0,"supervisor_id":1,"education_level":"Graduate Degree","marital_status":"S","gender":"M","management_role":"Senior Management","end_date":null}
+,{"employee_id":5,"full_name":"Maya Gutierrez","first_name":"Maya","last_name":"Gutierrez","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1951-05-10","hire_date":"1998-01-01 00:00:00.0","salary":35000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management","end_date":null}
+,{"employee_id":6,"full_name":"Roberta Damstra","first_name":"Roberta","last_name":"Damstra","position_id":3,"position_title":"VP Information Systems","store_id":0,"department_id":2,"birth_date":"1942-10-08","hire_date":"1994-12-01 00:00:00.0","salary":25000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management","end_date":null}
+,{"employee_id":7,"full_name":"Rebecca Kanagaki","first_name":"Rebecca","last_name":"Kanagaki","position_id":4,"position_title":"VP Human Resources","store_id":0,"department_id":3,"birth_date":"1949-03-27","hire_date":"1994-12-01 00:00:00.0","salary":15000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management","end_date":null}
+,{"employee_id":8,"full_name":"Kim Brunner","first_name":"Kim","last_name":"Brunner","position_id":11,"position_title":"Store Manager","store_id":9,"department_id":11,"birth_date":"1922-08-10","hire_date":"1998-01-01 00:00:00.0","salary":10000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"S","gender":"F","management_role":"Store Management","end_date":null}
+,{"employee_id":9,"full_name":"Brenda Blumberg","first_name":"Brenda","last_name":"Blumberg","position_id":11,"position_title":"Store Manager","store_id":21,"department_id":11,"birth_date":"1979-06-23","hire_date":"1998-01-01 00:00:00.0","salary":17000.0,"supervisor_id":5,"education_level":"Graduate Degree","marital_status":"M","gender":"F","management_role":"Store Management","end_date":null}
+,{"employee_id":10,"full_name":"Darren Stanz","first_name":"Darren","last_name":"Stanz","position_id":5,"position_title":"VP Finance","store_id":0,"department_id":5,"birth_date":"1949-08-26","hire_date":"1994-12-01 00:00:00.0","salary":50000.0,"supervisor_id":1,"education_level":"Partial College","marital_status":"M","gender":"M","management_role":"Senior Management","end_date":null}
+,{"employee_id":11,"full_name":"Jonathan Murraiin","first_name":"Jonathan","last_name":"Murraiin","position_id":11,"position_title":"Store Manager","store_id":1,"department_id":11,"birth_date":"1967-06-20","hire_date":"1998-01-01 00:00:00.0","salary":15000.0,"supervisor_id":5,"education_level":"Graduate Degree","marital_status":"S","gender":"M","management_role":"Store Management","end_date":null}
+,{"employee_id":12,"full_name":"Jewel Creek","first_name":"Jewel","last_name":"Creek","position_id":11,"position_title":"Store Manager","store_id":5,"department_id":11,"birth_date":"1971-10-18","hire_date":"1998-01-01 00:00:00.0","salary":8500.0,"supervisor_id":5,"education_level":"Graduate Degree","marital_status":"S","gender":"F","management_role":"Store Management","end_date":null}
+,{"employee_id":13,"full_name":"Peggy Medina","first_name":"Peggy","last_name":"Medina","position_id":11,"position_title":"Store Manager","store_id":10,"department_id":11,"birth_date":"1975-10-12","hire_date":"1998-01-01 00:00:00.0","salary":15000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"S","gender":"F","management_role":"Store Management","end_date":null}
+,{"employee_id":14,"full_name":"Bryan Rutledge","first_name":"Bryan","last_name":"Rutledge","position_id":11,"position_title":"Store Manager","store_id":8,"department_id":11,"birth_date":"1912-07-09","hire_date":"1998-01-01 00:00:00.0","salary":17000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"M","gender":"M","management_role":"Store Management","end_date":null}
+,{"employee_id":15,"full_name":"Walter Cavestany","first_name":"Walter","last_name":"Cavestany","position_id":11,"position_title":"Store Manager","store_id":4,"department_id":11,"birth_date":"1941-11-05","hire_date":"1998-01-01 00:00:00.0","salary":12000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"M","gender":"M","management_role":"Store Management","end_date":null}
+,{"employee_id":16,"full_name":"Peggy Planck","first_name":"Peggy","last_name":"Planck","position_id":11,"position_title":"Store Manager","store_id":12,"department_id":11,"birth_date":"1919-06-02","hire_date":"1998-01-01 00:00:00.0","salary":17000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"S","gender":"F","management_role":"Store Management","end_date":null}
+,{"employee_id":17,"full_name":"Brenda Marshall","first_name":"Brenda","last_name":"Marshall","position_id":11,"position_title":"Store Manager","store_id":18,"department_id":11,"birth_date":"1928-03-20","hire_date":"1998-01-01 00:00:00.0","salary":10000.0,"supervisor_id":5,"education_level":"Partial College","marital_status":"S","gender":"F","management_role":"Store Management","end_date":null}
+,{"employee_id":18,"full_name":"Daniel Wolter","first_name":"Daniel","last_name":"Wolter","position_id":11,"position_title":"Store Manager","store_id":19,"department_id":11,"birth_date":"1914-09-21","hire_date":"1998-01-01 00:00:00.0","salary":17000.0,"supervisor_id":4,"education_level":"Partial College","marital_status":"S","gender":"M","management_role":"Store Management","end_date":null}
+,{"employee_id":19,"full_name":"Dianne Collins","first_name":"Dianne","last_name":"Collins","position_id":11,"position_title":"Store Manager","store_id":20,"department_id":11,"birth_date":"1953-07-20","hire_date":"1998-01-01 00:00:00.0","salary":10000.0,"supervisor_id":4,"education_level":"Bachelors Degree","marital_status":"S","gender":"F","management_role":"Store Management","end_date":null}
+,{"employee_id":20,"full_name":"Beverly Baker","first_name":"Beverly","last_name":"Baker","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1974-04-16","hire_date":"1994-12-01 00:00:00.0","salary":30000.0,"supervisor_id":2,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management","end_date":null}
+,{"employee_id":21,"full_name":"Pedro Castillo","first_name":"Pedro","last_name":"Castillo","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1918-11-04","hire_date":"1994-12-01 00:00:00.0","salary":35000.0,"supervisor_id":2,"education_level":"Bachelors Degree","marital_status":"M","gender":"M","management_role":"Senior Management","end_date":null}
 ]
 ,"queryState":"COMPLETED"
 }
diff --git a/exec/java-exec/src/test/resources/rest/small.json b/exec/java-exec/src/test/resources/rest/small.json
index f97df9ac3a..3fa5beedc0 100644
--- a/exec/java-exec/src/test/resources/rest/small.json
+++ b/exec/java-exec/src/test/resources/rest/small.json
@@ -1,18 +1,18 @@
 !\{"queryId":"[^"]+"
-,"columns":["employee_id","full_name","first_name","last_name","position_id","position_title","store_id","department_id","birth_date","hire_date","salary","supervisor_id","education_level","marital_status","gender","management_role"]
-,"metadata":["BIGINT","VARCHAR","VARCHAR","VARCHAR","BIGINT","VARCHAR","BIGINT","BIGINT","VARCHAR","VARCHAR","FLOAT8","BIGINT","VARCHAR","VARCHAR","VARCHAR","VARCHAR"]
+,"columns":["employee_id","full_name","first_name","last_name","position_id","position_title","store_id","department_id","birth_date","hire_date","salary","supervisor_id","education_level","marital_status","gender","management_role","end_date"]
+,"metadata":["BIGINT","VARCHAR","VARCHAR","VARCHAR","BIGINT","VARCHAR","BIGINT","BIGINT","VARCHAR","VARCHAR","FLOAT8","BIGINT","VARCHAR","VARCHAR","VARCHAR","VARCHAR","VARCHAR"]
 ,"attemptedAutoLimit":10
 ,"rows":[
-{"employee_id":1,"full_name":"Sheri Nowmer","first_name":"Sheri","last_name":"Nowmer","position_id":1,"position_title":"President","store_id":0,"department_id":1,"birth_date":"1961-08-26","hire_date":"1994-12-01 00:00:00.0","salary":80000.0,"supervisor_id":0,"education_level":"Graduate Degree","marital_status":"S","gender":"F","management_role":"Senior Management"}
-,{"employee_id":2,"full_name":"Derrick Whelply","first_name":"Derrick","last_name":"Whelply","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1915-07-03","hire_date":"1994-12-01 00:00:00.0","salary":40000.0,"supervisor_id":1,"education_level":"Graduate Degree","marital_status":"M","gender":"M","management_role":"Senior Management"}
-,{"employee_id":4,"full_name":"Michael Spence","first_name":"Michael","last_name":"Spence","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1969-06-20","hire_date":"1998-01-01 00:00:00.0","salary":40000.0,"supervisor_id":1,"education_level":"Graduate Degree","marital_status":"S","gender":"M","management_role":"Senior Management"}
-,{"employee_id":5,"full_name":"Maya Gutierrez","first_name":"Maya","last_name":"Gutierrez","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1951-05-10","hire_date":"1998-01-01 00:00:00.0","salary":35000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management"}
-,{"employee_id":6,"full_name":"Roberta Damstra","first_name":"Roberta","last_name":"Damstra","position_id":3,"position_title":"VP Information Systems","store_id":0,"department_id":2,"birth_date":"1942-10-08","hire_date":"1994-12-01 00:00:00.0","salary":25000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management"}
-,{"employee_id":7,"full_name":"Rebecca Kanagaki","first_name":"Rebecca","last_name":"Kanagaki","position_id":4,"position_title":"VP Human Resources","store_id":0,"department_id":3,"birth_date":"1949-03-27","hire_date":"1994-12-01 00:00:00.0","salary":15000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management"}
-,{"employee_id":8,"full_name":"Kim Brunner","first_name":"Kim","last_name":"Brunner","position_id":11,"position_title":"Store Manager","store_id":9,"department_id":11,"birth_date":"1922-08-10","hire_date":"1998-01-01 00:00:00.0","salary":10000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"S","gender":"F","management_role":"Store Management"}
-,{"employee_id":9,"full_name":"Brenda Blumberg","first_name":"Brenda","last_name":"Blumberg","position_id":11,"position_title":"Store Manager","store_id":21,"department_id":11,"birth_date":"1979-06-23","hire_date":"1998-01-01 00:00:00.0","salary":17000.0,"supervisor_id":5,"education_level":"Graduate Degree","marital_status":"M","gender":"F","management_role":"Store Management"}
-,{"employee_id":10,"full_name":"Darren Stanz","first_name":"Darren","last_name":"Stanz","position_id":5,"position_title":"VP Finance","store_id":0,"department_id":5,"birth_date":"1949-08-26","hire_date":"1994-12-01 00:00:00.0","salary":50000.0,"supervisor_id":1,"education_level":"Partial College","marital_status":"M","gender":"M","management_role":"Senior Management"}
-,{"employee_id":11,"full_name":"Jonathan Murraiin","first_name":"Jonathan","last_name":"Murraiin","position_id":11,"position_title":"Store Manager","store_id":1,"department_id":11,"birth_date":"1967-06-20","hire_date":"1998-01-01 00:00:00.0","salary":15000.0,"supervisor_id":5,"education_level":"Graduate Degree","marital_status":"S","gender":"M","management_role":"Store Management"}
+{"employee_id":1,"full_name":"Sheri Nowmer","first_name":"Sheri","last_name":"Nowmer","position_id":1,"position_title":"President","store_id":0,"department_id":1,"birth_date":"1961-08-26","hire_date":"1994-12-01 00:00:00.0","salary":80000.0,"supervisor_id":0,"education_level":"Graduate Degree","marital_status":"S","gender":"F","management_role":"Senior Management","end_date":null}
+,{"employee_id":2,"full_name":"Derrick Whelply","first_name":"Derrick","last_name":"Whelply","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1915-07-03","hire_date":"1994-12-01 00:00:00.0","salary":40000.0,"supervisor_id":1,"education_level":"Graduate Degree","marital_status":"M","gender":"M","management_role":"Senior Management","end_date":null}
+,{"employee_id":4,"full_name":"Michael Spence","first_name":"Michael","last_name":"Spence","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1969-06-20","hire_date":"1998-01-01 00:00:00.0","salary":40000.0,"supervisor_id":1,"education_level":"Graduate Degree","marital_status":"S","gender":"M","management_role":"Senior Management","end_date":null}
+,{"employee_id":5,"full_name":"Maya Gutierrez","first_name":"Maya","last_name":"Gutierrez","position_id":2,"position_title":"VP Country Manager","store_id":0,"department_id":1,"birth_date":"1951-05-10","hire_date":"1998-01-01 00:00:00.0","salary":35000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management","end_date":null}
+,{"employee_id":6,"full_name":"Roberta Damstra","first_name":"Roberta","last_name":"Damstra","position_id":3,"position_title":"VP Information Systems","store_id":0,"department_id":2,"birth_date":"1942-10-08","hire_date":"1994-12-01 00:00:00.0","salary":25000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management","end_date":null}
+,{"employee_id":7,"full_name":"Rebecca Kanagaki","first_name":"Rebecca","last_name":"Kanagaki","position_id":4,"position_title":"VP Human Resources","store_id":0,"department_id":3,"birth_date":"1949-03-27","hire_date":"1994-12-01 00:00:00.0","salary":15000.0,"supervisor_id":1,"education_level":"Bachelors Degree","marital_status":"M","gender":"F","management_role":"Senior Management","end_date":null}
+,{"employee_id":8,"full_name":"Kim Brunner","first_name":"Kim","last_name":"Brunner","position_id":11,"position_title":"Store Manager","store_id":9,"department_id":11,"birth_date":"1922-08-10","hire_date":"1998-01-01 00:00:00.0","salary":10000.0,"supervisor_id":5,"education_level":"Bachelors Degree","marital_status":"S","gender":"F","management_role":"Store Management","end_date":null}
+,{"employee_id":9,"full_name":"Brenda Blumberg","first_name":"Brenda","last_name":"Blumberg","position_id":11,"position_title":"Store Manager","store_id":21,"department_id":11,"birth_date":"1979-06-23","hire_date":"1998-01-01 00:00:00.0","salary":17000.0,"supervisor_id":5,"education_level":"Graduate Degree","marital_status":"M","gender":"F","management_role":"Store Management","end_date":null}
+,{"employee_id":10,"full_name":"Darren Stanz","first_name":"Darren","last_name":"Stanz","position_id":5,"position_title":"VP Finance","store_id":0,"department_id":5,"birth_date":"1949-08-26","hire_date":"1994-12-01 00:00:00.0","salary":50000.0,"supervisor_id":1,"education_level":"Partial College","marital_status":"M","gender":"M","management_role":"Senior Management","end_date":null}
+,{"employee_id":11,"full_name":"Jonathan Murraiin","first_name":"Jonathan","last_name":"Murraiin","position_id":11,"position_title":"Store Manager","store_id":1,"department_id":11,"birth_date":"1967-06-20","hire_date":"1998-01-01 00:00:00.0","salary":15000.0,"supervisor_id":5,"education_level":"Graduate Degree","marital_status":"S","gender":"M","management_role":"Store Management","end_date":null}
 ]
 ,"queryState":"COMPLETED"
 }
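
Note on the two resource changes above: with the row set-based JSON reader, a field that is null in every record (end_date in the Foodmart employee data) is materialized as a nullable VARCHAR rather than omitted, which is why it now appears in both expected REST outputs. A minimal sketch of the equivalent column metadata, assuming the newScalar overload and unknown-schema flag introduced further down in this patch:

    // end_date is null in every record, so the reader manufactures a
    // nullable VARCHAR column flagged as created-for-unknown-schema.
    ColumnMetadata endDate = MetadataUtils.newScalar(
        "end_date", MinorType.VARCHAR, DataMode.OPTIONAL, true);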
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
index 3a20fadb2b..1604ff24a3 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
@@ -55,8 +55,7 @@ import com.fasterxml.jackson.annotation.JsonPropertyOrder;
 @JsonInclude(JsonInclude.Include.NON_DEFAULT)
 @JsonPropertyOrder({ "name", "type", "mode", "format", "default",
     "properties" })
-public abstract class AbstractColumnMetadata extends AbstractPropertied
-    implements ColumnMetadata {
+public abstract class AbstractColumnMetadata extends AbstractPropertied implements ColumnMetadata {
 
   // Capture the key schema information. We cannot use the MaterializedField
   // or MajorType because they encode child information that we encode here
@@ -156,6 +155,9 @@ public abstract class AbstractColumnMetadata extends AbstractPropertied
   @Override
   public boolean isDict() { return false; }
 
+  @Override
+  public boolean isScalar() { return false; }
+
   @Override
   public boolean isDynamic() { return false; }
 
@@ -376,4 +378,4 @@ public abstract class AbstractColumnMetadata extends AbstractPropertied
   protected String escapeSpecialSymbols(String value) {
     return value.replaceAll("(\\\\)|(`)", "\\\\$0");
   }
-}
\ No newline at end of file
+}
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/ColumnMetadata.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/ColumnMetadata.java
index 2117bed12f..0f78f55ee9 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/ColumnMetadata.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/ColumnMetadata.java
@@ -232,6 +232,7 @@ public interface ColumnMetadata extends Propertied {
   boolean isMap();
   boolean isVariant();
   boolean isDict();
+  boolean isScalar();
 
   /**
    * Reports if the column is dynamic. A dynamic column is one with
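
The isScalar() addition rounds out the existing isMap()/isVariant()/isDict() family, so callers can branch on column kind without instanceof checks. An illustrative sketch, assuming a TupleMetadata named schema:

    for (ColumnMetadata col : schema) {
      if (col.isScalar()) {
        // primitive column: width, precision, scale are meaningful here
      } else if (col.isMap() || col.isDict() || col.isVariant()) {
        // structured column: recurse into its members or subtypes
      }
    }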
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java
index e79412a2e1..375a43d589 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java
@@ -200,10 +200,13 @@ public class MetadataUtils {
     return new DictColumnMetadata(name, DataMode.REPEATED);
   }
 
-  public static PrimitiveColumnMetadata newScalar(String name, MinorType type,
-      DataMode mode) {
+  public static PrimitiveColumnMetadata newScalar(String name, MinorType type, DataMode mode) {
+    return newScalar(name, type, mode, false);
+  }
+
+  public static PrimitiveColumnMetadata newScalar(String name, MinorType type, DataMode mode, boolean schemaForUnknown) {
     assert isScalar(type);
-    return new PrimitiveColumnMetadata(name, type, mode);
+    return new PrimitiveColumnMetadata(name, type, mode, schemaForUnknown);
   }
 
   public static PrimitiveColumnMetadata newScalar(String name, MajorType type) {
@@ -212,13 +215,11 @@ public class MetadataUtils {
     return new PrimitiveColumnMetadata(name, type);
   }
 
-  public static ColumnMetadata newDecimal(String name, DataMode mode,
-      int precision, int scale) {
+  public static ColumnMetadata newDecimal(String name, DataMode mode, int precision, int scale) {
     return newDecimal(name, MinorType.VARDECIMAL, mode, precision, scale);
   }
 
-  public static ColumnMetadata newDecimal(String name, MinorType type, DataMode mode,
-      int precision, int scale) {
+  public static ColumnMetadata newDecimal(String name, MinorType type, DataMode mode, int precision, int scale) {
     if (precision < 0 ) {
       throw new IllegalArgumentException("Precision cannot be negative : " +
           precision);
@@ -278,8 +279,7 @@ public class MetadataUtils {
            col.name().equals(DynamicColumn.WILDCARD);
   }
 
-  public static ColumnMetadata cloneMapWithSchema(ColumnMetadata source,
-      TupleMetadata members) {
+  public static ColumnMetadata cloneMapWithSchema(ColumnMetadata source, TupleMetadata members) {
     return newMap(source.name(), source.mode(), members);
   }
 
@@ -292,8 +292,7 @@ public class MetadataUtils {
     }
   }
 
-  public static TupleMetadata diffTuple(TupleMetadata base,
-      TupleMetadata subtend) {
+  public static TupleMetadata diffTuple(TupleMetadata base, TupleMetadata subtend) {
     TupleMetadata diff = new TupleSchema();
     for (ColumnMetadata col : base) {
       ColumnMetadata other = subtend.metadata(col.name());
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
index bb37b0ff66..78f4008caa 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
@@ -53,6 +53,8 @@ import org.slf4j.LoggerFactory;
 public class PrimitiveColumnMetadata extends AbstractColumnMetadata {
   private static final Logger logger = LoggerFactory.getLogger(PrimitiveColumnMetadata.class);
 
+  private boolean forUnknownSchema;
+
   public PrimitiveColumnMetadata(MaterializedField schema) {
     super(schema);
   }
@@ -65,6 +67,11 @@ public class PrimitiveColumnMetadata extends AbstractColumnMetadata {
     super(name, type, mode);
   }
 
+  public PrimitiveColumnMetadata(String name, MinorType type, DataMode mode, boolean forUnknownSchema) {
+    this(name, type, mode);
+    this.forUnknownSchema = forUnknownSchema;
+  }
+
   private int estimateWidth(MajorType majorType) {
     if (type() == MinorType.NULL || type() == MinorType.LATE) {
       return 0;
@@ -332,4 +339,14 @@ public class PrimitiveColumnMetadata extends AbstractColumnMetadata {
         return true;
     }
   }
+
+  @Override
+  public boolean isScalar() { return true; }
+
+  /**
+   * @return true if this primitive column metadata was created for an unknown schema, for instance a column whose values are all null
+   */
+  public boolean isSchemaForUnknown() {
+    return forUnknownSchema;
+  }
 }
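
A consumer-side sketch of the new flag: during schema reconciliation, a placeholder manufactured for an all-null column can safely be replaced by a later, genuinely observed type. The mergeColumn helper is hypothetical and shown only to illustrate the check:

    // Hypothetical helper: prefer an observed type over a placeholder
    // that was created because every value seen so far was null.
    ColumnMetadata mergeColumn(PrimitiveColumnMetadata prior, ColumnMetadata observed) {
      return prior.isSchemaForUnknown() ? observed : prior;
    }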
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/NullableVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/NullableVector.java
index 099f113111..ed429eb178 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/NullableVector.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/NullableVector.java
@@ -23,7 +23,7 @@ import org.apache.drill.exec.record.MaterializedField;
 
 public interface NullableVector extends ValueVector {
 
-  public interface Mutator extends ValueVector.Mutator {
+  interface Mutator extends ValueVector.Mutator {
 
     /**
      * Used by the vector accessors to force the last set value.
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractTupleWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractTupleWriter.java
index 16155f4a0a..897df0c54e 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractTupleWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/accessor/writer/AbstractTupleWriter.java
@@ -246,15 +246,13 @@ public abstract class AbstractTupleWriter implements TupleWriter, WriterEvents {
   @Override
   public int addColumn(ColumnMetadata column) {
     verifyAddColumn(column.name());
-    return addColumnWriter(
-        (AbstractObjectWriter) listener.addColumn(this, column));
+    return addColumnWriter((AbstractObjectWriter) listener.addColumn(this, column));
   }
 
   @Override
   public int addColumn(MaterializedField field) {
     verifyAddColumn(field.getName());
-    return addColumnWriter(
-        (AbstractObjectWriter) listener.addColumn(this, field));
+    return addColumnWriter((AbstractObjectWriter) listener.addColumn(this, field));
   }
 
   private void verifyAddColumn(String colName) {
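
For reference, a sketch of the path these (now single-line) addColumn signatures serve: a reader that discovers a new field mid-batch registers it through the tuple writer and immediately obtains a writer for it. Variable names here are assumed:

    // rowWriter is a TupleWriter; addColumn returns the new column index.
    int idx = rowWriter.addColumn(
        MetadataUtils.newScalar("comment", MinorType.VARCHAR, DataMode.OPTIONAL));
    rowWriter.scalar(idx).setString("late-arriving value");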


[drill] 01/03: DRILL-6953: Merge row set-based JSON reader

Posted by vi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit b464b9991ac34f3a8ea293e62a819a2b69504ed1
Author: Paul Rogers <pa...@yahoo.com>
AuthorDate: Mon Apr 13 19:45:16 2020 -0700

    DRILL-6953: Merge row set-based JSON reader
    
    * Snapshot: reworked JSON field parser creation
    * Updated JSON loader
    * Redo value listener with tokens
    * Extended long type works
    * More simple extended types
    * Added $date
    * Binary type
    * All extended types except arrays
    * Extended arrays partly working
    * More arrays work
    * Refactor element parser interfaces
    * Rename RowSetTests --> RowSetTest
    * More factory cleanup
    * Revised unknown field creation
    * In middle of factory/parser restructuring
    * Scalars, object, some variants work again
    * JSON loader tests pass
    * File cleanup
    * Old extended types test passes
    * Renamed JSON packages
    * Tested extended provided types
---
 .../{RowSetTests.java => RowSetTest.java}          |   2 +-
 .../exec/store/esri/TestShapefileFormatPlugin.java |   4 +-
 .../drill/exec/store/excel/TestExcelFormat.java    |   4 +-
 .../drill/exec/store/hdf5/TestHDF5Format.java      |   4 +-
 .../drill/exec/store/httpd/TestHTTPDLogReader.java |   4 +-
 .../store/httpd/TestHTTPDLogReaderUserAgent.java   |   4 +-
 .../exec/store/image/TestImageRecordReader.java    |   6 +-
 .../drill/exec/store/pcap/TestPcapEVFReader.java   |   4 +-
 .../exec/store/pcapng/TestPcapngRecordReader.java  |   4 +-
 .../store/pcapng/TestPcapngStatRecordReader.java   |   4 +-
 .../apache/drill/exec/store/pdf/TestPdfFormat.java |   4 +-
 .../apache/drill/exec/store/sas/TestSasReader.java |   4 +-
 .../drill/exec/store/spss/TestSpssReader.java      |   4 +-
 .../drill/exec/store/syslog/TestSyslogFormat.java  |   4 +-
 .../apache/drill/exec/store/xml/TestXMLReader.java |   4 +-
 .../exec/store/phoenix/PhoenixCommandTest.java     |   4 +-
 .../exec/store/phoenix/PhoenixDataTypeTest.java    |   4 +-
 .../drill/exec/store/phoenix/PhoenixSQLTest.java   |   4 +-
 .../phoenix/secured/SecuredPhoenixCommandTest.java |   4 +-
 .../secured/SecuredPhoenixDataTypeTest.java        |   4 +-
 .../phoenix/secured/SecuredPhoenixSQLTest.java     |   4 +-
 .../phoenix/secured/SecuredPhoenixTestSuite.java   |   4 +-
 .../java/org/apache/drill/exec/ExecConstants.java  |   3 +
 .../drill/exec/expr/fn/impl/TypeFunctions.java     |   1 -
 .../org/apache/drill/exec/ops/OperatorStats.java   |   8 +-
 .../impl/protocol/VectorContainerAccessor.java     |  12 +-
 .../scan/project/ExplicitSchemaProjection.java     |   2 +-
 .../physical/impl/validate/BatchValidator.java     |   6 +
 .../physical/resultSet/impl/SingleVectorState.java |  53 +-
 .../model/single/SimpleReaderBuilder.java          |  18 +-
 .../org/apache/drill/exec/record/BatchSchema.java  |  66 ++-
 .../apache/drill/exec/record/VectorContainer.java  |   2 +-
 .../exec/record/selection/SelectionVector2.java    |  28 +-
 .../drill/exec/rpc/user/QueryResultHandler.java    |   6 +-
 .../exec/server/options/SystemOptionManager.java   |   1 +
 .../exec/store/dfs/easy/EasyFormatPlugin.java      |  17 +-
 .../exec/store/easy/json/JSONRecordReader.java     |   7 +
 .../exec/store/easy/json/JsonBatchReader.java      |  85 +++
 .../store/easy/json/loader/BaseFieldFactory.java   |   1 -
 .../drill/exec/store/log/LogFormatPlugin.java      |   4 +-
 .../exec/vector/complex/fn/ExtendedJsonOutput.java |   2 -
 .../drill/exec/vector/complex/fn/ExtendedType.java |   3 -
 .../exec/vector/complex/fn/ExtendedTypeName.java   |  23 +-
 .../exec/vector/complex/fn/FieldSelection.java     |   1 -
 .../drill/exec/vector/complex/fn/JsonWriter.java   |  47 +-
 .../drill/exec/vector/complex/fn/VectorOutput.java |  47 +-
 .../java-exec/src/main/resources/drill-module.conf |   1 +
 .../java/org/apache/drill/TestStarQueries.java     | 211 ++++---
 .../org/apache/drill/exec/TestEmptyInputSql.java   |  19 +
 .../drill/exec/expr/fn/impl/TestTypeFns.java       |  63 +-
 .../impl/protocol/TestOperatorRecordBatch.java     |  69 +--
 .../exec/physical/impl/scan/TestColumnsArray.java  |   4 +-
 .../impl/scan/TestColumnsArrayFramework.java       |   4 +-
 .../physical/impl/scan/TestColumnsArrayParser.java |   4 +-
 .../physical/impl/scan/TestFileScanFramework.java  |   4 +-
 .../impl/scan/TestImplicitColumnParser.java        |   4 +-
 .../impl/scan/TestImplicitColumnProjection.java    |   4 +-
 .../physical/impl/scan/TestScanBatchWriters.java   |   4 +-
 .../physical/impl/scan/TestScanOperExecBasics.java |   4 +-
 .../impl/scan/TestScanOperExecEarlySchema.java     |   4 +-
 .../impl/scan/TestScanOperExecLateSchema.java      |   4 +-
 .../impl/scan/TestScanOperExecOuputSchema.java     |   4 +-
 .../impl/scan/TestScanOperExecOverflow.java        |   4 +-
 .../impl/scan/TestScanOperExecSmoothing.java       |   4 +-
 .../impl/scan/TestScanOrchestratorEarlySchema.java |   4 +-
 .../scan/TestScanOrchestratorImplicitColumns.java  |   4 +-
 .../impl/scan/TestScanOrchestratorLateSchema.java  |   4 +-
 .../impl/scan/convert/TestColumnConverter.java     |   4 +-
 .../scan/project/TestConstantColumnLoader.java     |   4 +-
 .../impl/scan/project/TestNullColumnLoader.java    |   4 +-
 .../scan/project/TestReaderLevelProjection.java    |   4 +-
 .../impl/scan/project/TestRowBatchMerger.java      |   4 +-
 .../impl/scan/project/TestSchemaSmoothing.java     |   4 +-
 .../physical/impl/validate/TestBatchValidator.java |   4 +-
 .../impl/TestResultSetLoaderDictArray.java         |   4 +-
 .../resultSet/impl/TestResultSetLoaderDicts.java   |   4 +-
 .../impl/TestResultSetLoaderEmptyProject.java      |   4 +-
 .../resultSet/impl/TestResultSetLoaderLimits.java  |   4 +-
 .../impl/TestResultSetLoaderMapArray.java          |   4 +-
 .../resultSet/impl/TestResultSetLoaderMaps.java    |   4 +-
 .../impl/TestResultSetLoaderOmittedValues.java     |   9 +-
 .../impl/TestResultSetLoaderOverflow.java          |   4 +-
 .../impl/TestResultSetLoaderProtocol.java          |   7 +-
 .../impl/TestResultSetLoaderRepeatedList.java      |   4 +-
 .../resultSet/impl/TestResultSetLoaderTorture.java |   4 +-
 .../resultSet/impl/TestResultSetLoaderUnions.java  |   4 +-
 .../resultSet/impl/TestResultSetSchemaChange.java  |   4 +-
 .../resultSet/impl/TestResultVectorCache.java      |   4 +-
 .../resultSet/project/TestTupleProjection.java     |   4 +-
 .../exec/physical/rowSet/TestDummyWriter.java      |   4 +-
 .../exec/physical/rowSet/TestFillEmpties.java      |   4 +-
 .../exec/physical/rowSet/TestFixedWidthWriter.java |   4 +-
 .../physical/rowSet/TestHyperVectorReaders.java    |   4 +-
 .../exec/physical/rowSet/TestIndirectReaders.java  |   4 +-
 .../exec/physical/rowSet/TestMapAccessors.java     |   4 +-
 .../physical/rowSet/TestOffsetVectorWriter.java    |   4 +-
 .../physical/rowSet/TestRepeatedListAccessors.java |   4 +-
 .../drill/exec/physical/rowSet/TestRowSet.java     |   4 +-
 .../exec/physical/rowSet/TestScalarAccessors.java  |   4 +-
 .../exec/physical/rowSet/TestSchemaBuilder.java    |   4 +-
 .../physical/rowSet/TestVariableWidthWriter.java   |   4 +-
 .../exec/physical/rowSet/TestVariantAccessors.java |   4 +-
 .../org/apache/drill/exec/sql/TestAnalyze.java     |   9 +-
 .../drill/exec/store/json/BaseTestJsonReader.java  |  60 ++
 .../drill/exec/store/json/TestJsonModes.java       |   4 +-
 .../drill/exec/store/json/TestJsonReaderFns.java   | 269 +++++++++
 .../exec/store/json/TestJsonReaderQueries.java     | 634 +++++++++++++++++++++
 .../exec/store/json/TestJsonReaderWithSchema.java  |  24 +
 .../exec/store/json/TestJsonRecordReader.java      | 196 +++++--
 .../drill/exec/store/json/TestJsonScanOp.java      | 271 +++++++++
 .../apache/drill/exec/store/log/TestLogReader.java |   4 +-
 .../drill/exec/store/mock/TestMockPlugin.java      |   4 +-
 .../drill/exec/store/mock/TestMockRowReader.java   |   4 +-
 .../store/sequencefile/TestSequenceFileReader.java |   4 +-
 .../complex/writer/TestComplexTypeWriter.java      |  27 +-
 .../vector/complex/writer/TestExtendedTypes.java   |  61 +-
 .../complex/writer/TestJsonEscapeAnyChar.java      |  28 +-
 .../exec/vector/complex/writer/TestJsonNanInf.java |  89 ++-
 .../exec/vector/complex/writer/TestJsonReader.java | 405 +++----------
 .../test/rowSet/test/TestRowSetComparison.java     |   4 +-
 .../main/codegen/templates/HolderReaderImpl.java   |   7 +-
 .../vector/complex/impl/SingleMapReaderImpl.java   |   2 -
 .../record/metadata/TestMetadataProperties.java    |   4 +-
 .../exec/record/metadata/TestTupleSchema.java      |   4 +-
 124 files changed, 2373 insertions(+), 847 deletions(-)
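
Background on the extended types named in the commit message: the reader accepts Mongo-style extended JSON, in which a one-field wrapper object tags a value with its type. An illustrative input record (field names invented; $numberLong is assumed as the spelling of the extended long tag):

    {"hired":  {"$date": "1994-12-01T00:00:00Z"},
     "salary": {"$numberLong": "80000"}}

Under this convention $date yields a TIMESTAMP column and $numberLong a BIGINT, instead of the MAP columns plain JSON parsing would produce.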

diff --git a/common/src/test/java/org/apache/drill/categories/RowSetTests.java b/common/src/test/java/org/apache/drill/categories/RowSetTest.java
similarity index 97%
rename from common/src/test/java/org/apache/drill/categories/RowSetTests.java
rename to common/src/test/java/org/apache/drill/categories/RowSetTest.java
index eb8300610b..44ea322072 100644
--- a/common/src/test/java/org/apache/drill/categories/RowSetTests.java
+++ b/common/src/test/java/org/apache/drill/categories/RowSetTest.java
@@ -21,7 +21,7 @@ package org.apache.drill.categories;
  * Junit category marker. <br>
  * A category for tests that test the RowSet, ResultSetLoader and related mechanisms.
  */
-public interface RowSetTests {
+public interface RowSetTest {
   /**
    * tag for JUnit5
    */
diff --git a/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java b/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java
index 1df2687cc0..c9ce87c753 100644
--- a/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java
+++ b/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.esri;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -36,7 +36,7 @@ import java.nio.file.Paths;
 
 import static org.junit.Assert.assertEquals;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestShapefileFormatPlugin extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java b/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
index 9faf64e95e..97c8fff753 100644
--- a/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
+++ b/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.excel;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -47,7 +47,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestExcelFormat extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java b/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java
index e427216d07..9e1e04d9a9 100644
--- a/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java
+++ b/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.hdf5;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -43,7 +43,7 @@ import java.util.List;
 import static org.junit.Assert.assertEquals;
 import static org.apache.drill.test.QueryTestUtil.generateCompressedFile;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestHDF5Format extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java b/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
index 4b41562dcb..877cff99cd 100644
--- a/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
+++ b/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.httpd;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -46,7 +46,7 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestHTTPDLogReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReaderUserAgent.java b/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReaderUserAgent.java
index 8b23efbbfc..5edcc2bcaa 100644
--- a/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReaderUserAgent.java
+++ b/contrib/format-httpd/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReaderUserAgent.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.store.httpd;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -36,7 +36,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestHTTPDLogReaderUserAgent extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-image/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java b/contrib/format-image/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java
index 10f2e03710..f6894a1601 100644
--- a/contrib/format-image/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java
+++ b/contrib/format-image/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java
@@ -27,7 +27,7 @@ import java.nio.file.Paths;
 import java.time.Instant;
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -42,7 +42,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestImageRecordReader extends ClusterTest {
 
   @BeforeClass
@@ -233,4 +233,4 @@ public class TestImageRecordReader extends ClusterTest {
 
     new RowSetComparison(expected).verifyAndClearAll(sets);
   }
-}
\ No newline at end of file
+}
diff --git a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java
index 9a27276363..ed46540262 100644
--- a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java
+++ b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.pcap;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.exec.store.pcap.plugin.PcapFormatConfig;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterTest;
@@ -29,7 +29,7 @@ import java.time.LocalDateTime;
 import java.time.Month;
 
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestPcapEVFReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngRecordReader.java b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngRecordReader.java
index ceb76bfa51..7c9cd40044 100644
--- a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngRecordReader.java
+++ b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngRecordReader.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertEquals;
 import java.nio.file.Paths;
 import java.time.Instant;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -39,7 +39,7 @@ import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestPcapngRecordReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngStatRecordReader.java b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngStatRecordReader.java
index 9971a886e0..c202fdeabb 100644
--- a/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngStatRecordReader.java
+++ b/contrib/format-pcapng/src/test/java/org/apache/drill/exec/store/pcapng/TestPcapngStatRecordReader.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 
 import java.nio.file.Paths;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -37,7 +37,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestPcapngStatRecordReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-pdf/src/test/java/org/apache/drill/exec/store/pdf/TestPdfFormat.java b/contrib/format-pdf/src/test/java/org/apache/drill/exec/store/pdf/TestPdfFormat.java
index 1383448fb6..b304629406 100644
--- a/contrib/format-pdf/src/test/java/org/apache/drill/exec/store/pdf/TestPdfFormat.java
+++ b/contrib/format-pdf/src/test/java/org/apache/drill/exec/store/pdf/TestPdfFormat.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.pdf;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -42,7 +42,7 @@ import static org.apache.drill.test.QueryTestUtil.generateCompressedFile;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestPdfFormat extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-sas/src/test/java/org/apache/drill/exec/store/sas/TestSasReader.java b/contrib/format-sas/src/test/java/org/apache/drill/exec/store/sas/TestSasReader.java
index be0965ebea..40696baab6 100644
--- a/contrib/format-sas/src/test/java/org/apache/drill/exec/store/sas/TestSasReader.java
+++ b/contrib/format-sas/src/test/java/org/apache/drill/exec/store/sas/TestSasReader.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.sas;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -39,7 +39,7 @@ import static org.junit.Assert.assertEquals;
 import static org.apache.drill.test.QueryTestUtil.generateCompressedFile;
 
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSasReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-spss/src/test/java/org/apache/drill/exec/store/spss/TestSpssReader.java b/contrib/format-spss/src/test/java/org/apache/drill/exec/store/spss/TestSpssReader.java
index b54c4f88ac..5331b2f772 100644
--- a/contrib/format-spss/src/test/java/org/apache/drill/exec/store/spss/TestSpssReader.java
+++ b/contrib/format-spss/src/test/java/org/apache/drill/exec/store/spss/TestSpssReader.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.spss;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -37,7 +37,7 @@ import java.nio.file.Paths;
 import static org.junit.Assert.assertEquals;
 import static org.apache.drill.test.QueryTestUtil.generateCompressedFile;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSpssReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java b/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java
index c75aadd9e6..a3f5829c58 100644
--- a/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java
+++ b/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java
@@ -22,7 +22,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.DataMode;
@@ -43,7 +43,7 @@ import static org.apache.drill.test.QueryTestUtil.generateCompressedFile;
 import static org.apache.drill.test.rowSet.RowSetUtilities.mapArray;
 import static org.junit.Assert.assertEquals;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSyslogFormat extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/format-xml/src/test/java/org/apache/drill/exec/store/xml/TestXMLReader.java b/contrib/format-xml/src/test/java/org/apache/drill/exec/store/xml/TestXMLReader.java
index b236416d2c..6a9fc11bf4 100644
--- a/contrib/format-xml/src/test/java/org/apache/drill/exec/store/xml/TestXMLReader.java
+++ b/contrib/format-xml/src/test/java/org/apache/drill/exec/store/xml/TestXMLReader.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.xml;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -39,7 +39,7 @@ import static org.apache.drill.test.rowSet.RowSetUtilities.objArray;
 import static org.apache.drill.test.rowSet.RowSetUtilities.strArray;
 import static org.junit.Assert.assertEquals;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestXMLReader extends ClusterTest {
 
   @BeforeClass
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixCommandTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixCommandTest.java
index 43fc645cbc..a7ab2d32e8 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixCommandTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixCommandTest.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.store.phoenix;
 
 import static org.junit.Assert.assertEquals;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -34,7 +34,7 @@ import org.junit.experimental.categories.Category;
 import org.junit.runners.MethodSorters;
 
 @FixMethodOrder(MethodSorters.JVM)
-@Category({ SlowTest.class, RowSetTests.class })
+@Category({ SlowTest.class, RowSetTest.class })
 public class PhoenixCommandTest extends PhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixDataTypeTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixDataTypeTest.java
index 2e6c8d01c9..59b97ab04e 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixDataTypeTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixDataTypeTest.java
@@ -30,7 +30,7 @@ import java.time.Instant;
 import java.time.LocalDate;
 import java.time.LocalTime;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -45,7 +45,7 @@ import org.junit.experimental.categories.Category;
 import org.junit.runners.MethodSorters;
 
 @FixMethodOrder(MethodSorters.JVM)
-@Category({ SlowTest.class, RowSetTests.class })
+@Category({ SlowTest.class, RowSetTest.class })
 public class PhoenixDataTypeTest extends PhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java
index ef083cecec..8e091b9b87 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/PhoenixSQLTest.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.store.phoenix;
 
 import static org.junit.Assert.assertEquals;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -36,7 +36,7 @@ import org.junit.experimental.categories.Category;
 import org.junit.runners.MethodSorters;
 
 @FixMethodOrder(MethodSorters.JVM)
-@Category({ SlowTest.class, RowSetTests.class })
+@Category({ SlowTest.class, RowSetTest.class })
 public class PhoenixSQLTest extends PhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java
index e0f1ccee8d..1c54ff4482 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixCommandTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.store.phoenix.secured;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -31,7 +31,7 @@ import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
 
 @Tag(SlowTest.TAG)
-@Tag(RowSetTests.TAG)
+@Tag(RowSetTest.TAG)
 public class SecuredPhoenixCommandTest extends SecuredPhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java
index 5fd09629c1..2938f46102 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixDataTypeTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.store.phoenix.secured;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -44,7 +44,7 @@ import static org.apache.drill.test.rowSet.RowSetUtilities.shortArray;
 import static org.apache.drill.test.rowSet.RowSetUtilities.strArray;
 
 @Tag(SlowTest.TAG)
-@Tag(RowSetTests.TAG)
+@Tag(RowSetTest.TAG)
 public class SecuredPhoenixDataTypeTest extends SecuredPhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java
index c2209ce1f8..86ecd3f8a2 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixSQLTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.store.phoenix.secured;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
@@ -35,7 +35,7 @@ import org.junit.jupiter.api.Test;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 @Tag(SlowTest.TAG)
-@Tag(RowSetTests.TAG)
+@Tag(RowSetTest.TAG)
 public class SecuredPhoenixSQLTest extends SecuredPhoenixBaseTest {
 
   @Test
diff --git a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
index 5d0451efde..2c4c6fadeb 100644
--- a/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
+++ b/contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/SecuredPhoenixTestSuite.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.store.phoenix.secured;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.exec.store.phoenix.QueryServerBasicsIT;
 import org.apache.drill.test.BaseTest;
@@ -41,7 +41,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 })
 @Disabled
 @Tag(SlowTest.TAG)
-@Tag(RowSetTests.TAG)
+@Tag(RowSetTest.TAG)
 public class SecuredPhoenixTestSuite extends BaseTest {
 
   private static final org.slf4j.Logger logger = LoggerFactory.getLogger(SecuredPhoenixTestSuite.class);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index 3351869549..695f8b6b02 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -471,6 +471,9 @@ public final class ExecConstants {
   public static final OptionValidator PARQUET_COMPLEX_BATCH_NUM_RECORDS_VALIDATOR = new RangeLongValidator(PARQUET_COMPLEX_BATCH_NUM_RECORDS, 1, ValueVector.MAX_ROW_COUNT -1,
       new OptionDescription("Complex Parquet Reader maximum number of records per batch."));
 
+  public static final String ENABLE_V2_JSON_READER_KEY = "store.json.enable_v2_reader";
+  public static final BooleanValidator ENABLE_V2_JSON_READER_VALIDATOR = new BooleanValidator(ENABLE_V2_JSON_READER_KEY,
+      new OptionDescription("Enable the experimental \"version 2\" JSON reader."));
   public static final String JSON_ALL_TEXT_MODE = "store.json.all_text_mode";
   public static final BooleanValidator JSON_READER_ALL_TEXT_MODE_VALIDATOR = new BooleanValidator(JSON_ALL_TEXT_MODE,
       new OptionDescription("Drill reads all data from the JSON files as VARCHAR. Prevents schema change errors."));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/TypeFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/TypeFunctions.java
index 3a1df8f299..b820c1bdcd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/TypeFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/TypeFunctions.java
@@ -33,7 +33,6 @@ import io.netty.buffer.DrillBuf;
  * Type functions for all types. See UnionFunctions for type functions
  * specifically for the UNION type.
  */
-
 public class TypeFunctions {
 
   @FunctionTemplate(name = "sqlTypeOf",
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java
index 2f9300db6f..9ed6d8f8db 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java
@@ -47,9 +47,9 @@ public class OperatorStats {
   private final long[] schemaCountByInput;
 
 
-  private boolean inProcessing = false;
-  private boolean inSetup = false;
-  private boolean inWait = false;
+  private boolean inProcessing;
+  private boolean inSetup;
+  private boolean inWait;
 
   protected long processingNanos;
   protected long setupNanos;
@@ -185,7 +185,7 @@ public class OperatorStats {
   public synchronized void batchReceived(int inputIndex, long records, boolean newSchema) {
     recordsReceivedByInput[inputIndex] += records;
     batchesReceivedByInput[inputIndex]++;
-    if(newSchema){
+    if (newSchema) {
       schemaCountByInput[inputIndex]++;
     }
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/protocol/VectorContainerAccessor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/protocol/VectorContainerAccessor.java
index 694c91b387..2ae82824be 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/protocol/VectorContainerAccessor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/protocol/VectorContainerAccessor.java
@@ -34,6 +34,14 @@ import org.apache.drill.exec.record.selection.SelectionVector4;
  * Wraps a vector container and optional selection vector in an interface
  * simpler than the entire {@link RecordBatch}. This implementation hosts
  * a container only.
+ * <p>
+ * Separates the idea of a batch schema from that of a data batch. The
+ * accessor can identify a schema even if it has no batches. This occurs
+ * for readers that can identify the schema, but produce no actual data.
+ * <p>
+ * This version is designed for the scan operator, which produces a
+ * series of different vector containers (which, oddly, must all
+ * contain the same vectors).
  */
 public class VectorContainerAccessor implements BatchAccessor {
 
@@ -66,7 +74,9 @@ public class VectorContainerAccessor implements BatchAccessor {
    */
   public void addBatch(VectorContainer container) {
     setSchema(container);
-    batchCount++;
+    if (container != null) {
+      batchCount++;
+    }
   }
 
   public int batchCount() { return batchCount; }
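With the null guard above, a reader that discovers a schema but produces no rows can
publish the schema without inflating the batch count. A rough sketch of the intended
contract (assuming setSchema() tolerates a null container, as the guard implies):

    VectorContainerAccessor accessor = new VectorContainerAccessor();
    accessor.addBatch(container);  // schema captured, batchCount() == 1
    accessor.addBatch(null);       // schema-only call, count stays at 1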
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java
index 62bd729b19..b287e6c822 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/scan/project/ExplicitSchemaProjection.java
@@ -313,7 +313,7 @@ public class ExplicitSchemaProjection extends ReaderLevelProjection {
       if (child.isTuple()) {
         members.add(resolveMapMembers(members, child));
       } else {
-        members.add(outputTuple.nullBuilder.add(child.name()));
+        members.add(members.nullBuilder.add(child.name()));
       }
     }
     return mapCol;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
index b79c089d4e..3bf6e71705 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/BatchValidator.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.physical.impl.validate;
 
 import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.record.SimpleVectorWrapper;
 import org.apache.drill.exec.record.VectorAccessible;
@@ -247,6 +248,11 @@ public class BatchValidator {
     return reporter.errorCount() == 0;
   }
 
+
+  public static void validate(RowSet rowSet) {
+    validate(rowSet.container());
+  }
+
   private static ErrorReporter errorReporter(VectorAccessible batch) {
     String opName = batch.getClass().getSimpleName();
     if (LOG_TO_STDOUT) {
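The new overload is a convenience for tests that work with row sets. A hedged usage
sketch (Drill test fixtures assumed):

    RowSet result = client.queryBuilder()
        .sql("SELECT * FROM cp.`donuts.json`").rowSet();
    BatchValidator.validate(result);  // same as validate(result.container())
    result.clear();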
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java
index 83c71f53ee..1723451413 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/impl/SingleVectorState.java
@@ -21,6 +21,7 @@ import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
+import org.apache.drill.exec.vector.BaseDataValueVector;
 import org.apache.drill.exec.vector.FixedWidthVector;
 import org.apache.drill.exec.vector.NullableVector;
 import org.apache.drill.exec.vector.UInt4Vector;
@@ -38,8 +39,10 @@ import org.slf4j.LoggerFactory;
  * Subclasses are specialized for offset vectors or values vectors.
  * (The "single vector" name contrasts with classes that manage compound
  * vectors, such as a data and offsets vector.)
+ * <p>
+ * During overflow it is critical to update the various stored vector
+ * lengths so that serialization/deserialization works correctly.
  */
-
 public abstract class SingleVectorState implements VectorState {
 
   public abstract static class SimpleVectorState extends SingleVectorState {
@@ -64,7 +67,6 @@ public abstract class SingleVectorState implements VectorState {
       // look-ahead vector. Uses vector-level operations for convenience.
       // These aren't very efficient, but overflow does not happen very
       // often.
-
       for (int src = sourceStartIndex; src <= sourceEndIndex; src++, newIndex++) {
         mainVector.copyEntry(newIndex, backupVector, src);
       }
@@ -76,7 +78,6 @@ public abstract class SingleVectorState implements VectorState {
    * vector, or might be the payload part of a scalar array (repeated scalar)
    * vector.
    */
-
   public static class FixedWidthVectorState extends SimpleVectorState {
 
      public FixedWidthVectorState(WriterEvents writer, ValueVector mainVector) {
@@ -101,7 +102,6 @@ public abstract class SingleVectorState implements VectorState {
       int size = super.allocateVector(vector, cardinality);
 
       // IsSet ("bit") vectors rely on values being initialized to zero (unset.)
-
       ((FixedWidthVector) vector).zeroVector();
       return size;
     }
@@ -112,7 +112,6 @@ public abstract class SingleVectorState implements VectorState {
    * vector, or might be the payload part of a scalar array (repeated scalar)
    * vector.
    */
-
   public static class VariableWidthVectorState extends SimpleVectorState {
 
     private final ColumnMetadata schema;
@@ -126,11 +125,25 @@ public abstract class SingleVectorState implements VectorState {
     public int allocateVector(ValueVector vector, int cardinality) {
 
       // Cap the allocated size to the maximum.
-
       int size = (int) Math.min(ValueVector.MAX_BUFFER_SIZE, (long) cardinality * schema.expectedWidth());
       ((VariableWidthVector) vector).allocateNew(size, cardinality);
       return vector.getAllocatedSize();
     }
+
+    @Override
+    public void rollover(int cardinality) {
+      super.rollover(cardinality);
+
+      // Adjust offset vector length
+      int offsetLength = writer.rowStartIndex() + 1;
+      VariableWidthVector varWidthVector = ((VariableWidthVector) backupVector);
+      UInt4Vector offsetVector = varWidthVector.getOffsetVector();
+      offsetVector.getMutator().setValueCount(offsetLength);
+
+      // Adjust data vector length.
+      ((BaseDataValueVector) backupVector).getBuffer().writerIndex(
+          offsetVector.getAccessor().get(offsetLength - 1));
+    }
   }
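A note on the truncation above (assuming serialization relies on the buffer's
writerIndex): if the backup vector retains N = writer.rowStartIndex() rows, its
offset vector needs N + 1 entries, and the data buffer's writerIndex is pulled back
to offsets[N], the end of the last retained row's bytes.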
 
   /**
@@ -140,7 +153,6 @@ public abstract class SingleVectorState implements VectorState {
    * with the offsets vector (here) or the values vector to allow the needed
    * fine control over overflow operations.
    */
-
   public static class OffsetVectorState extends SingleVectorState {
 
     private static final Logger logger = LoggerFactory.getLogger(OffsetVectorState.class);
@@ -151,7 +163,6 @@ public abstract class SingleVectorState implements VectorState {
     * child type is known so this field cannot be final. It will,
      * however, change value only once: from null to a valid writer.
      */
-
     private WriterPosition childWriter;
 
     public OffsetVectorState(WriterEvents writer, ValueVector mainVector,
@@ -185,7 +196,6 @@ public abstract class SingleVectorState implements VectorState {
 
       // This is an offset vector. The data to copy is one greater
       // than the row index.
-
       sourceStartIndex++;
       sourceEndIndex++;
 
@@ -204,10 +214,10 @@ public abstract class SingleVectorState implements VectorState {
       // offset vector position contains the offset of the start of the data
       // for the current row. We must subtract that offset from each copied
       // value to adjust the offset for the destination.
-
-      UInt4Vector.Accessor sourceAccessor = ((UInt4Vector) backupVector).getAccessor();
-      UInt4Vector.Mutator destMutator = ((UInt4Vector) mainVector).getMutator();
-      int offset = childWriter.rowStartIndex();
+      UInt4Vector sourceVector = ((UInt4Vector) backupVector);
+      final UInt4Vector.Accessor sourceAccessor = sourceVector.getAccessor();
+      final UInt4Vector.Mutator destMutator = ((UInt4Vector) mainVector).getMutator();
+      final int offset = childWriter.rowStartIndex();
       int newIndex = 1;
       logger.trace("Offset vector: copy {} values from {} to {} with offset {}",
         Math.max(0, sourceEndIndex - sourceStartIndex + 1),
@@ -221,13 +231,18 @@ public abstract class SingleVectorState implements VectorState {
         destMutator.set(newIndex, sourceAccessor.get(src) - offset);
       }
 
+      // Adjust offset vector length
+      int offsetLength = writer.rowStartIndex() + 1;
+      sourceVector.getMutator().setValueCount(offsetLength);
+
       // Getting offsets right was a pain. If you modify this code,
       // you'll likely relive that experience. Enabling the next three
       // lines will help reveal some of the mystery around offsets and their
       // confusing off-by-one design.
 
-//      VectorPrinter.printOffsets((UInt4Vector) backupVector, sourceStartIndex - 1, sourceEndIndex - sourceStartIndex + 3);
-//      VectorPrinter.printOffsets((UInt4Vector) mainVector, 0, newIndex);
+      // VectorChecker.verifyOffsets("nested", sourceVector);
+      // VectorPrinter.printOffsets(sourceVector, sourceStartIndex - 1, sourceEndIndex - sourceStartIndex + 3);
+      // VectorPrinter.printOffsets((UInt4Vector) mainVector, 0, newIndex);
     }
   }
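To make the off-by-one arithmetic concrete: suppose the source offsets are
[0, 5, 9, 12] and overflow begins at the row whose data starts at offset 9, so
childWriter.rowStartIndex() returns 9. The loop copies the entries after that row's
start and subtracts 9, producing destination offsets [0, 3]: the moved row's 3 bytes
are re-based at position zero of the look-ahead buffer.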
 
@@ -265,7 +280,6 @@ public abstract class SingleVectorState implements VectorState {
    *
    * @param cardinality the number of unique columns in the row
    */
-
   @Override
   public void rollover(int cardinality) {
 
@@ -274,13 +288,11 @@ public abstract class SingleVectorState implements VectorState {
     // Remember the last write index for the original vector.
     // This tells us the end of the set of values to move, while the
     // sourceStartIndex above tells us the start.
-
     int sourceEndIndex = writer.lastWriteIndex();
 
     // Switch buffers between the backup vector and the writer's output
     // vector. Done this way because writers are bound to vectors and
     // we wish to keep the binding.
-
     if (backupVector == null) {
       backupVector = TypeHelper.getNewVector(mainVector.getField(),
           parseVectorType(mainVector), mainVector.getAllocator(), null);
@@ -291,7 +303,6 @@ public abstract class SingleVectorState implements VectorState {
 
     // Copy overflow values from the full vector to the new
     // look-ahead vector.
-
     copyOverflow(sourceStartIndex, sourceEndIndex);
 
     // At this point, the writer is positioned to write to the look-ahead
@@ -312,7 +323,6 @@ public abstract class SingleVectorState implements VectorState {
    * metadata declared within that vector
    * @return the actual major type of the vector
    */
-
   protected static MajorType parseVectorType(ValueVector vector) {
     MajorType purportedType = vector.getField().getType();
     if (purportedType.getMode() != DataMode.OPTIONAL) {
@@ -322,7 +332,6 @@ public abstract class SingleVectorState implements VectorState {
     // For nullable vectors, the purported type can be wrong. The "outer"
     // vector is nullable, but the internal "values" vector is required, though
     // it carries a nullable type -- that is, the metadata lies.
-
     if (vector instanceof NullableVector) {
       return purportedType;
     }
@@ -339,7 +348,6 @@ public abstract class SingleVectorState implements VectorState {
     * overflow buffers away in the backup vector.
     * Restore the main vector's last write position.
     */
-
   @Override
   public void harvestWithLookAhead() {
     mainVector.exchange(backupVector);
@@ -350,7 +358,6 @@ public abstract class SingleVectorState implements VectorState {
    * now ready to start writing to the next batch. Initialize that new batch
    * with the look-ahead values saved during overflow of the previous batch.
    */
-
   @Override
   public void startBatchWithLookAhead() {
     mainVector.exchange(backupVector);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/model/single/SimpleReaderBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/model/single/SimpleReaderBuilder.java
index 3220efa57f..4b6fb40158 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/model/single/SimpleReaderBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/resultSet/model/single/SimpleReaderBuilder.java
@@ -69,7 +69,6 @@ import org.apache.drill.exec.vector.complex.UnionVector;
  * variant (LIST, UNION) and tuple (MAP) columns, the tree grows
  * quite complex.
  */
-
 public class SimpleReaderBuilder extends ReaderBuilder {
 
   private static final SimpleReaderBuilder INSTANCE = new SimpleReaderBuilder();
@@ -133,7 +132,6 @@ public class SimpleReaderBuilder extends ReaderBuilder {
       case LATE:
 
         // Occurs for a list with no type: a list of nulls.
-
         return AbstractScalarReader.nullReader(descrip.metadata);
       default:
         return buildScalarReader(va, descrip.metadata);
@@ -169,22 +167,19 @@ public class SimpleReaderBuilder extends ReaderBuilder {
     final boolean isArray = mode == DataMode.REPEATED;
 
     // Map type
-
     final AbstractObjectReader mapReader = MapReader.build(
         descrip.metadata,
         isArray ? null : va,
         buildMapMembers(vector,
             descrip.parent.childProvider(descrip.metadata)));
 
-    // Single map
-
-    if (! isArray) {
+    if (isArray) {
+      // Repeated map
+      return ArrayReaderImpl.buildTuple(descrip.metadata, va, mapReader);
+    } else {
+      // Single map
       return mapReader;
     }
-
-    // Repeated map
-
-    return ArrayReaderImpl.buildTuple(descrip.metadata, va, mapReader);
   }
 
   protected List<AbstractObjectReader> buildMapMembers(AbstractMapVector mapVector, MetadataProvider provider) {
@@ -208,7 +203,6 @@ public class SimpleReaderBuilder extends ReaderBuilder {
       // Will throw an exception for unsupported types.
       // so call this only if the MajorType reports that the type
       // already exists.
-
       final ValueVector memberVector = vector.getMember(type);
       final VectorDescrip memberDescrip = new VectorDescrip(provider, i++, memberVector.getField());
       variants[type.ordinal()] = buildVectorReader(memberVector, memberDescrip);
@@ -262,7 +256,6 @@ public class SimpleReaderBuilder extends ReaderBuilder {
    * then the union must also be not null. (Experience will show whether
    * existing code does, in fact, follow that convention.)
    */
-
   private AbstractObjectReader build1DList(ListVector vector, VectorAccessor listAccessor,
       VectorDescrip listDescrip) {
     final ValueVector dataVector = vector.getDataVector();
@@ -272,7 +265,6 @@ public class SimpleReaderBuilder extends ReaderBuilder {
       // At the metadata level, a list always holds a union. But, at the
       // implementation layer, a union of a single type is collapsed out
       // to leave just a list of that single type.
-
       dataMetadata = listDescrip;
     } else {
       dataMetadata = new VectorDescrip(listDescrip.childProvider(), 0, dataVector.getField());
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
index 2ee1047e91..d4d385c248 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchema.java
@@ -100,27 +100,36 @@ public class BatchSchema implements Iterable<MaterializedField> {
   }
 
   /**
-   * DRILL-5525: the semantics of this method are badly broken.
-   * Caveat emptor.
+   * DRILL-5525: the semantics of this method are badly broken. Caveat emptor.
    *
-   * This check used for detecting actual schema change inside operator record batch will not work for
-   * AbstractContainerVectors (like MapVector). In each record batch a reference to incoming batch schema is
-   * stored (let say S:{a: int}) and then equals is called on that stored reference and current incoming batch schema.
-   * Internally schema object has references to Materialized fields from vectors in container. If there is change in
-   * incoming batch schema, then the upstream will create a new ValueVector in its output container with the new
-   * detected type, which in turn will have new instance for Materialized Field. Then later a new BatchSchema object
-   * is created for this new incoming batch (let say S":{a":varchar}). The operator calling equals will have reference
-   * to old schema object (S) and hence first check will not be satisfied and then it will call equals on each of the
-   * Materialized Field (a.equals(a")). Since new materialized field is created for newly created vector the equals
-   * check on field will return false. And schema change will be detected in this case.
-   * Now consider instead of int vector there is a MapVector such that initial schema was (let say S:{a:{b:int, c:int}}
-   * and then later schema for Map field c changes, then in container Map vector will be found but later the children
-   * vector for field c will be replaced. This new schema object will be created as (S":{a:{b:int, c":varchar}}). Now
-   * when S.equals(S") is called it will eventually call a.equals(a) which will return true even though the schema of
-   * children value vector c has changed. This is because no new vector is created for field (a) and hence it's object
-   * reference to MaterializedField has not changed which will be reflected in both old and new schema instances.
-   * Hence we should make use of {@link BatchSchema#isEquivalent(BatchSchema)} method instead since
-   * {@link MaterializedField#isEquivalent(MaterializedField)} method is updated to remove the reference check.
+   * This check, used for detecting an actual schema change inside an
+   * operator record batch, will not work for AbstractContainerVectors (like
+   * MapVector). In each record batch a reference to the incoming batch
+   * schema is stored (say S:{a: int}) and then equals is called on that
+   * stored reference and the current incoming batch schema. Internally the
+   * schema object holds references to the MaterializedFields of the vectors
+   * in the container. If the incoming batch schema changes, the upstream
+   * operator creates a new ValueVector in its output container with the
+   * newly detected type, which in turn gets a new MaterializedField
+   * instance. A new BatchSchema object is then created for the new incoming
+   * batch (say S":{a":varchar}). The operator calling equals still holds a
+   * reference to the old schema object (S), so the first check fails and
+   * equals is called on each MaterializedField (a.equals(a")). Since a new
+   * MaterializedField was created for the newly created vector, the equals
+   * check on the field returns false and the schema change is detected.
+   * Now suppose that instead of an int vector there is a MapVector, so the
+   * initial schema was (say S:{a:{b:int, c:int}}), and the schema for map
+   * field c later changes. The container still holds the same Map vector,
+   * but the child vector for field c is replaced, and the new schema object
+   * is created as (S":{a:{b:int, c":varchar}}). Now when S.equals(S") is
+   * called it eventually calls a.equals(a), which returns true even though
+   * the schema of child value vector c has changed. This is because no new
+   * vector was created for field (a), so its object reference to
+   * MaterializedField is unchanged and is shared by both the old and new
+   * schema instances. Hence we should use
+   * {@link BatchSchema#isEquivalent(BatchSchema)} instead, since
+   * {@link MaterializedField#isEquivalent(MaterializedField)} has been
+   * updated to remove the reference check.
    */
   @Override
   public boolean equals(Object obj) {
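Given the caveats above, code that detects schema changes should prefer the
structural check; a minimal sketch:

    // Prefer structural comparison over equals() when checking for
    // schema changes across batches.
    if (!newSchema.isEquivalent(oldSchema)) {
      // react to the schema change
    }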
@@ -201,18 +210,21 @@ public class BatchSchema implements Iterable<MaterializedField> {
    * @param t2
    * @return
    */
-  private boolean majorTypeEqual(MajorType t1, MajorType t2) {
+  private static boolean majorTypeEqual(MajorType t1, MajorType t2) {
     if (t1.equals(t2)) {
       return true;
-    } else if (!t1.getMinorType().equals(t2.getMinorType())) {
-      return false;
-    } else if (!t1.getMode().equals(t2.getMode())) {
+    }
+    // TODO: the next two checks are redundant: equals does them.
+    if (!t1.getMinorType().equals(t2.getMinorType())) {
       return false;
-    } else if (!Sets.newHashSet(t1.getSubTypeList()).equals(Sets.newHashSet(t2.getSubTypeList()))) {
+    }
+    if (!t1.getMode().equals(t2.getMode())) {
       return false;
-    } else {
-      return true;
     }
+
+    // TODO: this does not do anything. The call to equals() above
+    // checks subtypes in a different way.
+    return Sets.newHashSet(t1.getSubTypeList()).equals(Sets.newHashSet(t2.getSubTypeList()));
   }
 
   /**
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
index 9d1c6a3d1a..551971b592 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
@@ -167,7 +167,7 @@ public class VectorContainer implements VectorAccessible {
     final ValueVector vector;
     if (id != null) {
       vector = getValueAccessorById(id.getFieldIds()).getValueVector();
-      if (id.getFieldIds().length == 1 && !vector.getField().getType().equals(field.getType())) {
+      if (id.getFieldIds().length == 1 && !vector.getField().isEquivalent(field)) {
         final ValueVector newVector = TypeHelper.getNewVector(field, this.getAllocator(), callBack);
         replace(vector, newVector);
         return (T) newVector;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
index 30791e9474..0a514df0a8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
@@ -38,10 +38,12 @@ public class SelectionVector2 implements AutoCloseable {
   public static final int RECORD_SIZE = 2;
 
   private final BufferAllocator allocator;
-  // Indicates number of indexes stored in the SV2 buffer which may be less than actual number of rows stored in
-  // RecordBatch container owning this SV2 instance
+  // Indicates the number of indexes stored in the SV2 buffer, which may be
+  // less than the actual number of rows stored in the RecordBatch container
+  // owning this SV2 instance
   private int recordCount;
-  // Indicates actual number of rows in the RecordBatch container which owns this SV2 instance
+  // Indicates the actual number of rows in the RecordBatch
+  // container which owns this SV2 instance
   private int batchActualRecordCount = -1;
   private DrillBuf buffer = DeadBuf.DEAD_BUFFER;
 
@@ -82,12 +84,11 @@ public class SelectionVector2 implements AutoCloseable {
     DrillBuf bufferHandle = buffer;
 
     if (clear) {
-      /* Increment the ref count for this buffer */
+      // Increment the ref count for this buffer
       bufferHandle.retain(1);
 
-      /* We are passing ownership of the buffer to the
-       * caller. clear the buffer from within our selection vector
-       */
+      // We are passing ownership of the buffer to the
+      // caller. clear the buffer from within our selection vector
       clear();
     }
 
@@ -95,7 +96,7 @@ public class SelectionVector2 implements AutoCloseable {
   }
 
   public void setBuffer(DrillBuf bufferHandle) {
-    /* clear the existing buffer */
+    // clear the existing buffer
     clear();
 
     buffer = bufferHandle;
@@ -106,6 +107,10 @@ public class SelectionVector2 implements AutoCloseable {
     return buffer.getChar(index * RECORD_SIZE);
   }
 
+  public void setIndex(int index, char value) {
+    buffer.setChar(index * RECORD_SIZE, value);
+  }
+
   public long getDataAddr() {
     return buffer.memoryAddress();
   }
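The new setIndex() mirrors getIndex() and writes a two-byte row index, which makes
it possible to fill an SV2 directly. A hedged sketch (allocateNew() and
setRecordCount() assumed from this class):

    // Keep only even-numbered rows of a batch.
    sv2.allocateNew(rowCount);
    int out = 0;
    for (int row = 0; row < rowCount; row++) {
      if (row % 2 == 0) {
        sv2.setIndex(out++, (char) row);
      }
    }
    sv2.setRecordCount(out);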
@@ -135,10 +140,9 @@ public class SelectionVector2 implements AutoCloseable {
     newSV.batchActualRecordCount = batchActualRecordCount;
     newSV.buffer = buffer;
 
-    /* Since buffer and newSV.buffer essentially point to the
-     * same buffer, if we don't do a retain() on the newSV's
-     * buffer, it might get freed.
-     */
+    // Since buffer and newSV.buffer essentially point to the
+    // same buffer, if we don't do a retain() on the newSV's
+    // buffer, it might get freed.
     newSV.buffer.retain(1);
     clear();
     return newSV;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
index 460dbb7979..063b840c37 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
@@ -172,7 +172,11 @@ public class QueryResultHandler {
       resultsListener.dataArrived(batch, throttle);
       // That releases batch if successful.
     } catch (Exception e) {
-      batch.release();
+      try {
+        batch.release();
+      } catch (IllegalStateException e2) {
+        // Ignore, released twice
+      }
       resultsListener.submissionFailed(UserException.systemError(e).build(logger));
     }
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
index 6c89470009..3e1f6a73ec 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
@@ -190,6 +190,7 @@ public class SystemOptionManager extends BaseOptionManager implements AutoClosea
       new OptionDefinition(ExecConstants.PARQUET_FLAT_BATCH_MEMORY_SIZE_VALIDATOR, new OptionMetaData(OptionValue.AccessibleScopes.SYSTEM_AND_SESSION, true, true)),
       new OptionDefinition(ExecConstants.PARQUET_COMPLEX_BATCH_NUM_RECORDS_VALIDATOR, new OptionMetaData(OptionValue.AccessibleScopes.SYSTEM_AND_SESSION, true, true)),
       new OptionDefinition(ExecConstants.PARTITIONER_MEMORY_REDUCTION_THRESHOLD_VALIDATOR),
+      new OptionDefinition(ExecConstants.ENABLE_V2_JSON_READER_VALIDATOR),
       new OptionDefinition(ExecConstants.JSON_READER_ALL_TEXT_MODE_VALIDATOR),
       new OptionDefinition(ExecConstants.JSON_WRITER_NAN_INF_NUMBERS_VALIDATOR),
       new OptionDefinition(ExecConstants.JSON_READER_NAN_INF_NUMBERS_VALIDATOR),
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
index 15ddf6b73e..fad3634d27 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyFormatPlugin.java
@@ -48,7 +48,8 @@ import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.record.CloseableRecordBatch;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
+import org.apache.drill.exec.store.ColumnExplorer;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.store.RecordWriter;
 import org.apache.drill.exec.store.StatisticsRecordWriter;
@@ -130,7 +131,7 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
     /**
      *  Choose whether to use the "traditional" or "enhanced" reader
      *  structure. Can also be selected at runtime by overriding
-     *  {@link #useEnhancedScan()}.
+     *  {@link #useEnhancedScan(OptionSet)}.
      */
     private final ScanFrameworkVersion scanVersion;
 
@@ -515,7 +516,7 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
    * @return true to use the enhanced scan framework, false for the
    * traditional scan-batch framework
    */
-  protected ScanFrameworkVersion scanVersion(OptionManager options) {
+  protected ScanFrameworkVersion scanVersion(OptionSet options) {
     return easyConfig.scanVersion;
   }
 
@@ -545,15 +546,15 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
   /**
    * Initialize the scan framework builder with standard options.
    * Call this from the plugin-specific
-   * {@link #frameworkBuilder(OptionManager, EasySubScan)} method.
+   * {@link #frameworkBuilder(OptionSet, EasySubScan)} method.
    * The plugin can then customize/revise options as needed.
    * <p>
    * For EVF V1, to be removed.
    *
    * @param builder the scan framework builder you create in the
-   * {@link #frameworkBuilder(OptionManager, EasySubScan)} method
+   * {@link #frameworkBuilder(OptionSet, EasySubScan)} method
    * @param scan the physical scan operator definition passed to
-   * the {@link #frameworkBuilder(OptionManager, EasySubScan)} method
+   * the {@link #frameworkBuilder(OptionSet, EasySubScan)} method
    */
   protected void initScanBuilder(FileScanBuilder builder, EasySubScan scan) {
     EvfV1ScanBuilder.initScanBuilder(this, builder, scan);
@@ -563,7 +564,7 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
    * For EVF V1, to be removed.
    */
   public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(
-      EasySubScan scan, OptionManager options) throws ExecutionSetupException {
+      EasySubScan scan, OptionSet options) throws ExecutionSetupException {
     throw new ExecutionSetupException("Must implement newBatchReader() if using the enhanced framework.");
   }
 
@@ -583,7 +584,7 @@ public abstract class EasyFormatPlugin<T extends FormatPluginConfig> implements
    * @throws ExecutionSetupException for all setup failures
    */
   protected FileScanBuilder frameworkBuilder(
-      OptionManager options, EasySubScan scan) throws ExecutionSetupException {
+      OptionSet options, EasySubScan scan) throws ExecutionSetupException {
     throw new ExecutionSetupException("Must implement frameworkBuilder() if using the enhanced framework.");
   }
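Plugins that opt into the enhanced scan override these hooks. A skeletal sketch
under the new OptionSet signatures (MyBatchReader is hypothetical; FileReaderFactory
follows the EVF V1 classes and is assumed here):

    @Override
    protected FileScanBuilder frameworkBuilder(
        OptionSet options, EasySubScan scan) throws ExecutionSetupException {
      FileScanBuilder builder = new FileScanBuilder();
      initScanBuilder(builder, scan);  // standard options from this class
      builder.setReaderFactory(new FileReaderFactory() {
        @Override
        public ManagedReader<? extends FileSchemaNegotiator> newReader() {
          return new MyBatchReader();  // hypothetical ManagedReader
        }
      });
      return builder;
    }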
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
index d5f498b5a3..7fe6ffaa55 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
@@ -49,6 +49,13 @@ import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.databind.JsonNode;
 
+/**
+ * Old-style JSON record reader. Not used when reading JSON files,
+ * but is used by some "mini-plan" unit tests, and by the VALUES
+ * reader. As a result, this reader cannot be removed and must be
+ * maintained until the other uses are converted to the new-style
+ * JSON reader.
+ */
 public class JSONRecordReader extends AbstractRecordReader {
   private static final Logger logger = LoggerFactory.getLogger(JSONRecordReader.class);
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonBatchReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonBatchReader.java
new file mode 100644
index 0000000000..48d44f42a4
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JsonBatchReader.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.easy.json;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.drill.common.exceptions.ChildErrorContext;
+import org.apache.drill.common.exceptions.CustomErrorContext;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.exec.physical.impl.scan.file.FileScanFramework.FileSchemaNegotiator;
+import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
+import org.apache.drill.exec.store.dfs.DrillFileSystem;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoader;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder;
+import org.apache.hadoop.mapred.FileSplit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class JsonBatchReader implements ManagedReader<FileSchemaNegotiator> {
+  private static final Logger logger = LoggerFactory.getLogger(JsonBatchReader.class);
+
+  private JsonLoader jsonLoader;
+
+  @Override
+  public boolean open(FileSchemaNegotiator negotiator) {
+    DrillFileSystem fileSystem = negotiator.fileSystem();
+    FileSplit split = negotiator.split();
+
+    InputStream stream;
+    try {
+      stream = fileSystem.openPossiblyCompressedStream(split.getPath());
+    } catch (IOException e) {
+      throw UserException
+          .dataReadError(e)
+          .addContext("Failure to open JSON file", split.getPath().toString())
+          .build(logger);
+    }
+    CustomErrorContext errorContext = new ChildErrorContext(negotiator.parentErrorContext()) {
+      @Override
+      public void addContext(UserException.Builder builder) {
+        super.addContext(builder);
+        builder.addContext("File name", split.getPath().toString());
+      }
+    };
+    negotiator.setErrorContext(errorContext);
+
+    // Create the JSON loader (high-level parser).
+    jsonLoader = new JsonLoaderBuilder()
+        .resultSetLoader(negotiator.build())
+        .standardOptions(negotiator.queryOptions())
+        .errorContext(errorContext)
+        .fromStream(stream)
+        .build();
+    return true;
+  }
+
+  @Override
+  public boolean next() {
+    return jsonLoader.readBatch();
+  }
+
+  @Override
+  public void close() {
+    if (jsonLoader != null) {
+      jsonLoader.close();
+      jsonLoader = null;
+    }
+  }
+}
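This reader slots into the EVF hooks shown earlier; for example, a plugin's
newBatchReader() (with the OptionSet signature from this patch) could simply be:

    @Override
    public ManagedReader<? extends FileSchemaNegotiator> newBatchReader(
        EasySubScan scan, OptionSet options) {
      return new JsonBatchReader();
    }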
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java
index 49185ce996..ad0cba9ffb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/loader/BaseFieldFactory.java
@@ -75,7 +75,6 @@ public abstract class BaseFieldFactory implements FieldFactory {
 
   protected JsonLoaderImpl loader() { return loader; }
 
-  @Override
   public ValueParser scalarParserFor(FieldDefn fieldDefn, ColumnMetadata colSchema) {
     return scalarParserFor(fieldDefn.scalarWriterFor(colSchema));
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java
index ddd1b03f47..43812a22b2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/log/LogFormatPlugin.java
@@ -34,7 +34,7 @@ import org.apache.drill.exec.record.metadata.Propertied;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionManager;
+import org.apache.drill.exec.server.options.OptionSet;
 import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
 import org.apache.drill.exec.store.dfs.easy.EasySubScan;
 import org.apache.drill.exec.store.log.LogBatchReader.LogReaderConfig;
@@ -144,7 +144,7 @@ public class LogFormatPlugin extends EasyFormatPlugin<LogFormatConfig> {
    */
   @Override
   protected FileScanBuilder frameworkBuilder(
-      OptionManager options, EasySubScan scan) throws ExecutionSetupException {
+      OptionSet options, EasySubScan scan) throws ExecutionSetupException {
 
     // Pattern and schema identical across readers; define
     // up front.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java
index 5dd9898bde..8dfbac00ec 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedJsonOutput.java
@@ -180,6 +180,4 @@ public class ExtendedJsonOutput extends BasicJsonOutput {
   public void writeIntNull() throws IOException {
     writeBigIntNull();
   }
-
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedType.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedType.java
index aa2883b089..31e83bfc59 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedType.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedType.java
@@ -35,7 +35,4 @@ public enum ExtendedType {
   ExtendedType(String name) {
     this.serialized = new SerializedString(name);
   }
-
-
-
 }
\ No newline at end of file
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedTypeName.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedTypeName.java
index 2760b9ab61..2b03414ab6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedTypeName.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ExtendedTypeName.java
@@ -17,14 +17,21 @@
  */
 package org.apache.drill.exec.vector.complex.fn;
 
+/**
+ * Based on
+ * <a href="https://docs.mongodb.com/manual/reference/mongodb-extended-json-v1/">
+ * V1</a> of the Mongo extended type spec. Some names overlap with the current
+ * <a href="https://docs.mongodb.com/manual/reference/mongodb-extended-json/">
+ * V2</a> of the Mongo spec.
+ */
 public interface ExtendedTypeName {
-  public static final String BINARY = "$binary";      // base64 encoded binary (ZHJpbGw=)  [from Mongo]
-  public static final String TYPE = "$type";          // type of binary data
-  public static final String DATE = "$dateDay";       // ISO date with no time. such as (12-24-27)
-  public static final String TIME = "$time";          // ISO time with no timezone (19:20:30.45Z)
-  public static final String TIMESTAMP = "$date";     // ISO standard time (2009-02-23T00:00:00.000-08:00) [from Mongo]
-  public static final String INTERVAL = "$interval";  // ISO standard duration (PT26.4S)
-  public static final String INTEGER = "$numberLong"; // 8 byte signed integer (123) [from Mongo]
-  public static final String DECIMAL = "$decimal";    // exact numeric value (123.123)
+  String BINARY = "$binary";      // base64 encoded binary (ZHJpbGw=)  [from Mongo]
+  String TYPE = "$type";          // type of binary data
+  String DATE = "$dateDay";       // ISO date with no time. such as (12-24-27)
+  String TIME = "$time";          // ISO time with no timezone (19:20:30.45Z)
+  String TIMESTAMP = "$date";     // ISO standard time (2009-02-23T00:00:00.000-08:00) [from Mongo]
+  String INTERVAL = "$interval";  // ISO standard duration (PT26.4S)
+  String INTEGER = "$numberLong"; // 8 byte signed integer (123) [from Mongo]
+  String DECIMAL = "$decimal";    // exact numeric value (123.123)
 }
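For illustration, a record that uses two of these markers, following the examples
in the comments above:

    { "ts"  : { "$date" : "2009-02-23T00:00:00.000-08:00" },
      "num" : { "$numberLong" : 123 } }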
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/FieldSelection.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/FieldSelection.java
index 71098d0141..6df6ec6786 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/FieldSelection.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/FieldSelection.java
@@ -170,5 +170,4 @@ public class FieldSelection {
       return root.fixNodes();
     }
   }
-
 }
\ No newline at end of file
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
index 269293e43a..07fb3d5b8f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
@@ -61,7 +61,7 @@ public class JsonWriter {
     final DataMode m = reader.getType().getMode();
     final MinorType mt = reader.getType().getMinorType();
 
-    switch(m){
+    switch(m) {
     case OPTIONAL:
     case REQUIRED:
 
@@ -87,7 +87,6 @@ public class JsonWriter {
       case BIT:
         gen.writeBoolean(reader);
         break;
-
       case DATE:
         gen.writeDate(reader);
         break;
@@ -123,9 +122,9 @@ public class JsonWriter {
       case MAP:
         gen.writeStartObject();
         if (reader.isSet()) {
-          for(String name : reader){
+          for (String name : reader) {
             FieldReader childReader = reader.reader(name);
-            if(childReader.isSet()){
+            if (childReader.isSet()) {
               gen.writeFieldName(name);
               writeValue(childReader);
             }
@@ -154,60 +153,60 @@ public class JsonWriter {
       gen.writeStartArray();
       switch (mt) {
       case FLOAT4:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeFloat(i, reader);
         }
         break;
       case FLOAT8:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeDouble(i, reader);
         }
         break;
       case INT:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeInt(i, reader);
         }
         break;
       case SMALLINT:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeSmallInt(i, reader);
         }
         break;
       case TINYINT:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeTinyInt(i, reader);
         }
         break;
       case BIGINT:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeBigInt(i, reader);
         }
         break;
       case BIT:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeBoolean(i, reader);
         }
         break;
 
       case DATE:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeDate(i, reader);
         }
         break;
       case TIME:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeTime(i, reader);
         }
         break;
       case TIMESTAMP:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeTimestamp(i, reader);
         }
         break;
       case INTERVALYEAR:
       case INTERVALDAY:
       case INTERVAL:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeInterval(i, reader);
         }
         break;
@@ -218,24 +217,24 @@ public class JsonWriter {
       case DECIMAL9:
       case DECIMAL18:
       case VARDECIMAL:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeDecimal(i, reader);
         }
         break;
 
       case LIST:
-        for(int i = 0; i < reader.size(); i++){
-          while(reader.next()){
+        for (int i = 0; i < reader.size(); i++) {
+          while (reader.next()) {
             writeValue(reader.reader());
           }
         }
         break;
       case MAP:
-        while(reader.next()){
+        while (reader.next()) {
           gen.writeStartObject();
-          for(String name : reader){
+          for (String name : reader) {
             FieldReader mapField = reader.reader(name);
-            if(mapField.isSet()){
+            if (mapField.isSet()) {
               gen.writeFieldName(name);
               writeValue(mapField);
             }
@@ -247,17 +246,17 @@ public class JsonWriter {
         break;
 
       case VAR16CHAR:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeVar16Char(i, reader);
         }
         break;
       case VARBINARY:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeBinary(i, reader);
         }
         break;
       case VARCHAR:
-        for(int i = 0; i < reader.size(); i++){
+        for (int i = 0; i < reader.size(); i++) {
           gen.writeVarChar(i, reader);
         }
         break;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
index 4493518d27..ea3bfd4150 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
@@ -80,29 +80,29 @@ abstract class VectorOutput {
       .optionalStart().appendOffset("+HH", "Z").optionalEnd()
       .toFormatter();
 
-  public VectorOutput(WorkingBuffer work){
+  public VectorOutput(WorkingBuffer work) {
     this.work = work;
   }
 
-  public void setParser(JsonParser parser){
+  public void setParser(JsonParser parser) {
     this.parser = parser;
   }
 
   protected boolean innerRun() throws IOException{
     JsonToken t = parser.nextToken();
-    if(t != JsonToken.FIELD_NAME){
+    if (t != JsonToken.FIELD_NAME) {
       return false;
     }
 
     String possibleTypeName = parser.getText();
-    if(!possibleTypeName.isEmpty() && possibleTypeName.charAt(0) == '$'){
-      switch(possibleTypeName){
+    if (!possibleTypeName.isEmpty() && possibleTypeName.charAt(0) == '$') {
+      switch (possibleTypeName) {
       case ExtendedTypeName.BINARY:
         writeBinary(checkNextToken(JsonToken.VALUE_STRING));
         checkCurrentToken(JsonToken.END_OBJECT);
         return true;
       case ExtendedTypeName.TYPE:
-        if(checkNextToken(JsonToken.VALUE_NUMBER_INT) || !hasBinary()) {
+        if (checkNextToken(JsonToken.VALUE_NUMBER_INT) || !hasBinary()) {
           throw UserException.parseError()
           .message("Either $type is not an integer or has no $binary")
           .build(logger);
@@ -177,13 +177,13 @@ abstract class VectorOutput {
   }
 
   public boolean checkToken(final JsonToken t, final JsonToken expected1, final JsonToken expected2) throws IOException{
-    if(t == JsonToken.VALUE_NULL){
+    if (t == JsonToken.VALUE_NULL) {
       return true;
-    }else if(t == expected1){
+    } else if (t == expected1) {
       return false;
-    }else if(t == expected2){
+    } else if (t == expected2) {
       return false;
-    }else{
+    } else {
       throw new JsonParseException(String.format("Failure while reading ExtendedJSON typed value. Expected a %s but "
           + "received a token of type %s", expected1, t), parser.getCurrentLocation());
     }
@@ -212,7 +212,7 @@ abstract class VectorOutput {
     @Override
     public void writeBinary(boolean isNull) throws IOException {
       VarBinaryWriter bin = writer.varBinary();
-      if(!isNull){
+      if (!isNull) {
         byte[] binaryData = parser.getBinaryValue();
         if (hasType()) {
           //Ignoring type info as of now.
@@ -231,7 +231,7 @@ abstract class VectorOutput {
     @Override
     public void writeDate(boolean isNull) throws IOException {
       DateWriter dt = writer.date();
-      if(!isNull){
+      if (!isNull) {
         work.prepareVarCharHolder(parser.getValueAsString(), varchar);
         dt.writeDate(StringFunctionHelpers.getDate(varchar.buffer, varchar.start, varchar.end));
       }
@@ -240,7 +240,7 @@ abstract class VectorOutput {
     @Override
     public void writeTime(boolean isNull) throws IOException {
       TimeWriter t = writer.time();
-      if(!isNull){
+      if (!isNull) {
         // read time and obtain the local time in the provided time zone.
         LocalTime localTime = OffsetTime.parse(parser.getValueAsString(), DateUtility.isoFormatTime).toLocalTime();
         t.writeTime((int) ((localTime.toNanoOfDay() + 500000L) / 1000000L)); // round to milliseconds
@@ -250,7 +250,7 @@ abstract class VectorOutput {
     @Override
     public void writeTimestamp(boolean isNull) throws IOException {
       TimeStampWriter ts = writer.timeStamp();
-      if(!isNull){
+      if (!isNull) {
         switch (parser.getCurrentToken()) {
         case VALUE_NUMBER_INT:
           DateTime dt = new DateTime(parser.getLongValue(), org.joda.time.DateTimeZone.UTC);
@@ -276,7 +276,7 @@ abstract class VectorOutput {
     @Override
     public void writeInterval(boolean isNull) throws IOException {
       IntervalWriter intervalWriter = writer.interval();
-      if(!isNull){
+      if (!isNull) {
         final Period p = ISOPeriodFormat.standard().parsePeriod(parser.getValueAsString());
         int months = DateUtilities.monthsFromPeriod(p);
         int days = p.getDays();
@@ -288,7 +288,7 @@ abstract class VectorOutput {
     @Override
     public void writeInteger(boolean isNull) throws IOException {
       BigIntWriter intWriter = writer.bigInt();
-      if(!isNull){
+      if (!isNull) {
         intWriter.writeBigInt(Long.parseLong(parser.getValueAsString()));
       }
     }
@@ -297,7 +297,6 @@ abstract class VectorOutput {
     public void writeDecimal(boolean isNull) throws IOException {
       throw new JsonParseException("Decimal Extended types not yet supported.", parser.getCurrentLocation());
     }
-
   }
 
   static class MapVectorOutput extends VectorOutput {
@@ -318,7 +317,7 @@ abstract class VectorOutput {
     @Override
     public void writeBinary(boolean isNull) throws IOException {
       VarBinaryWriter bin = writer.varBinary(fieldName);
-      if(!isNull){
+      if (!isNull) {
         byte[] binaryData = parser.getBinaryValue();
         if (hasType()) {
           //Ignoring type info as of now.
@@ -337,7 +336,7 @@ abstract class VectorOutput {
     @Override
     public void writeDate(boolean isNull) throws IOException {
       DateWriter dt = writer.date(fieldName);
-      if(!isNull){
+      if (!isNull) {
         LocalDate    localDate = LocalDate.parse(parser.getValueAsString(), DateUtility.isoFormatDate);
         OffsetDateTime utcDate = OffsetDateTime.of(localDate, LocalTime.MIDNIGHT, ZoneOffset.UTC);
 
@@ -348,7 +347,7 @@ abstract class VectorOutput {
     @Override
     public void writeTime(boolean isNull) throws IOException {
       TimeWriter t = writer.time(fieldName);
-      if(!isNull){
+      if (!isNull) {
         LocalTime localTime = OffsetTime.parse(parser.getValueAsString(), DateUtility.isoFormatTime).toLocalTime();
         t.writeTime((int) ((localTime.toNanoOfDay() + 500000L) / 1000000L)); // round to milliseconds
       }
@@ -357,7 +356,7 @@ abstract class VectorOutput {
     @Override
     public void writeTimestamp(boolean isNull) throws IOException {
       TimeStampWriter ts = writer.timeStamp(fieldName);
-      if(!isNull){
+      if (!isNull) {
         switch (parser.getCurrentToken()) {
         case VALUE_NUMBER_INT:
           DateTime dt = new DateTime(parser.getLongValue(), org.joda.time.DateTimeZone.UTC);
@@ -383,7 +382,7 @@ abstract class VectorOutput {
     @Override
     public void writeInterval(boolean isNull) throws IOException {
       IntervalWriter intervalWriter = writer.interval(fieldName);
-      if(!isNull){
+      if (!isNull) {
         final Period p = ISOPeriodFormat.standard().parsePeriod(parser.getValueAsString());
         int months = DateUtilities.monthsFromPeriod(p);
         int days = p.getDays();
@@ -395,7 +394,7 @@ abstract class VectorOutput {
     @Override
     public void writeInteger(boolean isNull) throws IOException {
       BigIntWriter intWriter = writer.bigInt(fieldName);
-      if(!isNull){
+      if (!isNull) {
         intWriter.writeBigInt(Long.parseLong(parser.getValueAsString()));
       }
     }
@@ -404,7 +403,5 @@ abstract class VectorOutput {
     public void writeDecimal(boolean isNull) throws IOException {
       throw new IOException("Decimal Extended types not yet supported.");
     }
-
   }
-
 }
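
The formatter built near the top of VectorOutput, with
.optionalStart().appendOffset("+HH", "Z").optionalEnd(), is what makes the
zone offset optional when parsing extended-JSON timestamps (DRILL-8195). A
self-contained sketch of the same optional-offset idea using only java.time;
the "+HH:mm" pattern here is illustrative, not Drill's exact constant:

    import java.time.OffsetDateTime;
    import java.time.format.DateTimeFormatter;
    import java.time.format.DateTimeFormatterBuilder;

    public class IsoOffsetDemo {
      private static final DateTimeFormatter FMT = new DateTimeFormatterBuilder()
          .append(DateTimeFormatter.ISO_LOCAL_DATE_TIME)
          .optionalStart().appendOffset("+HH:mm", "Z").optionalEnd()
          .toFormatter();

      public static void main(String[] args) {
        // Both an explicit offset and the "Z" shorthand parse cleanly.
        System.out.println(OffsetDateTime.parse("2022-04-27T09:47:26+02:00", FMT));
        System.out.println(OffsetDateTime.parse("2022-04-27T09:47:26Z", FMT));
      }
    }
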
diff --git a/exec/java-exec/src/main/resources/drill-module.conf b/exec/java-exec/src/main/resources/drill-module.conf
index 56b1515e83..d5277ff54c 100644
--- a/exec/java-exec/src/main/resources/drill-module.conf
+++ b/exec/java-exec/src/main/resources/drill-module.conf
@@ -693,6 +693,7 @@ drill.exec.options: {
     # Property name and value should be separated by =.
     # Properties should be separated by new line (\n).
     store.hive.conf.properties: "",
+    store.json.enable_v2_reader: true,
     store.json.all_text_mode: false,
     store.json.writer.allow_nan_inf: true,
     store.json.reader.allow_nan_inf: true,
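
The new store.json.enable_v2_reader option defaults to true, and the tests
below flip it per session rather than cluster-wide. A hedged sketch of that
toggle, using the ClusterTest-style client fixture the tests rely on:

    // Sketch only; assumes a ClusterTest-style "client" fixture as in the tests below.
    client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, false); // force the V1 reader
    try {
      // ... run queries against the V1 JSON reader ...
    } finally {
      client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);      // restore the default
    }
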
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
index f79eb60ef3..df83dbc159 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
@@ -22,6 +22,7 @@ import org.apache.drill.categories.SqlTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
@@ -49,7 +50,9 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select n_name, *, n_name, n_name from cp.`tpch/nation.parquet`")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarCommaSameColumnRepeated/q1.tsv")
-      .baselineTypes(TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_name", "n_nationkey", "n_name0", "n_regionkey", "n_comment", "n_name00", "n_name1")
       .build().run();
 
@@ -57,7 +60,9 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select n_name, *, n_name, n_name from cp.`tpch/nation.parquet` limit 2")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarCommaSameColumnRepeated/q2.tsv")
-      .baselineTypes(TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_name", "n_nationkey", "n_name0", "n_regionkey", "n_comment", "n_name00", "n_name1")
       .build().run();
 
@@ -65,8 +70,10 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select *, n_name, *, n_name, n_name from cp.`tpch/nation.parquet`")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarCommaSameColumnRepeated/q3.tsv")
-      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
-            TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "n_name0",
             "n_nationkey0", "n_name1", "n_regionkey0", "n_comment0", "n_name00", "n_name10")
       .build().run();
@@ -75,8 +82,10 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select *, n_name, *, n_name, n_name from cp.`tpch/nation.parquet` limit 2")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarCommaSameColumnRepeated/q4.tsv")
-      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
-            TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "n_name0",
             "n_nationkey0", "n_name1", "n_regionkey0", "n_comment0", "n_name00", "n_name10")
       .build().run();
@@ -89,8 +98,10 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select *, n_name as extra, *, n_name as extra from cp.`tpch/nation.parquet`")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarMultipleStarsRegularColumnAsAlias/q1.tsv")
-      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
-              TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "extra", "n_nationkey0", "n_name0", "n_regionkey0", "n_comment0", "extra0")
       .build().run();
 
@@ -98,8 +109,10 @@ public class TestStarQueries extends BaseTestQuery {
       .sqlQuery("select *, n_name as extra, *, n_name as extra from cp.`tpch/nation.parquet` limit 2")
       .ordered()
       .csvBaselineFile("testframework/testStarQueries/testSelStarMultipleStarsRegularColumnAsAlias/q2.tsv")
-      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR,
-              TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+      .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                     TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                     TypeProtos.MinorType.VARCHAR)
       .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "extra", "n_nationkey0", "n_name0", "n_regionkey0", "n_comment0", "extra0")
       .build().run();
   }
@@ -111,7 +124,9 @@ public class TestStarQueries extends BaseTestQuery {
     .sqlQuery("select *, *, n_name from cp.`tpch/nation.parquet`")
     .ordered()
     .csvBaselineFile("testframework/testStarQueries/testSelStarMultipleStars/q1.tsv")
-    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                   TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                   TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
     .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "n_nationkey0", "n_name0", "n_regionkey0", "n_comment0", "n_name1")
     .build().run();
 
@@ -119,7 +134,9 @@ public class TestStarQueries extends BaseTestQuery {
     .sqlQuery("select *, *, n_name from cp.`tpch/nation.parquet` limit 2")
     .ordered()
     .csvBaselineFile("testframework/testStarQueries/testSelStarMultipleStars/q2.tsv")
-    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                   TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                   TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
     .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "n_nationkey0", "n_name0", "n_regionkey0", "n_comment0", "n_name1")
     .build().run();
   }
@@ -131,40 +148,74 @@ public class TestStarQueries extends BaseTestQuery {
     .sqlQuery("select *, n_nationkey, *, n_name from cp.`tpch/nation.parquet` limit 2")
     .ordered()
     .csvBaselineFile("testframework/testStarQueries/testSelStarWithAdditionalColumnLimit/q1.tsv")
-    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.VARCHAR)
+    .baselineTypes(TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT,
+                   TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.INT,
+                   TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.INT, TypeProtos.MinorType.VARCHAR,
+                   TypeProtos.MinorType.VARCHAR)
     .baselineColumns("n_nationkey", "n_name", "n_regionkey", "n_comment", "n_nationkey0", "n_nationkey1", "n_name0", "n_regionkey0", "n_comment0", "n_name1")
     .build().run();
   }
 
+  public static final String ENABLE_V2_READER = "ALTER SESSION SET `" + ExecConstants.ENABLE_V2_JSON_READER_KEY + "` = %s";
+
   @Test
-  public void testSelStarOrderBy() throws Exception{
-    testBuilder()
-        .ordered()
-        .sqlQuery(" select * from cp.`employee.json` order by last_name")
-        .sqlBaselineQuery(" select employee_id, full_name,first_name,last_name,position_id,position_title,store_id," +
-            " department_id,birth_date,hire_date,salary,supervisor_id,education_level,marital_status,gender,management_role " +
+  public void testSelStarOrderBy() throws Exception {
+    // See DRILL-7522
+    String query = "select * from cp.`employee.json` order by last_name";
+    String baselineQueryHead = "select employee_id, full_name, first_name, last_name, position_id, position_title, store_id," +
+            " department_id, birth_date, hire_date, ";
+    String baselineQueryTail = "salary, supervisor_id, education_level, marital_status, gender, management_role " +
             " from cp.`employee.json` " +
-            " order by last_name ")
-        .build().run();
-
+            " order by last_name";
+    try {
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "false")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + baselineQueryTail)
+          .build().run();
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "true")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + "end_date, " + baselineQueryTail)
+          .build().run();
+    } finally {
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+    }
   }
 
   @Test
   @Category(UnlikelyTest.class)
-  public void testSelStarOrderByLimit() throws Exception{
-    testBuilder()
-        .ordered()
-        .sqlQuery(" select * from cp.`employee.json` order by last_name limit 2")
-        .sqlBaselineQuery(" select employee_id, full_name,first_name,last_name,position_id,position_title,store_id," +
-            " department_id,birth_date,hire_date,salary,supervisor_id,education_level,marital_status,gender,management_role " +
-            " from cp.`employee.json` " +
-            " order by last_name limit 2")
-        .build().run();
-
+  public void testSelStarOrderByLimit() throws Exception {
+    // See DRILL-7522
+    String query = "select * from cp.`employee.json` order by last_name limit 2";
+    String baselineQueryHead = "select employee_id, full_name, first_name, last_name, position_id, position_title, store_id, " +
+            "department_id, birth_date, hire_date, ";
+    String baselineQueryTail = "salary, supervisor_id, education_level, marital_status, " +
+            "gender, management_role " +
+            "from cp.`employee.json` " +
+            "order by last_name limit 2";
+    try {
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "false")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + baselineQueryTail)
+          .build().run();
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "true")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + "end_date, " + baselineQueryTail)
+          .build().run();
+    } finally {
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+    }
   }
 
   @Test
-  public void testSelStarPlusRegCol() throws Exception{
+  public void testSelStarPlusRegCol() throws Exception {
     testBuilder()
         .unOrdered()
         .sqlQuery("select *, n_nationkey as key2 from cp.`tpch/nation.parquet` order by n_name limit 2")
@@ -174,16 +225,31 @@ public class TestStarQueries extends BaseTestQuery {
   }
 
   @Test
-  public void testSelStarWhereOrderBy() throws Exception{
-    testBuilder()
-        .ordered()
-        .sqlQuery("select * from cp.`employee.json` where first_name = 'James' order by last_name")
-        .sqlBaselineQuery("select employee_id, full_name,first_name,last_name,position_id,position_title,store_id," +
-            " department_id,birth_date,hire_date,salary,supervisor_id,education_level,marital_status,gender,management_role " +
-            " from cp.`employee.json` " +
-            " where first_name = 'James' order by last_name")
-        .build().run();
+  public void testSelStarWhereOrderBy() throws Exception {
+    // See DRILL-7522
+    String query = "select * from cp.`employee.json` where first_name = 'James' order by last_name";
+    String baselineQueryHead = "select employee_id, full_name, first_name, last_name, position_id, position_title, store_id," +
+        " department_id, birth_date, hire_date, ";
+    String baselineQueryTail = "salary, supervisor_id, education_level, marital_status, gender,management_role " +
+        " from cp.`employee.json` " +
+        " where first_name = 'James' order by last_name";
 
+    try {
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "false")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + baselineQueryTail)
+          .build().run();
+      testBuilder()
+          .ordered()
+          .optionSettingQueriesForTestQuery(ENABLE_V2_READER, "true")
+          .sqlQuery(query)
+          .sqlBaselineQuery(baselineQueryHead + "end_date, " + baselineQueryTail)
+          .build().run();
+    } finally {
+      resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+    }
   }
 
   @Test
@@ -192,9 +258,10 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
         .ordered()
         .sqlQuery("select * from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
-        .sqlBaselineQuery("select n.n_nationkey, n.n_name,n.n_regionkey,n.n_comment,r.r_regionkey,r.r_name, r.r_comment from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlBaselineQuery("select n.n_nationkey, n.n_name,n.n_regionkey,n.n_comment,r.r_regionkey,r.r_name, r.r_comment " +
+                          "from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                           "where n.n_regionkey = r.r_regionkey order by n.n_name")
         .build().run();
-
   }
 
   @Test
@@ -202,33 +269,37 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
         .ordered()
         .sqlQuery("select n.* from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
-        .sqlBaselineQuery("select n.n_nationkey, n.n_name, n.n_regionkey, n.n_comment from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlBaselineQuery("select n.n_nationkey, n.n_name, n.n_regionkey, n.n_comment " +
+                          "from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                          "where n.n_regionkey = r.r_regionkey order by n.n_name")
         .build().run();
-
   }
 
   @Test
   public void testSelRightStarJoin() throws Exception {
     testBuilder()
         .ordered()
-        .sqlQuery("select r.* from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
-        .sqlBaselineQuery("select r.r_regionkey, r.r_name, r.r_comment from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlQuery("select r.* from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                  "where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlBaselineQuery("select r.r_regionkey, r.r_name, r.r_comment " +
+                          "from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                          "where n.n_regionkey = r.r_regionkey order by n.n_name")
         .build().run();
-
   }
 
   @Test
   public void testSelStarRegColConstJoin() throws Exception {
     testBuilder()
         .ordered()
-        .sqlQuery("select *, n.n_nationkey as n_nationkey0, 1 + 2 as constant from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlQuery("select *, n.n_nationkey as n_nationkey0, 1 + 2 as constant " +
+                  "from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                  "where n.n_regionkey = r.r_regionkey order by n.n_name")
         .sqlBaselineQuery(" select n.n_nationkey, n.n_name, n.n_regionkey, n.n_comment, r.r_regionkey, r.r_name, r.r_comment, " +
             " n.n_nationkey as n_nationkey0, 1 + 2 as constant " +
             " from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
             " where n.n_regionkey = r.r_regionkey " +
             " order by n.n_name")
         .build().run();
-
   }
 
   @Test
@@ -236,9 +307,10 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
         .unOrdered()
         .sqlQuery("select n.*, r.* from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey")
-        .sqlBaselineQuery("select n.n_nationkey,n.n_name,n.n_regionkey,n.n_comment,r.r_regionkey,r.r_name,r.r_comment from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey order by n.n_name")
+        .sqlBaselineQuery("select n.n_nationkey,n.n_name,n.n_regionkey,n.n_comment,r.r_regionkey,r.r_name,r.r_comment " +
+                          "from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r " +
+                          "where n.n_regionkey = r.r_regionkey order by n.n_name")
         .build().run();
-
   }
 
   @Test
@@ -247,9 +319,9 @@ public class TestStarQueries extends BaseTestQuery {
         .unOrdered()
         .sqlQuery("select * from cp.`tpch/nation.parquet` n1, cp.`tpch/nation.parquet` n2 where n1.n_nationkey = n2.n_nationkey")
         .sqlBaselineQuery("select n1.n_nationkey,n1.n_name,n1.n_regionkey,n1.n_comment,n2.n_nationkey,n2.n_name,n2.n_regionkey, n2.n_comment " +
-            "from cp.`tpch/nation.parquet` n1, cp.`tpch/nation.parquet` n2 where n1.n_nationkey = n2.n_nationkey")
+                          "from cp.`tpch/nation.parquet` n1, cp.`tpch/nation.parquet` n2 " +
+                          "where n1.n_nationkey = n2.n_nationkey")
         .build().run();
-
   }
 
   @Test // DRILL-1293
@@ -295,7 +367,8 @@ public class TestStarQueries extends BaseTestQuery {
   @Test(expected = UserException.class)  // Should get "At line 1, column 8: Column 'n_nationkey' is ambiguous"
   public void testSelStarAmbiguousJoin() throws Exception {
     try {
-      test("select x.n_nationkey, x.n_name, x.n_regionkey, x.r_name from (select * from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey) x " );
+      test("select x.n_nationkey, x.n_name, x.n_regionkey, x.r_name from " +
+           "(select * from cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r where n.n_regionkey = r.r_regionkey) x " );
     } catch (UserException e) {
       // Expected
       throw e;
@@ -312,9 +385,12 @@ public class TestStarQueries extends BaseTestQuery {
   public void testSelStarSubQPrefix() throws Exception {
     test("select t.n_nationkey, t.n_name, t.n_regionkey from (select * from cp.`tpch/nation.parquet`) t where t.n_regionkey > 1 order by t.n_name" );
 
-    test("select n.n_regionkey, count(*) as cnt from ( select * from ( select * from cp.`tpch/nation.parquet`) t where t.n_nationkey < 10 ) n where n.n_nationkey >1 group by n.n_regionkey order by n.n_regionkey ; ");
+    test("select n.n_regionkey, count(*) as cnt from " +
+         "( select * from ( select * from cp.`tpch/nation.parquet`) t where t.n_nationkey < 10 ) n " +
+         "where n.n_nationkey >1 group by n.n_regionkey order by n.n_regionkey ; ");
 
-    test("select t.n_regionkey, count(*) as cnt from (select * from cp.`tpch/nation.parquet`) t where t.n_nationkey > 1 group by t.n_regionkey order by t.n_regionkey;" );
+    test("select t.n_regionkey, count(*) as cnt from (select * from cp.`tpch/nation.parquet`) t " +
+         "where t.n_nationkey > 1 group by t.n_regionkey order by t.n_regionkey;" );
   }
 
   @Test  // Select * in SubQuery : regular columns appear in select clause, where, group by, order by.
@@ -384,7 +460,6 @@ public class TestStarQueries extends BaseTestQuery {
          " where x.n_nationkey > 5 \n" +
          " group by x.n_regionkey \n" +
          " order by cnt limit 5; ");
-
   }
 
   @Test // DRILL-595 : Join two CTE, each having select * : regular columns appear in the select , where and on clause, group by, order by.
@@ -411,19 +486,22 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
         .ordered()
         .sqlQuery("select *  from cp.`tpch/nation.parquet` order by substr(n_name, 2, 5) limit 3")
-        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey from cp.`tpch/nation.parquet` order by substr(n_name, 2, 5) limit 3 ")
+        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey " +
+                          "from cp.`tpch/nation.parquet` order by substr(n_name, 2, 5) limit 3 ")
         .build().run();
 
     testBuilder()
         .ordered()
         .sqlQuery("select *, n_nationkey + 5 as myexpr from cp.`tpch/nation.parquet` limit 3")
-        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey, n_nationkey + 5 as myexpr from cp.`tpch/nation.parquet` order by n_nationkey limit 3")
+        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey, n_nationkey + 5 as myexpr " +
+                          "from cp.`tpch/nation.parquet` order by n_nationkey limit 3")
         .build().run();
 
     testBuilder()
         .ordered()
         .sqlQuery("select *  from cp.`tpch/nation.parquet` where n_nationkey + 5 > 10 limit 3")
-        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey  from cp.`tpch/nation.parquet` where n_nationkey + 5 > 10 order by n_nationkey limit 3")
+        .sqlBaselineQuery("select n_comment, n_name, n_nationkey, n_regionkey  from cp.`tpch/nation.parquet` " +
+                          "where n_nationkey + 5 > 10 order by n_nationkey limit 3")
         .build().run();
   }
 
@@ -435,7 +513,7 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
     .sqlQuery("select * from dfs.`multilevel/parquet` where dir0=1994 and dir1='Q1' order by dir0 limit 1")
     .ordered()
-    .baselineColumns("dir0", "dir1", "o_clerk", "o_comment", "o_custkey", "o_orderdate", "o_orderkey",  "o_orderpriority", "o_orderstatus", "o_shippriority",  "o_totalprice")
+    .baselineColumns("dir0", "dir1", "o_clerk", "o_comment", "o_custkey", "o_orderdate", "o_orderkey", "o_orderpriority", "o_orderstatus", "o_shippriority",  "o_totalprice")
     .baselineValues("1994", "Q1", "Clerk#000000743", "y pending requests integrate", 1292, mydate, 66, "5-LOW", "F",  0, 104190.66)
     .build().run();
   }
@@ -446,14 +524,16 @@ public class TestStarQueries extends BaseTestQuery {
     testBuilder()
         .unOrdered()
         .sqlQuery("select * from cp.`tpch/nation.parquet` where n_regionkey in (select r_regionkey from cp.`tpch/region.parquet`)")
-        .sqlBaselineQuery("select n_nationkey, n_name, n_regionkey, n_comment from cp.`tpch/nation.parquet` where n_regionkey in (select r_regionkey from cp.`tpch/region.parquet`)")
+        .sqlBaselineQuery("select n_nationkey, n_name, n_regionkey, n_comment from cp.`tpch/nation.parquet` " +
+                          "where n_regionkey in (select r_regionkey from cp.`tpch/region.parquet`)")
         .build().run();
 
     // multiple columns in "IN" subquery predicates.
     testBuilder()
         .unOrdered()
         .sqlQuery("select * from cp.`tpch/nation.parquet` where (n_nationkey, n_name) in ( select n_nationkey, n_name from cp.`tpch/nation.parquet`)")
-        .sqlBaselineQuery("select n_nationkey, n_name, n_regionkey, n_comment from cp.`tpch/nation.parquet` where (n_nationkey, n_name) in ( select n_nationkey, n_name from cp.`tpch/nation.parquet`)")
+        .sqlBaselineQuery("select n_nationkey, n_name, n_regionkey, n_comment from cp.`tpch/nation.parquet` " +
+                          "where (n_nationkey, n_name) in ( select n_nationkey, n_name from cp.`tpch/nation.parquet`)")
         .build().run();
 
     // Multiple in subquery predicates.
@@ -558,5 +638,4 @@ public class TestStarQueries extends BaseTestQuery {
         .build()
         .run();
   }
-
 }
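
The paired baselines above capture the user-visible side of DRILL-7522: under
the V2 reader, select * on employee.json also projects end_date, a column the
V1 reader omits (presumably because it holds no non-null values in the file).
A hedged way to observe the same difference by hand with the test(...) helper
used in this class, assuming ALTER SESSION RESET restores the default:

    test("ALTER SESSION SET `store.json.enable_v2_reader` = false");
    test("select * from cp.`employee.json` limit 1"); // no end_date column
    test("ALTER SESSION SET `store.json.enable_v2_reader` = true");
    test("select * from cp.`employee.json` limit 1"); // includes end_date
    test("ALTER SESSION RESET `store.json.enable_v2_reader`");
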
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java b/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
index 58e2e80a31..97a7a7ddac 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
@@ -117,6 +117,17 @@ public class TestEmptyInputSql extends BaseTestQuery {
 
   @Test
   public void testQueryMapArrayEmptyJson() throws Exception {
+    try {
+      enableV2Reader(false);
+      doTestQueryMapArrayEmptyJson();
+      enableV2Reader(true);
+      doTestQueryMapArrayEmptyJson();
+    } finally {
+      resetV2Reader();
+    }
+  }
+
+  private void doTestQueryMapArrayEmptyJson() throws Exception {
     SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("col1", TypeProtos.MinorType.INT)
         .addNullable("col2", TypeProtos.MinorType.INT)
@@ -132,6 +143,14 @@ public class TestEmptyInputSql extends BaseTestQuery {
         .run();
   }
 
+  private void enableV2Reader(boolean enable) throws Exception {
+    alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() throws Exception {
+    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
+
   /**
    * Test with query against an empty file. Select clause has three expressions.
    * 1.0 + 100.0 as constant expression, is resolved to required FLOAT8/VARDECIMAL
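
The enableV2Reader/resetV2Reader helpers make the run-under-both-readers
pattern explicit. If more tests adopt it, the pattern could be factored out;
a hypothetical sketch (the ThrowingRunnable helper type is invented here for
illustration):

    interface ThrowingRunnable { void run() throws Exception; } // hypothetical helper type

    private void runUnderBothJsonReaders(ThrowingRunnable body) throws Exception {
      try {
        enableV2Reader(false);  // exercise the V1 reader
        body.run();
        enableV2Reader(true);   // then the V2 reader
        body.run();
      } finally {
        resetV2Reader();        // leave the session option at its default
      }
    }

    // usage: runUnderBothJsonReaders(this::doTestQueryMapArrayEmptyJson);
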
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java
index 83cc81f032..3870c9d657 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/impl/TestTypeFns.java
@@ -307,19 +307,50 @@ public class TestTypeFns extends ClusterTest {
         .go();
   }
 
+  /**
+   * The V1 JSON reader appears to omit columns that are missing from the file.
+   */
   @Test
-  public void testTypeOfWithFile() throws Exception {
-    // Column `x` does not actually appear in the file.
-    String sql ="SELECT typeof(bi) AS bi_t, typeof(fl) AS fl_t, typeof(st) AS st_t,\n" +
-                "       typeof(mp) AS mp_t, typeof(ar) AS ar_t, typeof(nu) AS nu_t,\n" +
-                "       typeof(x) AS x_t\n" +
-                "FROM cp.`jsoninput/allTypes.json`";
-     testBuilder()
-      .sqlQuery(sql)
-      .ordered()
-      .baselineColumns("bi_t",   "fl_t",   "st_t",    "mp_t", "ar_t",   "nu_t", "x_t")
-      .baselineValues( "BIGINT", "FLOAT8", "VARCHAR", "MAP",  "BIGINT", "NULL", "NULL")
-      .go();
+  public void testTypeOfWithFileV1() throws Exception {
+    try {
+      enableV2Reader(false);
+      // Column `x` does not actually appear in the file.
+      String sql ="SELECT typeof(bi) AS bi_t, typeof(fl) AS fl_t, typeof(st) AS st_t,\n" +
+                  "       typeof(mp) AS mp_t, typeof(ar) AS ar_t, typeof(nu) AS nu_t,\n" +
+                  "       typeof(x) AS x_t\n" +
+                  "FROM cp.`jsoninput/allTypes.json`";
+       testBuilder()
+        .sqlQuery(sql)
+        .ordered()
+        .baselineColumns("bi_t",   "fl_t",   "st_t",    "mp_t", "ar_t",   "nu_t", "x_t")
+        .baselineValues( "BIGINT", "FLOAT8", "VARCHAR", "MAP",  "BIGINT", "NULL", "NULL")
+        .go();
+    } finally {
+      resetV2Reader();
+    }
+  }
+
+  /**
+   * The V2 JSON reader fills in missing columns with a nullable VARCHAR.
+   */
+  @Test
+  public void testTypeOfWithFileV2() throws Exception {
+    try {
+      enableV2Reader(true);
+      // Column `x` does not actually appear in the file.
+      String sql ="SELECT typeof(bi) AS bi_t, typeof(fl) AS fl_t, typeof(st) AS st_t,\n" +
+                  "       typeof(mp) AS mp_t, typeof(ar) AS ar_t, typeof(nu) AS nu_t,\n" +
+                  "       typeof(x) AS x_t\n" +
+                  "FROM cp.`jsoninput/allTypes.json`";
+       testBuilder()
+        .sqlQuery(sql)
+        .ordered()
+        .baselineColumns("bi_t",   "fl_t",   "st_t",    "mp_t", "ar_t",   "nu_t",    "x_t")
+        .baselineValues( "BIGINT", "FLOAT8", "VARCHAR", "MAP",  "BIGINT", "VARCHAR", "VARCHAR")
+        .go();
+    } finally {
+      resetV2Reader();
+    }
   }
 
   @Test
@@ -345,4 +376,12 @@ public class TestTypeFns extends ClusterTest {
       client.resetSession(ExecConstants.ENABLE_UNION_TYPE_KEY);
     }
   }
+
+  private void enableV2Reader(boolean enable) throws Exception {
+    client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() throws Exception {
+    client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
 }
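
The split into V1/V2 test variants pins down the contrast the javadocs
describe: V1 reports NULL for a column absent from the file, while V2
materializes missing columns as nullable VARCHAR. The same contrast distilled
to a single column, as a hedged sketch built on the testBuilder() API above:

    enableV2Reader(true);
    try {
      testBuilder()
          .sqlQuery("SELECT typeof(x) AS x_t FROM cp.`jsoninput/allTypes.json`")
          .ordered()
          .baselineColumns("x_t")
          .baselineValues("VARCHAR") // V1 would report "NULL" here
          .go();
    } finally {
      resetV2Reader();
    }
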
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
index 63cf2b2bec..16c42ad0d2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
@@ -27,20 +27,19 @@ import static org.junit.Assert.fail;
 
 import java.util.Iterator;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
 import org.apache.drill.exec.physical.config.Limit;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
 import org.apache.drill.exec.proto.UserBitShared.NamePart;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
-import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.RecordBatch.IterOutcome;
 import org.apache.drill.exec.record.TypedFieldId;
@@ -51,6 +50,7 @@ import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.IntVector;
 import org.apache.drill.exec.vector.VarCharVector;
 import org.apache.drill.test.SubOperatorTest;
+import org.apache.drill.test.rowSet.RowSetUtilities;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
@@ -60,8 +60,7 @@ import org.slf4j.LoggerFactory;
  * Test the implementation of the Drill Volcano iterator protocol that
  * wraps the modular operator implementation.
  */
-
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestOperatorRecordBatch extends SubOperatorTest {
   private static final Logger logger = LoggerFactory.getLogger(TestOperatorRecordBatch.class);
 
@@ -70,7 +69,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * and provides a light-weight vector container. Returns a
    * defined number of (batches) with an optional schema change.
    */
-
   private class MockOperatorExec implements OperatorExec {
 
     public boolean bindCalled;
@@ -117,11 +115,11 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
         return false;
       }
       if (nextCount == schemaChangeAt) {
-        BatchSchemaBuilder newSchema = new BatchSchemaBuilder(batchAccessor.schema());
-        newSchema.schemaBuilder()
-            .add("b", MinorType.VARCHAR);
-        VectorContainer newContainer = new VectorContainer(fixture.allocator(), newSchema.build());
-        batchAccessor.addBatch(newContainer);
+        VectorContainer container = batchAccessor.container();
+        container.addOrGet(
+            MaterializedField.create("b", Types.required(MinorType.VARCHAR)));
+        container.buildSchema(SelectionVectorMode.NONE);
+        batchAccessor.addBatch(container);
       }
       return true;
     }
@@ -137,11 +135,11 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   }
 
   private static VectorContainer mockBatch() {
-    SchemaBuilder schemaBuilder = new SchemaBuilder()
-      .add("a", MinorType.INT);
-    VectorContainer container = new VectorContainer(fixture.allocator(), new BatchSchemaBuilder()
-        .withSchemaBuilder(schemaBuilder)
-        .build());
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.INT)
+        .build();
+    VectorContainer container = new VectorContainer(fixture.allocator());
+    container.addOrGet(schema.column(0));
     container.buildSchema(SelectionVectorMode.NONE);
     return container;
   }
@@ -155,7 +153,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Simulate a normal run: return some batches, encounter a schema change.
    */
-
   @Test
   public void testNormalLifeCycle() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -167,24 +164,20 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
       assertNotNull(opBatch.getContext());
 
       // First call to next() builds schema
-
       assertEquals(IterOutcome.OK_NEW_SCHEMA, opBatch.next());
       assertTrue(opExec.bindCalled);
       assertTrue(opExec.buildSchemaCalled);
       assertEquals(0, opExec.nextCount);
 
       // Second call returns the first batch
-
       assertEquals(IterOutcome.OK, opBatch.next());
       assertEquals(1, opExec.nextCount);
 
       // Third call causes a schema change
-
       assertEquals(IterOutcome.OK_NEW_SCHEMA, opBatch.next());
       assertEquals(2, opExec.nextCount);
 
       // Fourth call reaches EOF
-
       assertEquals(IterOutcome.NONE, opBatch.next());
       assertEquals(3, opExec.nextCount);
 
@@ -201,7 +194,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   * Simulate a truncated life cycle: next() is never called. Not a valid
   * part of the protocol, but the operator should be ready for it anyway.
    */
-
   @Test
   public void testTruncatedLifeCycle() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -218,7 +210,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Simulate reaching EOF when trying to create the schema.
    */
-
   @Test
   public void testSchemaEOF() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -237,7 +228,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * Simulate reaching EOF on the first batch. This simulated data source
    * discovered a schema, but had no data.
    */
-
   @Test
   public void testFirstBatchEOF() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -257,7 +247,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Simulate the caller failing the operator before getting the schema.
    */
-
   @Test
   public void testFailEarly() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -298,7 +287,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * Simulate the caller failing the operator after EOF but before close.
   * This is a silly time to fail, but we have to handle it anyway.
    */
-
   @Test
   public void testFailBeforeClose() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -312,7 +300,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
       opBatch.cancel();
 
       // Already hit EOF, so fail won't be passed along.
-
       assertFalse(opExec.cancelCalled);
     } catch (Exception e) {
       fail();
@@ -324,7 +311,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * Simulate the caller failing the operator after close.
   * This violates the operator protocol, but we have to handle it anyway.
    */
-
   @Test
   public void testFailAfterClose() {
     MockOperatorExec opExec = new MockOperatorExec();
@@ -351,14 +337,11 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * container operations. Probably an artifact of its history. In any event, make
    * sure those methods are passed through to the container accessor.
    */
-
   @Test
   public void testBatchAccessor() {
-    SchemaBuilder schemaBuilder = new SchemaBuilder()
-      .add("a", MinorType.INT)
-      .add("b", MinorType.VARCHAR);
-    BatchSchema schema = new BatchSchemaBuilder()
-        .withSchemaBuilder(schemaBuilder)
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.INT)
+        .add("b", MinorType.VARCHAR)
         .build();
     SingleRowSet rs = fixture.rowSetBuilder(schema)
         .addRow(10, "fred")
@@ -369,7 +352,7 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
 
     try (OperatorRecordBatch opBatch = makeOpBatch(opExec)) {
       assertEquals(IterOutcome.OK_NEW_SCHEMA, opBatch.next());
-      assertEquals(schema, opBatch.getSchema());
+      RowSetUtilities.assertSchemasEqual(schema, opBatch.getSchema());
       assertEquals(2, opBatch.getRecordCount());
       assertSame(rs.container(), opBatch.getOutgoingContainer());
 
@@ -379,7 +362,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
 
       // Not a full test of the schema path; just make sure that the
       // pass-through to the Vector Container works.
-
       SchemaPath path = SchemaPath.create(NamePart.newBuilder().setName("a").build());
       TypedFieldId id = opBatch.getValueVectorId(path);
       assertEquals(MinorType.INT, id.getFinalType().getMinorType());
@@ -393,7 +375,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
       assertEquals(1, id.getFieldIds()[0]);
 
       // Sanity check of getValueAccessorById()
-
       VectorWrapper<?> w = opBatch.getValueAccessorById(IntVector.class, 0);
       assertNotNull(w);
       assertEquals("a", w.getValueVector().getField().getName());
@@ -404,7 +385,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
       // getWritableBatch() ?
 
       // No selection vectors
-
       try {
         opBatch.getSelectionVector2();
         fail();
@@ -439,7 +419,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     int schemaVersion = opExec.batchAccessor().schemaVersion();
 
     // Be tidy: start at 1.
-
     assertEquals(1, schemaVersion);
 
     // Changing data does not trigger schema change
@@ -449,7 +428,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     assertEquals(schemaVersion, opExec.batchAccessor().schemaVersion());
 
     // Different container, same vectors, does not trigger a change
-
     VectorContainer c2 = new VectorContainer(fixture.allocator());
     for (VectorWrapper<?> vw : container) {
       c2.add(vw.getValueVector());
@@ -463,7 +441,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
 
     // Replacing a vector with another of the same type does trigger
     // a change.
-
     VectorContainer c3 = new VectorContainer(fixture.allocator());
     c3.add(container.getValueVector(0).getValueVector());
     c3.add(TypeHelper.getNewVector(
@@ -475,12 +452,10 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     schemaVersion = opExec.batchAccessor().schemaVersion();
 
     // No change if same schema again
-
     opExec.batchAccessor.addBatch(c3);
     assertEquals(schemaVersion, opExec.batchAccessor().schemaVersion());
 
     // Adding a vector triggers a change
-
     MaterializedField c = SchemaBuilder.columnSchema("c", MinorType.INT, DataMode.OPTIONAL);
     c3.add(TypeHelper.getNewVector(c, fixture.allocator(), null));
     c3.buildSchema(SelectionVectorMode.NONE);
@@ -489,12 +464,10 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     schemaVersion = opExec.batchAccessor().schemaVersion();
 
     // No change if same schema again
-
     opExec.batchAccessor.addBatch(c3);
     assertEquals(schemaVersion, opExec.batchAccessor().schemaVersion());
 
     // Removing a vector triggers a change
-
     c3.remove(c3.getValueVector(2).getValueVector());
     c3.buildSchema(SelectionVectorMode.NONE);
     assertEquals(2, c3.getNumberOfColumns());
@@ -503,7 +476,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     schemaVersion = opExec.batchAccessor().schemaVersion();
 
     // Clean up
-
     opExec.close();
     c2.clear();
     c3.clear();
@@ -512,7 +484,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Test that an SV2 is properly handled by the proper container accessor.
    */
-
   @Test
   public void testSv2() {
     TupleMetadata schema = new SchemaBuilder()
@@ -542,7 +513,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
     assertTrue(opExec.closeCalled);
 
     // Must release SV2
-
     rs.clear();
   }
 
@@ -559,7 +529,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Failure on the bind method.
    */
-
   @Test
   public void testWrappedExceptionOnBind() {
     MockOperatorExec opExec = new MockOperatorExec() {
@@ -654,7 +623,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
    * Failure on the second or subsequent calls to next(), when actually
    * fetching a record batch.
    */
-
   @Test
   public void testWrappedExceptionOnNext() {
     MockOperatorExec opExec = new MockOperatorExec() {
@@ -704,7 +672,6 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   /**
    * Failure when closing the operator implementation.
    */
-
   @Test
   public void testWrappedExceptionOnClose() {
     MockOperatorExec opExec = new MockOperatorExec() {
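
Several hunks above replace BatchSchemaBuilder with a TupleMetadata-first
pattern: build the metadata with SchemaBuilder, let the container create the
vectors via addOrGet(), then cache the schema with buildSchema(). A condensed
sketch of that pattern, following the calls shown in the diff:

    TupleMetadata schema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("b", MinorType.VARCHAR)
        .build();
    VectorContainer container = new VectorContainer(fixture.allocator());
    for (int i = 0; i < schema.size(); i++) {
      container.addOrGet(schema.column(i)); // one MaterializedField per column
    }
    container.buildSchema(SelectionVectorMode.NONE);
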
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArray.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArray.java
index 6009ed3c51..9346aaea90 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArray.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArray.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.fail;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.MockScanBuilder;
@@ -50,7 +50,7 @@ import org.junit.experimental.categories.Category;
  * Test the "columns" array mechanism integrated with the scan schema
  * orchestrator including simulating reading data.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestColumnsArray extends SubOperatorTest {
 
   private static class MockScanner {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayFramework.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayFramework.java
index 35cc9d1e41..90ed824b57 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayFramework.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayFramework.java
@@ -21,7 +21,7 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
@@ -55,7 +55,7 @@ import static org.junit.Assert.assertTrue;
 /**
  * Test the columns-array specific behavior in the columns scan framework.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestColumnsArrayFramework extends SubOperatorTest {
 
   private static final Path MOCK_FILE_PATH = new Path("file:/w/x/y/z.csv");
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayParser.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayParser.java
index 809c8ea363..835b0185fd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayParser.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestColumnsArrayParser.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.physical.impl.scan.columns.ColumnsArrayParser;
@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.Path;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestColumnsArrayParser extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
index 2efbe2af30..9ffda169b0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
@@ -28,7 +28,7 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
@@ -64,7 +64,7 @@ import org.junit.experimental.categories.Category;
  * Focuses on the file metadata itself, assumes that other tests have
  * verified the underlying mechanisms.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestFileScanFramework extends SubOperatorTest {
 
   private static final String MOCK_FILE_NAME = "foo.csv";
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnParser.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnParser.java
index b4971ccf90..cb8c8887d4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnParser.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnParser.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.physical.impl.scan.file.FileMetadataColumn;
 import org.apache.drill.exec.physical.impl.scan.file.ImplicitColumnManager;
@@ -40,7 +40,7 @@ import org.apache.hadoop.fs.Path;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestImplicitColumnParser extends SubOperatorTest {
 
   private ImplicitColumnOptions standardOptions(Path filePath) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnProjection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnProjection.java
index e511e8e0c5..62bfd96498 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnProjection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestImplicitColumnProjection.java
@@ -26,7 +26,7 @@ import static org.junit.Assert.fail;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.file.FileMetadata;
@@ -52,7 +52,7 @@ import org.apache.hadoop.fs.Path;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestImplicitColumnProjection extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java
index fd53d6fe8c..95c0ddbcbd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.physical.impl.scan;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.base.AbstractSubScan;
@@ -44,7 +44,7 @@ import io.netty.buffer.DrillBuf;
  * set follows the same semantics as the original set.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanBatchWriters extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecBasics.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecBasics.java
index 938f3848e2..a67a79c3f9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecBasics.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecBasics.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
@@ -36,7 +36,7 @@ import org.junit.experimental.categories.Category;
  * Tests the basics of the scan operator protocol: error conditions,
  * etc.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecBasics extends BaseScanOperatorExecTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecEarlySchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecEarlySchema.java
index b50394dfa0..ff21a58172 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecEarlySchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecEarlySchema.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
 import org.apache.drill.exec.physical.impl.scan.framework.SchemaNegotiator;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
@@ -35,7 +35,7 @@ import org.junit.experimental.categories.Category;
  * Test "early schema" readers: those that can declare a schema at
  * open time.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecEarlySchema extends BaseScanOperatorExecTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecLateSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecLateSchema.java
index 500b343c1c..918a457288 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecLateSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecLateSchema.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
@@ -40,7 +40,7 @@ import org.junit.experimental.categories.Category;
  * Test "late schema" readers: those like JSON that discover their schema
  * as they read data.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecLateSchema extends BaseScanOperatorExecTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOuputSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOuputSchema.java
index 87ea958492..68cd0044a8 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOuputSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOuputSchema.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.physical.impl.scan;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
@@ -45,7 +45,7 @@ import org.junit.experimental.categories.Category;
  * defines the schema to be output from the scan operator, and forces
  * conversions between reader and output data types.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecOuputSchema extends BaseScanOperatorExecTest {
 
   private static class MockSimpleReader implements ManagedReader<SchemaNegotiator> {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOverflow.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOverflow.java
index 21db742b11..b11b3c1ed6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOverflow.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecOverflow.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
 import org.apache.drill.exec.physical.impl.scan.framework.SchemaNegotiator;
@@ -37,7 +37,7 @@ import org.junit.experimental.categories.Category;
 /**
  * Test vector overflow in the context of the scan operator.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecOverflow extends BaseScanOperatorExecTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java
index 86899da507..0a9aa1fc19 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
 import org.apache.drill.exec.physical.impl.scan.framework.SchemaNegotiator;
@@ -40,7 +40,7 @@ import org.junit.experimental.categories.Category;
  * context of a single scan operator: it cannot help when a query has
  * multiple scans, each in its own fragment.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOperExecSmoothing extends BaseScanOperatorExecTest {
 
   private static class MockEarlySchemaReader2 extends MockEarlySchemaReader {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java
index f7075fffac..7bc00722cc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -53,7 +53,7 @@ import org.junit.experimental.categories.Category;
  * The tests here focus on the scan orchestrator itself; the tests assume
  * that tests for lower-level components have already passed.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOrchestratorEarlySchema extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorImplicitColumns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorImplicitColumns.java
index 36fa31a058..7c7573ca9d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorImplicitColumns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorImplicitColumns.java
@@ -25,7 +25,7 @@ import java.io.IOException;
 import java.nio.file.Paths;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
@@ -59,7 +59,7 @@ import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
  * with implicit file columns provided by the file metadata manager.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOrchestratorImplicitColumns extends SubOperatorTest {
 
   private ImplicitColumnOptions standardOptions(Path filePath) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java
index a57f86f9b6..b703f77310 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.physical.impl.scan;
 
 import static org.junit.Assert.assertFalse;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.MockScanBuilder;
@@ -47,7 +47,7 @@ import org.junit.experimental.categories.Category;
  * that tests for lower-level components have already passed.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScanOrchestratorLateSchema extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/convert/TestColumnConverter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/convert/TestColumnConverter.java
index 969d81ef0c..88fdc41f86 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/convert/TestColumnConverter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/convert/TestColumnConverter.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.physical.impl.scan.convert;
 
 import static org.apache.drill.test.rowSet.RowSetUtilities.intArray;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetTestUtils;
@@ -43,7 +43,7 @@ import org.junit.experimental.categories.Category;
  * Not really much to test, more a verification that the pattern works
  * in practice.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestColumnConverter extends SubOperatorTest {
 
   private static class MockSource {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java
index 9d91cd7851..3be0b4bf49 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.physical.impl.scan.project;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -48,7 +48,7 @@ import org.junit.experimental.categories.Category;
  * values. The ConstantColumnLoader builds and populates these columns.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestConstantColumnLoader extends SubOperatorTest {
 
   private static class DummyColumn implements ConstantColumnSpec {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java
index 882e2169f8..b7d01c895e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestNullColumnLoader.java
@@ -25,7 +25,7 @@ import java.util.List;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBuilderBuilder;
 import org.apache.drill.exec.physical.resultSet.ResultVectorCache;
@@ -51,7 +51,7 @@ import org.junit.experimental.categories.Category;
  * can create the classic nullable Int null column, or one of
  * any other type and mode.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestNullColumnLoader extends SubOperatorTest {
 
   private ResolvedNullColumn makeNullCol(String name, MajorType nullType) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java
index db183dee5e..e0cccb0b71 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestReaderLevelProjection.java
@@ -27,7 +27,7 @@ import static org.junit.Assert.fail;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils;
@@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category;
  * combines these to map out the actual projection.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestReaderLevelProjection extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java
index 1bf35b3a7a..e3d5a70b49 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java
@@ -41,7 +41,7 @@ import io.netty.buffer.DrillBuf;
 import static org.apache.drill.test.rowSet.RowSetUtilities.mapValue;
 import static org.apache.drill.test.rowSet.RowSetUtilities.singleMap;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 
 import static org.apache.drill.test.rowSet.RowSetUtilities.mapArray;
 
@@ -55,7 +55,7 @@ import static org.apache.drill.test.rowSet.RowSetUtilities.mapArray;
  * vector.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestRowBatchMerger extends SubOperatorTest {
 
   public static class RowSetSource implements VectorSource {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestSchemaSmoothing.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestSchemaSmoothing.java
index 842f18e0c9..7b251ceef9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestSchemaSmoothing.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestSchemaSmoothing.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.fail;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.protocol.SchemaTracker;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils;
@@ -87,7 +87,7 @@ import org.junit.experimental.categories.Category;
  * because such an algorithm would require time-travel: looking into
  * the future to know what data will be scanned.)
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSchemaSmoothing extends SubOperatorTest {
 
   private ImplicitColumnOptions standardOptions(List<Path> files) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java
index a30bda94a1..87e3153a6b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java
@@ -26,7 +26,7 @@ import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
@@ -41,7 +41,7 @@ import org.apache.drill.test.SubOperatorTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestBatchValidator extends SubOperatorTest {
 
   public static class CapturingReporter implements BatchValidator.ErrorReporter {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDictArray.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDictArray.java
index 9f3e71f1fb..5cf72660b0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDictArray.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDictArray.java
@@ -27,7 +27,7 @@ import static org.junit.Assert.assertTrue;
 import java.util.Arrays;
 import java.util.Iterator;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.validate.BatchValidator;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
@@ -52,7 +52,7 @@ import org.junit.experimental.categories.Category;
 /**
  * Test dict array support in the result set loader.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderDictArray extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDicts.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDicts.java
index 3fb523116d..ccf2b07faf 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDicts.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderDicts.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.validate.BatchValidator;
@@ -53,7 +53,7 @@ import java.util.Arrays;
 /**
  * Test (non-array) dict support in the result set loader and related classes.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderDicts extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderEmptyProject.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderEmptyProject.java
index ef18b70628..86a25f2cbc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderEmptyProject.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderEmptyProject.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.fail;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
@@ -41,7 +41,7 @@ import org.apache.drill.test.rowSet.RowSetUtilities;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderEmptyProject extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderLimits.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderLimits.java
index 49ff638bc3..16b69f2de0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderLimits.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderLimits.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.fail;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
@@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category;
  * in fact, depend on the row count) and vector overflow (which can occur when
  * the row limit turns out to be too large.)
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderLimits extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMapArray.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMapArray.java
index 5e607f6c4b..41b82f8b58 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMapArray.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMapArray.java
@@ -28,7 +28,7 @@ import static org.junit.Assert.assertTrue;
 import java.util.Arrays;
 import java.util.Iterator;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
@@ -60,7 +60,7 @@ import org.junit.experimental.categories.Category;
  * tests work. Maps, and especially repeated maps, are very complex
  * constructs not to be tackled lightly.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderMapArray extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMaps.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMaps.java
index e869c7c9f6..a3628ef70d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMaps.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderMaps.java
@@ -28,7 +28,7 @@ import static org.junit.Assert.fail;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -56,7 +56,7 @@ import org.junit.experimental.categories.Category;
 /**
  * Test (non-array) map support in the result set loader and related classes.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderMaps extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOmittedValues.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOmittedValues.java
index 6d7f09252f..e0add3b645 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOmittedValues.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOmittedValues.java
@@ -23,8 +23,9 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.physical.impl.validate.BatchValidator;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
 import org.apache.drill.exec.physical.resultSet.RowSetLoader;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
@@ -40,7 +41,7 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderOmittedValues extends SubOperatorTest {
 
   /**
@@ -137,7 +138,6 @@ public class TestResultSetLoaderOmittedValues extends SubOperatorTest {
     // Harvest the row and verify.
 
     RowSet actual = fixture.wrap(rsLoader.harvest());
-//    actual.print();
 
     TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
@@ -210,6 +210,7 @@ public class TestResultSetLoaderOmittedValues extends SubOperatorTest {
     // Harvest and verify
 
     RowSet result = fixture.wrap(rsLoader.harvest());
+    BatchValidator.validate(result);
     assertEquals(rowNumber - 1, result.rowCount());
     RowSetReader reader = result.reader();
     int rowIndex = 0;
@@ -248,9 +249,9 @@ public class TestResultSetLoaderOmittedValues extends SubOperatorTest {
     // Verify that holes were preserved.
 
     result = fixture.wrap(rsLoader.harvest());
+    BatchValidator.validate(result);
     assertEquals(rowNumber, rsLoader.totalRowCount());
     assertEquals(rowNumber - startRowNumber + 1, result.rowCount());
-//    result.print();
     reader = result.reader();
     rowIndex = 0;
     while (reader.next()) {
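
The BatchValidator.validate(result) calls added in this hunk run vector-integrity checks
on the harvested batch before any assertions run. The pattern in isolation (rsLoader,
fixture and expectedRowCount stand in for the surrounding test's setup):

    // Harvest, validate, then assert; a sketch of the pattern added above.
    RowSet result = fixture.wrap(rsLoader.harvest());
    BatchValidator.validate(result);   // checks offsets and counts in the vectors
    assertEquals(expectedRowCount, result.rowCount());
    result.clear();                    // release the underlying buffers
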
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOverflow.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOverflow.java
index a7eea4d418..143ef0c254 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOverflow.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderOverflow.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.fail;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -51,7 +51,7 @@ import org.junit.experimental.categories.Category;
  * Exercise the vector overflow functionality for the result set loader.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderOverflow extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java
index c68bdae69b..14d5bc6e16 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderProtocol.java
@@ -35,7 +35,7 @@ import static org.junit.Assert.fail;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -52,6 +52,9 @@ import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.apache.drill.exec.vector.accessor.TupleWriter.UndefinedColumnException;
 import org.apache.drill.test.SubOperatorTest;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
+import org.apache.drill.exec.physical.rowSet.RowSetReader;
 import org.apache.drill.test.rowSet.RowSetUtilities;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -77,7 +80,7 @@ import org.junit.experimental.categories.Category;
  * the structure. The object tree will show all the components and their
  * current state.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderProtocol extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderRepeatedList.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderRepeatedList.java
index b4c1fea76a..59696cf5e1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderRepeatedList.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderRepeatedList.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.physical.resultSet.impl;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
@@ -64,7 +64,7 @@ import static org.junit.Assert.assertTrue;
  * actually, since the different "slices" need not have the same length...)
  * Repeated lists appear to be used only by JSON.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderRepeatedList extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderTorture.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderTorture.java
index 8e90e1dbb6..d5c404dc69 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderTorture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderTorture.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
 import org.apache.drill.exec.physical.resultSet.RowSetLoader;
@@ -65,7 +65,7 @@ import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
  * things in a single query.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderTorture extends SubOperatorTest {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestResultSetLoaderTorture.class);
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderUnions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderUnions.java
index 7479f15d46..d63a3f8c9a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderUnions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetLoaderUnions.java
@@ -33,7 +33,7 @@ import java.util.Arrays;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
 import org.apache.drill.exec.physical.resultSet.RowSetLoader;
@@ -76,7 +76,7 @@ import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
  * Most operators do not support them. But, JSON uses them, so they must
  * be made to work in the result set loader layer.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetLoaderUnions extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetSchemaChange.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetSchemaChange.java
index 250303ab68..4f53227dae 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetSchemaChange.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultSetSchemaChange.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
@@ -43,7 +43,7 @@ import org.apache.drill.test.rowSet.RowSetUtilities;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultSetSchemaChange extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultVectorCache.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultVectorCache.java
index a44d0bef9e..ec58fc4d8a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultVectorCache.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/impl/TestResultVectorCache.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertNotSame;
 import static org.junit.Assert.assertSame;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -35,7 +35,7 @@ import org.apache.drill.test.SubOperatorTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestResultVectorCache extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/project/TestTupleProjection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/project/TestTupleProjection.java
index 10fc958e53..8484e29137 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/project/TestTupleProjection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/resultSet/project/TestTupleProjection.java
@@ -27,7 +27,7 @@ import static org.junit.Assert.assertTrue;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
@@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category;
  * parsing; the only bits not tested here are those that are
  * inherently specific to some use case.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestTupleProjection extends BaseTest {
 
   private static final ColumnMetadata NORMAL_COLUMN =
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestDummyWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestDummyWriter.java
index ca2a62116f..ac87ae3171 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestDummyWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestDummyWriter.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertFalse;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
@@ -40,7 +40,7 @@ import org.apache.drill.test.SubOperatorTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestDummyWriter extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFillEmpties.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFillEmpties.java
index b918b3cd22..03485434ed 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFillEmpties.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFillEmpties.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category;
  * Note that this test also has the handy side-effect of testing
  * null handling in the accessor classes.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestFillEmpties extends SubOperatorTest {
 
   public static final int ROW_COUNT = 1000;
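
The "fill empties" logic named in the javadoc above kicks in when a writer omits a column
for some rows: the gap is back-filled so the column still reads consistently (NULL for a
nullable column). A sketch against the result-set-loader API used throughout this patch
(schema and values illustrative; the exact back-fill timing is an implementation detail):

    // Write a row while deliberately omitting column "b".
    RowSetLoader writer = rsLoader.writer();
    writer.start();
    writer.scalar("a").setInt(10);   // column "b" is not set for this row
    writer.save();                   // "b" is back-filled; a nullable "b" reads as NULL
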
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFixedWidthWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFixedWidthWriter.java
index c74d526fad..76e7a30854 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFixedWidthWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestFixedWidthWriter.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.record.MaterializedField;
@@ -42,7 +42,7 @@ import org.junit.experimental.categories.Category;
  * overflow, and filling in empty values.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestFixedWidthWriter extends SubOperatorTest {
 
   public static class TestIndex implements ColumnWriterIndex {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestHyperVectorReaders.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestHyperVectorReaders.java
index b765b55aef..5a1e6cdc03 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestHyperVectorReaders.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestHyperVectorReaders.java
@@ -25,7 +25,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet;
 import org.apache.drill.exec.physical.rowSet.RowSet.HyperRowSet;
@@ -48,7 +48,7 @@ import org.junit.experimental.categories.Category;
  * <p>
  * This test does not cover repeated vectors; those tests should be added.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestHyperVectorReaders extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestIndirectReaders.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestIndirectReaders.java
index 887a05b0b9..e4fbf9996a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestIndirectReaders.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestIndirectReaders.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.physical.rowSet;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
@@ -44,7 +44,7 @@ import org.junit.experimental.categories.Category;
  * so if the index works for one reader, it will work for all.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestIndirectReaders extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestMapAccessors.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestMapAccessors.java
index 4057f3365a..e8d40d7359 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestMapAccessors.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestMapAccessors.java
@@ -26,7 +26,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.Iterator;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
 import org.apache.drill.exec.record.MaterializedField;
@@ -58,7 +58,7 @@ import org.junit.experimental.categories.Category;
  * schema, which makes this mechanism far simpler.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestMapAccessors extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestOffsetVectorWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestOffsetVectorWriter.java
index 8fce484661..a4884a1455 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestOffsetVectorWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestOffsetVectorWriter.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.TestFixedWidthWriter.TestIndex;
@@ -48,7 +48,7 @@ import org.junit.experimental.categories.Category;
  * counts.)
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestOffsetVectorWriter extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRepeatedListAccessors.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRepeatedListAccessors.java
index 9fbaa1444e..9a17276e96 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRepeatedListAccessors.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRepeatedListAccessors.java
@@ -27,7 +27,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertSame;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
@@ -63,7 +63,7 @@ import org.junit.experimental.categories.Category;
  * on to the result set loader tests.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestRepeatedListAccessors extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRowSet.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRowSet.java
index 987bd02c85..1ddfe3cb59 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRowSet.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestRowSet.java
@@ -32,7 +32,7 @@ import java.math.BigDecimal;
 import java.util.Arrays;
 import java.util.Map;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet;
 import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
@@ -80,7 +80,7 @@ import org.junit.experimental.categories.Category;
  * A list is an array of variants. Variants are tested elsewhere.
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestRowSet extends SubOperatorTest {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestRowSet.class);
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestScalarAccessors.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestScalarAccessors.java
index c1e803bf9c..6531e95a97 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestScalarAccessors.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestScalarAccessors.java
@@ -33,7 +33,7 @@ import java.time.LocalTime;
 import java.time.ZoneOffset;
 import java.util.Arrays;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
@@ -77,7 +77,7 @@ import org.junit.experimental.categories.Category;
 // TODO: Var16Char
 // TODO: Bit
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestScalarAccessors extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestSchemaBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestSchemaBuilder.java
index e754db5889..0e104b161c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestSchemaBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestSchemaBuilder.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
@@ -46,7 +46,7 @@ import org.junit.experimental.categories.Category;
  * lists and repeated lists. This test verifies that it assembles the various
  * pieces correctly for the various nesting combinations.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSchemaBuilder extends DrillTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariableWidthWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariableWidthWriter.java
index 69190d219c..b66376a12a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariableWidthWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariableWidthWriter.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.TestFixedWidthWriter.TestIndex;
@@ -38,7 +38,7 @@ import org.bouncycastle.util.Arrays;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestVariableWidthWriter extends SubOperatorTest {
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariantAccessors.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariantAccessors.java
index a0dd8ad066..2e7e59824e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariantAccessors.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/TestVariantAccessors.java
@@ -26,7 +26,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.List;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet;
@@ -67,7 +67,7 @@ import org.junit.experimental.categories.Category;
  * and other operators. Some assembly required for future use.)
  */
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestVariantAccessors extends SubOperatorTest {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java
index a0292d6961..2d7a4f6e44 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestAnalyze.java
@@ -152,25 +152,28 @@ public class TestAnalyze extends ClusterTest {
 
   @Test
   public void testAnalyzeSupportedFormats() throws Exception {
-    //Only allow computing statistics on PARQUET files.
+    // Only allow computing statistics on PARQUET files.
     try {
       client.alterSession(ExecConstants.SLICE_TARGET, 1);
       client.alterSession(ExecConstants.OUTPUT_FORMAT_OPTION, "json");
       run("CREATE TABLE dfs.tmp.employee_basic4 AS SELECT * from cp.`employee.json`");
-      //Should display not supported
+      // Should display not supported
       verifyAnalyzeOutput("ANALYZE TABLE dfs.tmp.employee_basic4 COMPUTE STATISTICS",
           "Table employee_basic4 is not supported by ANALYZE. "
           + "Support is currently limited to directory-based Parquet tables.");
 
+      // See DRILL-7522
+      client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, false);
       run("DROP TABLE dfs.tmp.employee_basic4");
       client.alterSession(ExecConstants.OUTPUT_FORMAT_OPTION, "parquet");
       run("CREATE TABLE dfs.tmp.employee_basic4 AS SELECT * from cp.`employee.json`");
-      //Should complete successfully (16 columns in employee.json)
+      // Should complete successfully (16 columns in employee.json)
       verifyAnalyzeOutput("ANALYZE TABLE dfs.tmp.employee_basic4 COMPUTE STATISTICS",
           "16");
     } finally {
       client.resetSession(ExecConstants.SLICE_TARGET);
       client.resetSession(ExecConstants.OUTPUT_FORMAT_OPTION);
+      client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
     }
   }
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/BaseTestJsonReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/BaseTestJsonReader.java
new file mode 100644
index 0000000000..d7be5e8aeb
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/BaseTestJsonReader.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.json;
+
+import static org.junit.Assert.fail;
+
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.rpc.RpcException;
+import org.apache.drill.test.ClusterTest;
+
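+/**
+ * Base class for the revised JSON reader tests. Provides helpers to toggle
+ * the V2 JSON reader session option and to run each test body under both
+ * the V1 and V2 readers.
+ */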
+public class BaseTestJsonReader extends ClusterTest {
+
+  protected void enableV2Reader(boolean enable) throws Exception {
+    client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  protected void resetV2Reader() throws Exception {
+    client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
+
+  protected interface TestWrapper {
+    void apply() throws Exception;
+  }
+
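+  /**
+   * Runs the test body twice: first with the V2 JSON reader disabled,
+   * then enabled, restoring the session option afterwards.
+   */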
+  protected void runBoth(TestWrapper wrapper) throws Exception {
+    try {
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
+    } finally {
+      resetV2Reader();
+    }
+  }
+
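+  /**
+   * Runs the query and returns the result as a single row set, converting
+   * RPC failures into test failures.
+   */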
+  protected RowSet runTest(String sql) {
+    try {
+      return client.queryBuilder().sql(sql).rowSet();
+    } catch (RpcException e) {
+      fail(e.getMessage());
+      throw new IllegalStateException(e);
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java
index 1af8e1d413..19b0b7be72 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonModes.java
@@ -18,7 +18,7 @@
 
 package org.apache.drill.exec.store.json;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -39,7 +39,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestJsonModes extends ClusterTest {
 
   @BeforeClass
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderFns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderFns.java
new file mode 100644
index 0000000000..5b2fb24741
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderFns.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.json;
+
+import java.nio.file.Paths;
+
+import org.apache.drill.categories.RowSetTest;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
+import org.apache.drill.exec.vector.complex.writer.TestJsonReader;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.rowSet.RowSetComparison;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Tests of selected Drill functions using JSON as an input source.
+ * (Split from the original <tt>TestJsonReader</tt>.) Relative to the Drill 1.12
+ * version, the tests here:
+ * <ul>
+ * <li>Are rewritten to use the {@link ClusterFixture} framework.</li>
+ * <li>Add data verification where missing.</li>
+ * <li>Clean up handling of session options.</li>
+ * </ul>
+ * When running tests, consider these to be secondary. First verify the core
+ * JSON reader itself (using {@link TestJsonReader}), then run these tests to
+ * ensure vectors populated by JSON work with downstream functions.
+ */
+@Category(RowSetTest.class)
+public class TestJsonReaderFns extends BaseTestJsonReader {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    startCluster(ClusterFixture.builder(dirTestWatcher));
+    dirTestWatcher.copyResourceToRoot(Paths.get("store", "json"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("vector", "complex", "writer"));
+  }
+
+  @Test
+  public void testEmptyList() throws Exception {
+    runBoth(() -> doTestEmptyList());
+  }
+
+  private void doTestEmptyList() throws Exception {
+    final String sql = "select count(a[0]) as ct from dfs.`store/json/emptyLists`";
+
+    final RowSet results = runTest(sql);
+    final TupleMetadata schema = new SchemaBuilder()
+        .add("ct", MinorType.BIGINT)
+        .build();
+
+    final RowSet expected = client.rowSetBuilder(schema)
+        .addRow(6L)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  // Expansion of former testRepeatedCount()
+
+  @Test
+  public void testRepeatedCountStr() throws Exception {
+    runBoth(() -> doTestRepeatedCountStr());
+  }
+
+  private void doTestRepeatedCountStr() throws Exception {
+    final RowSet results = runTest("select repeated_count(str_list) from cp.`store/json/json_basic_repeated_varchar.json`");
+    final RowSet expected = client.rowSetBuilder(countSchema())
+        .addSingleCol(5)
+        .addSingleCol(1)
+        .addSingleCol(3)
+        .addSingleCol(1)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedCountInt() throws Exception {
+    runBoth(() -> doTestRepeatedCountInt());
+  }
+
+  private void doTestRepeatedCountInt() throws Exception {
+    final RowSet results = runTest("select repeated_count(INT_col) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(countSchema())
+        .addSingleCol(12)
+        .addSingleCol(4)
+        .addSingleCol(4)
+        .addSingleCol(4)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedCountFloat4() throws Exception {
+    runBoth(() -> doTestRepeatedCountFloat4());
+  }
+
+  private void doTestRepeatedCountFloat4() throws Exception {
+    final RowSet results = runTest("select repeated_count(FLOAT4_col) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(countSchema())
+        .addSingleCol(7)
+        .addSingleCol(4)
+        .addSingleCol(4)
+        .addSingleCol(4)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedCountVarchar() throws Exception {
+    runBoth(() -> doTestRepeatedCountVarchar());
+  }
+
+  private void doTestRepeatedCountVarchar() throws Exception {
+    final RowSet results = runTest("select repeated_count(VARCHAR_col) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(countSchema())
+        .addSingleCol(4)
+        .addSingleCol(3)
+        .addSingleCol(3)
+        .addSingleCol(3)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedCountBit() throws Exception {
+    runBoth(() -> doTestRepeatedCountBit());
+  }
+
+  private void doTestRepeatedCountBit() throws Exception {
+    final RowSet results = runTest("select repeated_count(BIT_col) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(countSchema())
+        .addSingleCol(7)
+        .addSingleCol(7)
+        .addSingleCol(5)
+        .addSingleCol(3)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
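+  // Schema for repeated_count() results: a single INT column named EXPR$0.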
+  private TupleMetadata countSchema() {
+    final TupleMetadata expectedSchema = new SchemaBuilder()
+        .add("EXPR$0", MinorType.INT)
+        .build();
+    return expectedSchema;
+  }
+
+  // Reimplementation of testRepeatedContains()
+
+  @Test
+  public void testRepeatedContainsStr() throws Exception {
+    runBoth(() -> doTestRepeatedContainsStr());
+  }
+
+  private void doTestRepeatedContainsStr() throws Exception {
+    final RowSet results = runTest("select repeated_contains(str_list, 'asdf') from cp.`store/json/json_basic_repeated_varchar.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(2) // WRONG! Should be 1 (true). See DRILL-6034
+        .addSingleCol(0)
+        .addSingleCol(1)
+        .addSingleCol(0)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedContainsInt() throws Exception {
+    runBoth(() -> doTestRepeatedContainsInt());
+  }
+
+  private void doTestRepeatedContainsInt() throws Exception {
+    final RowSet results = runTest("select repeated_contains(INT_col, -2147483648) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(1)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedContainsFloat4() throws Exception {
+    runBoth(() -> doTestRepeatedContainsFloat4());
+  }
+
+  private void doTestRepeatedContainsFloat4() throws Exception {
+    final RowSet results = runTest("select repeated_contains(FLOAT4_col, -1000000000000.0) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(1)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedContainsVarchar() throws Exception {
+    runBoth(() -> doTestRepeatedContainsVarchar());
+  }
+
+  private void doTestRepeatedContainsVarchar() throws Exception {
+    final RowSet results = runTest("select repeated_contains(VARCHAR_col, 'qwerty' ) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(1)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .addSingleCol(0)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedContainsBitTrue() throws Exception {
+    runBoth(() -> doTestRepeatedContainsBitTrue());
+  }
+
+  private void doTestRepeatedContainsBitTrue() throws Exception {
+    final RowSet results = runTest("select repeated_contains(BIT_col, true) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(11) // WRONG! Should be 1 (true). See DRILL-6034
+        .addSingleCol(2)
+        .addSingleCol(0)
+        .addSingleCol(3)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  @Test
+  public void testRepeatedContainsBitFalse() throws Exception {
+    runBoth(() -> doTestRepeatedContainsBitFalse());
+  }
+
+  private void doTestRepeatedContainsBitFalse() throws Exception {
+    final RowSet results = runTest("select repeated_contains(BIT_col, false) from cp.`parquet/alltypes_repeated.json`");
+    final RowSet expected = client.rowSetBuilder(bitCountSchema())
+        .addSingleCol(5) // WRONG! Should be 1 (true). See DRILL-6034
+        .addSingleCol(5)
+        .addSingleCol(5)
+        .addSingleCol(0)
+        .build();
+    new RowSetComparison(expected).verifyAndClearAll(results);
+  }
+
+  private TupleMetadata bitCountSchema() {
+    return new SchemaBuilder()
+        .add("EXPR$0", MinorType.BIT)
+        .buildSchema();
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderQueries.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderQueries.java
new file mode 100644
index 0000000000..3fcad38e00
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderQueries.java
@@ -0,0 +1,625 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.json;
+
+import static org.apache.drill.test.TestBuilder.listOf;
+import static org.apache.drill.test.TestBuilder.mapOf;
+import static org.apache.drill.test.rowSet.RowSetUtilities.longArray;
+import static org.apache.drill.test.rowSet.RowSetUtilities.mapValue;
+import static org.apache.drill.test.rowSet.RowSetUtilities.singleMap;
+import static org.apache.drill.test.rowSet.RowSetUtilities.strArray;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.nio.file.Paths;
+import java.util.zip.GZIPOutputStream;
+
+import org.apache.drill.categories.RowSetTest;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.util.DrillFileUtils;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.physical.rowSet.DirectRowSet;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
+import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
+import org.apache.drill.shaded.guava.com.google.common.io.Files;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.QueryBuilder.QuerySummary;
+import org.apache.drill.test.QueryResultSet;
+import org.apache.drill.test.rowSet.RowSetUtilities;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Reimplementation of selected tests from the
+ * TestJsonReader test suite.
+ */
+
+@Category(RowSetTest.class)
+public class TestJsonReaderQueries extends BaseTestJsonReader {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    startCluster(ClusterFixture.builder(dirTestWatcher));
+    dirTestWatcher.copyResourceToRoot(Paths.get("store", "json"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("vector", "complex", "writer"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("jsoninput/drill_3353"));
+  }
+
+  /**
+   * Reimplementation of a Drill 1.12 unit test, revised to actually verify
+   * results. Doing so is non-trivial, as the inline comments explain. This
+   * test shows the limits of "schema-free" processing when the schema changes.
+   * @throws Exception
+   */
+
+  @Test
+  @Ignore("Too fragile to keep working")
+  public void schemaChange() throws Exception {
+    String sql = "select b from dfs.`vector/complex/writer/schemaChange/`";
+//    runAndPrint(sql);
+    QueryResultSet results = client.queryBuilder().sql(sql).resultSet();
+
+    // Query will scan two files:
+    // f1:
+    // {"a": "foo","b": null}
+    // {"a": "bar","b": null}
+    // f2:
+    // {"a": "foo2","b": null}
+    // {"a": "bar2","b": {"x":1, "y":2}}
+
+    // When f1 is read, we don't yet know the type of b, so it defaults to
+    // Varchar (assuming text mode for that column).
+    //
+    // On reading f2, we discover that b is a map (something we only learn
+    // from the second record).
+    //
+    // The scanner handles schema persistence, but not (at present) for maps.
+    // If we did have schema persistence for maps, and f2 were read first,
+    // we'd remember the map schema when reading f1.
+    //
+    // This crazy behavior is the best we can do without a schema. Bottom line:
+    // Drill needs a user-provided schema to make sense of these cases because
+    // "Drill can't predict the future" (TM).
+    //
+    // See TestCSV* for a way to implement this test case
+
+    TupleMetadata f2Schema = new SchemaBuilder()
+        .addMap("b")
+          .addNullable("x", MinorType.BIGINT)
+          .addNullable("y", MinorType.BIGINT)
+          .resumeSchema()
+        .build();
+    RowSet f2Expected = client.rowSetBuilder(f2Schema)
+        .addSingleCol(mapValue(null, null))
+        .addSingleCol(mapValue(1L, 2L))
+        .build();
+
+    TupleMetadata f1Schema = new SchemaBuilder()
+        .addNullable("b", MinorType.VARCHAR)
+        .build();
+    RowSet f1Expected = client.rowSetBuilder(f1Schema)
+        .addSingleCol(null)
+        .addSingleCol(null)
+        .build();
+
+    // The first batch is empty; it presents only a schema. But, since file
+    // order is non-deterministic, we don't know which file's schema it will
+    // carry.
+
+    RowSet batch = results.next();
+    assertNotNull(batch);
+    assertEquals(0, batch.rowCount());
+    boolean mapFirst;
+    if (batch.schema().metadata("b").type() == MinorType.MAP) {
+      RowSet expected = client.rowSetBuilder(f2Schema)
+          .build();
+      RowSetUtilities.verify(expected, batch);
+      mapFirst = true;
+    } else {
+      RowSet expected = client.rowSetBuilder(f1Schema)
+          .build();
+      RowSetUtilities.verify(expected, batch);
+      mapFirst = false;
+    }
+    for (int i = 0; i < 2; i++) {
+      batch = results.next();
+      assertNotNull(batch);
+      if ((i == 0 && mapFirst) || (i == 1 && !mapFirst)) {
+        RowSetUtilities.verify(f2Expected, batch);
+      } else {
+        RowSetUtilities.verify(f1Expected, batch);
+      }
+    }
+    assertNull(results.next());
+    results.close();
+  }
+
+  /**
+   * Reimplementation of the Drill 1.12 test. Tests the odd case in which
+   * we project both a single column from inside a map and the entire map.
+   * <p>
+   * As it turns out, the original functionality was broken, and the test
+   * had incorrect expected results that reflected the broken functionality.
+   * <p>
+   * The query selects two fields which are deeply nested:
+   * <ul>
+   * <li><tt>t.field_4.inner_3</tt>, where <tt>field_4</tt> is a map and
+   * <tt>inner_3</tt> is another map.</li>
+   * <li><tt>t.field_4</tt>, a map with three total items.</li>
+   * </ul>
+   * @throws Exception
+   */
+
+  @Test
+  @Ignore("broken")
+  public void testFieldSelectionBug() throws Exception {
+    runBoth(() -> doTestFieldSelectionBug());
+  }
+
+  private void doTestFieldSelectionBug() throws Exception {
+    String sql = "select t.field_4.inner_3 as col_1, t.field_4 as col_2 from cp.`store/json/schema_change_int_to_string.json` t";
+    try {
+      client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
+
+      testBuilder()
+          .sqlQuery(sql)
+          .unOrdered()
+          .baselineColumns("col_1", "col_2")
+          .baselineValues(
+              mapOf(),
+              mapOf(
+                  "inner_1", listOf(),
+                  "inner_3", mapOf()))
+          .baselineValues(
+              mapOf("inner_object_field_1", "2"),
+              mapOf(
+                  "inner_1", listOf("1", "2", "3"),
+                  "inner_2", "3",
+                  "inner_3", mapOf("inner_object_field_1", "2")))
+          .baselineValues(
+              mapOf(),
+              mapOf(
+                  "inner_1", listOf("4", "5", "6"),
+                  "inner_2", "3",
+                  "inner_3", mapOf()))
+          .go();
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+    }
+  }
+
+  @Test
+  public void testReadCompressed() throws Exception {
+    runBoth(() -> doTestReadCompressed());
+  }
+
+  private void doTestReadCompressed() throws Exception {
+    String filepath = "compressed_json.json";
+    File f = new File(dirTestWatcher.getRootDir(), filepath);
+    try (PrintWriter out = new PrintWriter(f)) {
+      out.println("{\"a\": 5}");
+    }
+
+    gzipIt(f);
+    testBuilder()
+        .sqlQuery("select * from dfs.`%s.gz`", filepath)
+        .unOrdered()
+        .baselineColumns("a")
+        .baselineValues(5L)
+        .build().run();
+
+    // test reading the uncompressed version as well
+    testBuilder()
+        .sqlQuery("select * from dfs.`%s`", filepath)
+        .unOrdered()
+        .baselineColumns("a")
+        .baselineValues(5L)
+        .build().run();
+  }
+
+  public static void gzipIt(File sourceFile) throws IOException {
+    // Modified from: http://www.mkyong.com/java/how-to-compress-a-file-in-gzip-format/
+    byte[] buffer = new byte[1024];
+    try (FileInputStream in = new FileInputStream(sourceFile);
+         GZIPOutputStream gzos = new GZIPOutputStream(
+             new FileOutputStream(sourceFile.getPath() + ".gz"))) {
+      int len;
+      while ((len = in.read(buffer)) > 0) {
+        gzos.write(buffer, 0, len);
+      }
+    }
+  }
+
+  @Test
+  public void testDrill_1419() throws Exception {
+    runBoth(() -> doTestDrill_1419());
+  }
+
+  private void doTestDrill_1419() throws Exception {
+    String sql = "select t.trans_id, t.trans_info.prod_id[0],t.trans_info.prod_id[1] from cp.`store/json/clicks.json` t limit 5";
+    RowSet results = client.queryBuilder().sql(sql).rowSet();
+    TupleMetadata schema = new SchemaBuilder()
+        .addNullable("trans_id", MinorType.BIGINT)
+        .addNullable("EXPR$1", MinorType.BIGINT)
+        .addNullable("EXPR$2", MinorType.BIGINT)
+        .build();
+
+    RowSet expected = client.rowSetBuilder(schema)
+        .addRow(31920L, 174L, 2L)
+        .addRow(31026L, null, null)
+        .addRow(33848L, 582L, null)
+        .addRow(32383L, 710L, 47L)
+        .addRow(32359L, 0L, 8L)
+        .build();
+    RowSetUtilities.verify(expected, results);
+  }
+
+  @Test
+  public void testSingleColumnRead_vector_fill_bug() throws Exception {
+    runBoth(() -> doTestSingleColumnRead_vector_fill_bug());
+  }
+
+  private void doTestSingleColumnRead_vector_fill_bug() throws Exception {
+    String sql = "select * from cp.`store/json/single_column_long_file.json`";
+    QuerySummary results = client.queryBuilder().sql(sql).run();
+    assertEquals(13_512, results.recordCount());
+  }
+
+  @Test
+  public void testNonExistentColumnReadAlone() throws Exception {
+    runBoth(() -> doTestNonExistentColumnReadAlone());
+  }
+
+  private void doTestNonExistentColumnReadAlone() throws Exception {
+    String sql = "select non_existent_column from cp.`store/json/single_column_long_file.json`";
+    QuerySummary results = client.queryBuilder().sql(sql).run();
+    assertEquals(13_512, results.recordCount());
+  }
+
+  @Test
+  public void testAllTextMode() throws Exception {
+    runBoth(() -> doTestAllTextMode());
+  }
+
+  private void doTestAllTextMode() throws Exception {
+    client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
+    try {
+      String sql = "select * from cp.`store/json/schema_change_int_to_string.json`";
+      QuerySummary results = client.queryBuilder().sql(sql).run();
+
+      // This is a pretty lame test as it does not verify results. However,
+      // enough other all-text mode tests do verify results. Here, we just
+      // make sure that the query does not die with a schema change exception.
+
+      assertEquals(3, results.recordCount());
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+    }
+  }
+
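+  // Verifies the expected schema and contents of test_complex_read_with_star.json.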
+  private void testExistentColumns(RowSet result) throws SchemaChangeException {
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+        .addArray("field_1", MinorType.BIGINT)
+        .addMap("field_3")
+          .addNullable("inner_1", MinorType.BIGINT)
+          .addNullable("inner_2", MinorType.BIGINT)
+          .resumeSchema()
+        .addMap("field_4")
+          .addArray("inner_1", MinorType.BIGINT)
+          .addNullable("inner_2", MinorType.BIGINT)
+          .resumeSchema()
+        .build();
+
+    RowSet expected = client.rowSetBuilder(expectedSchema)
+        .addRow(longArray(1L), mapValue(null, null), mapValue(longArray(), null))
+        .addRow(longArray(5L), mapValue(2L, null), mapValue(longArray(1L, 2L, 3L), 3L))
+        .addRow(longArray(5L, 10L, 15L), mapValue(5L, 3L), mapValue(longArray(4L, 5L, 6L), 3L))
+        .build();
+
+    RowSetUtilities.verify(expected, result);
+  }
+
+  @Test
+  public void readComplexWithStar() throws Exception {
+    runBoth(() -> doReadComplexWithStar());
+  }
+
+  private void doReadComplexWithStar() throws Exception {
+    RowSet results = runTest("select * from cp.`store/json/test_complex_read_with_star.json`");
+    testExistentColumns(results);
+  }
+
+  @Test
+  public void testNullWhereListExpectedNumeric() throws Exception {
+    runBoth(() -> doTestNullWhereListExpectedNumeric());
+  }
+
+  private void doTestNullWhereListExpectedNumeric() throws Exception {
+    String sql = "select * from cp.`store/json/null_where_list_expected.json`";
+    RowSet results = runTest(sql);
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+        .addArray("list_1", MinorType.BIGINT)
+        .build();
+
+    RowSet expected = client.rowSetBuilder(expectedSchema)
+        .addSingleCol(longArray(1L, 2L, 3L))
+        .addSingleCol(longArray())
+        .addSingleCol(longArray(4L, 5L, 6L))
+        .build();
+
+    RowSetUtilities.verify(expected, results);
+  }
+
+  @Test
+  public void testNullWhereMapExpectedNumeric() throws Exception {
+    runBoth(() -> doTestNullWhereMapExpectedNumeric());
+  }
+
+  private void doTestNullWhereMapExpectedNumeric() throws Exception {
+    String sql = "select * from cp.`store/json/null_where_map_expected.json`";
+    RowSet results = runTest(sql);
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+        .addMap("map_1")
+          .addNullable("f_1", MinorType.BIGINT)
+          .addNullable("f_2", MinorType.BIGINT)
+          .addNullable("f_3", MinorType.BIGINT)
+          .resumeSchema()
+        .build();
+
+    RowSet expected = client.rowSetBuilder(expectedSchema)
+        .addSingleCol(mapValue(1L, 2L, 3L))
+        .addSingleCol(mapValue(null, null, null))
+        .addSingleCol(mapValue(3L, 4L, 5L))
+        .build();
+
+    RowSetUtilities.verify(expected, results);
+  }
+
+  @Test
+  public void testNullWhereMapExpectedText() throws Exception {
+    runBoth(() -> doTestNullWhereMapExpectedText());
+  }
+
+  private void doTestNullWhereMapExpectedText() throws Exception {
+    client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
+    try {
+      String sql = "select * from cp.`store/json/null_where_map_expected.json`";
+      RowSet results = runTest(sql);
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .addMap("map_1")
+            .addNullable("f_1", MinorType.VARCHAR)
+            .addNullable("f_2", MinorType.VARCHAR)
+            .addNullable("f_3", MinorType.VARCHAR)
+            .resumeSchema()
+          .build();
+
+      RowSet expected = client.rowSetBuilder(expectedSchema)
+          .addSingleCol(mapValue("1", "2", "3"))
+          .addSingleCol(mapValue(null, null, null))
+          .addSingleCol(mapValue("3", "4", "5"))
+          .build();
+
+      RowSetUtilities.verify(expected, results);
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+    }
+  }
+
+  @Test
+  public void testNullWhereListExpectedText() throws Exception {
+    runBoth(() -> doTestNullWhereListExpectedText());
+  }
+
+  private void doTestNullWhereListExpectedText() throws Exception {
+    client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
+    try {
+      String sql = "select * from cp.`store/json/null_where_list_expected.json`";
+      RowSet results = runTest(sql);
+
+      TupleMetadata expectedSchema = new SchemaBuilder()
+          .addArray("list_1", MinorType.VARCHAR)
+          .build();
+
+      RowSet expected = client.rowSetBuilder(expectedSchema)
+          .addSingleCol(strArray("1", "2", "3"))
+          .addSingleCol(strArray())
+          .addSingleCol(strArray("4", "5", "6"))
+          .build();
+
+      RowSetUtilities.verify(expected, results);
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+    }
+  }
+
+  @Test
+  public void ensureProjectionPushdown() throws Exception {
+    runBoth(() -> doEnsureProjectionPushdown());
+  }
+
+  private void doEnsureProjectionPushdown() throws Exception {
+    // Tests to make sure that we are correctly eliminating schema changing columns.
+    // If completes, means that the projection pushdown was successful.
+
+    client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, false);
+    try {
+      String sql = "select t.field_1, t.field_3.inner_1, t.field_3.inner_2, t.field_4.inner_1 "
+                  + "from cp.`store/json/schema_change_int_to_string.json` t";
+      assertEquals(3, client.queryBuilder().sql(sql).run().recordCount());
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+    }
+  }
+
+  /**
+   * Old description: The project pushdown rule is correctly adding the
+   * projected columns to the scan, however it is not removing the redundant
+   * project operator after the scan, this tests runs a physical plan generated
+   * from one of the tests to ensure that the project is filtering out the
+   * correct data in the scan alone.
+   * <p>
+   * Revised functionality: the scan operator does all of the requested project
+   * operations, producing five columns.
+   */
+
+  @Test
+  public void testProjectPushdown() throws Exception {
+    try {
+      enableV2Reader(true);
+      client.alterSession(ExecConstants.JSON_ALL_TEXT_MODE, false);
+      String plan = Files.asCharSource(DrillFileUtils.getResourceAsFile(
+          "/store/json/project_pushdown_json_physical_plan.json"),
+          Charsets.UTF_8).read();
+//      client.queryBuilder().physical(plan).printCsv();
+      DirectRowSet results = client.queryBuilder().physical(plan).rowSet();
+//      results.print();
+
+      // Projects all columns (since the revised scan operator handles missing-column
+      // projection.) Note that the result includes two batches, including the first empty
+      // batch.
+
+      TupleMetadata schema = new SchemaBuilder()
+          .addArray("field_1", MinorType.BIGINT)
+          .addMap("field_3")
+            .addNullable("inner_1", MinorType.BIGINT)
+            .addNullable("inner_2", MinorType.BIGINT)
+            .resumeSchema()
+          .addMap("field_4")
+            .addArray("inner_1", MinorType.BIGINT)
+            .resumeSchema()
+          .addNullable("non_existent_at_root", MinorType.VARCHAR)
+          .addMap("non_existent")
+            .addMap("nested")
+              .addNullable("field", MinorType.VARCHAR)
+              .resumeMap()
+            .resumeSchema()
+          .build();
+
+      Object nullMap = singleMap(singleMap(null));
+      RowSet expected = client.rowSetBuilder(schema)
+          .addRow(longArray(1L), mapValue(null, null), singleMap(longArray()), null, nullMap )
+          .addRow(longArray(5L), mapValue(2L, null), singleMap(longArray(1L, 2L, 3L)), null, nullMap)
+          .addRow(longArray(5L, 10L, 15L), mapValue(5L, 3L), singleMap(longArray(4L, 5L, 6L)), null, nullMap)
+          .build();
+      RowSetUtilities.verify(expected, results);
+    } finally {
+      client.resetSession(ExecConstants.JSON_ALL_TEXT_MODE);
+      resetV2Reader();
+    }
+  }
+
+  @Test
+  public void testJsonDirectoryWithEmptyFile() throws Exception {
+    runBoth(() -> doTestJsonDirectoryWithEmptyFile());
+  }
+
+  private void doTestJsonDirectoryWithEmptyFile() throws Exception {
+    testBuilder()
+        .sqlQuery("select * from dfs.`store/json/jsonDirectoryWithEmpyFile`")
+        .unOrdered()
+        .baselineColumns("a")
+        .baselineValues(1L)
+        .build()
+        .run();
+  }
+
+  // Only works in V2 reader.
+  // Disabled because it depends on the (random) read order
+
+  @Test
+  @Ignore("unstable")
+  public void drill_4032() throws Exception {
+    try {
+      enableV2Reader(true);
+      File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4032"));
+      table_dir.mkdir();
+      try (PrintWriter os = new PrintWriter(new FileWriter(new File(table_dir, "a.json")))) {
+        os.write("{\"col1\": \"val1\", \"col2\": null}");
+        os.write("{\"col1\": \"val2\", \"col2\": {\"col3\":\"abc\", \"col4\":\"xyz\"}}");
+      }
+      try (PrintWriter os = new PrintWriter(new FileWriter(new File(table_dir, "b.json")))) {
+        os.write("{\"col1\": \"val3\", \"col2\": null}");
+        os.write("{\"col1\": \"val4\", \"col2\": null}");
+      }
+      String sql = "select t.col1, t.col2.col3 from dfs.tmp.drill_4032 t order by col1";
+//      String sql = "select t.col1, t.col2.col3 from dfs.tmp.drill_4032 t";
+      RowSet results = runTest(sql);
+      results.print();
+
+      TupleMetadata schema = new SchemaBuilder()
+          .addNullable("col1", MinorType.VARCHAR)
+          .addNullable("EXPR$1", MinorType.VARCHAR)
+          .build();
+
+      RowSet expected = client.rowSetBuilder(schema)
+          .addRow("val1", null)
+          .addRow("val2", "abc")
+          .addRow("val3", null)
+          .addRow("val4", null)
+          .build();
+      RowSetUtilities.verify(expected, results);
+    } finally {
+      resetV2Reader();
+    }
+  }
+
+  /** Test <pre>
+   * { "a": 5.2 }
+   * { "a": 6 }</pre>
+   * In Drill 1.16 and before, this triggered an exception. In Drill 1.17
+   * and later, the second number, an integer, is converted to a
+   * double.
+   */
+
+  @Test
+  public void testMixedNumberTypes() throws Exception {
+    try {
+      enableV2Reader(true);
+      String sql = "select * from cp.`jsoninput/mixed_number_types.json`";
+      RowSet results = runTest(sql);
+      TupleMetadata schema = new SchemaBuilder()
+          .addNullable("a", MinorType.FLOAT8)
+          .build();
+
+      RowSet expected = client.rowSetBuilder(schema)
+          .addSingleCol(5.2D)
+          .addSingleCol(6.0D)
+          .build();
+      RowSetUtilities.verify(expected, results);
+    } finally {
+      resetV2Reader();
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderWithSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderWithSchema.java
new file mode 100644
index 0000000000..c0d4a4b80f
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonReaderWithSchema.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.json;
+
+import org.apache.drill.exec.ExecConstants;
+import org.junit.Test;
+
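+/**
+ * Tests JSON reading with the union type enabled
+ * (<tt>exec.enable_union_type</tt>).
+ */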
+public class TestJsonReaderWithSchema extends BaseTestJsonReader {
+
+  @Test
+  public void testSelectFromListWithCase() throws Exception {
+    try {
+      testBuilder()
+              .sqlQuery("select a, typeOf(a) `type` from " +
+                "(select case when is_list(field2) then field2[4][1].inner7 end a " +
+                "from cp.`jsoninput/union/a.json`) where a is not null")
+              .ordered()
+              .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+              .baselineColumns("a", "type")
+              .baselineValues(13L, "BIGINT")
+              .go();
+    } finally {
+      client.resetSession(ExecConstants.ENABLE_UNION_TYPE_KEY);
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java
index bf83ae29b9..7b0a61c496 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonRecordReader.java
@@ -17,29 +17,63 @@
  */
 package org.apache.drill.exec.store.json;
 
-import org.apache.drill.test.BaseTestQuery;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.nio.file.Paths;
+
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.exec.proto.UserBitShared;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.proto.UserBitShared;
+import org.apache.drill.test.BaseTestQuery;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.junit.Assert;
 import org.junit.experimental.categories.Category;
 
-import java.nio.file.Paths;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
+/**
+ * Original JSON reader tests, left in their original form rather than
+ * converted to the newer test framework.
+ */
+@Category(RowSetTest.class)
 public class TestJsonRecordReader extends BaseTestQuery {
+
   @BeforeClass
   public static void setupTestFiles() {
     dirTestWatcher.copyResourceToRoot(Paths.get("jsoninput/drill_3353"));
   }
 
+  private void enableV2Reader(boolean enable) throws Exception {
+    alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() throws Exception {
+    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
+
+  public interface TestWrapper {
+    void apply() throws Exception;
+  }
+
+  public void runBoth(TestWrapper wrapper) throws Exception {
+    try {
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
+    } finally {
+      resetV2Reader();
+    }
+  }
+
   @Test
   public void testComplexJsonInput() throws Exception {
+    runBoth(this::doTestComplexJsonInput);
+  }
+
+  private void doTestComplexJsonInput() throws Exception {
     test("select `integer`, x['y'] as x1, x['y'] as x2, z[0], z[0]['orange'], z[1]['pink']  from cp.`jsoninput/input2.json` limit 10 ");
   }
 
@@ -50,11 +84,19 @@ public class TestJsonRecordReader extends BaseTestQuery {
 
   @Test
   public void testContainingArray() throws Exception {
+    runBoth(this::doTestContainingArray);
+  }
+
+  private void doTestContainingArray() throws Exception {
     test("select * from cp.`store/json/listdoc.json`");
   }
 
   @Test
   public void testComplexMultipleTimes() throws Exception {
+    runBoth(this::doTestComplexMultipleTimes);
+  }
+
+  private void doTestComplexMultipleTimes() throws Exception {
     for (int i = 0; i < 5; i++) {
       test("select * from cp.`join/merge_join.json`");
     }
@@ -62,6 +104,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
 
   @Test
   public void trySimpleQueryWithLimit() throws Exception {
+    runBoth(this::doTrySimpleQueryWithLimit);
+  }
+
+  private void doTrySimpleQueryWithLimit() throws Exception {
     test("select * from cp.`limit/test1.json` limit 10");
   }
 
@@ -69,6 +115,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
   // DRILL-1634 : retrieve an element in a nested array in a repeated map.
   // RepeatedMap (Repeated List (Repeated varchar))
   public void testNestedArrayInRepeatedMap() throws Exception {
+    runBoth(() -> doTestNestedArrayInRepeatedMap());
+  }
+
+  private void doTestNestedArrayInRepeatedMap() throws Exception {
     test("select a[0].b[0] from cp.`jsoninput/nestedArray.json`");
     test("select a[0].b[1] from cp.`jsoninput/nestedArray.json`");
     test("select a[1].b[1] from cp.`jsoninput/nestedArray.json`"); // index out of the range. Should return empty list.
@@ -76,19 +126,31 @@ public class TestJsonRecordReader extends BaseTestQuery {
 
   @Test
   public void testEmptyMapDoesNotFailValueCapacityCheck() throws Exception {
+    runBoth(() -> doTestEmptyMapDoesNotFailValueCapacityCheck());
+  }
+
+  private void doTestEmptyMapDoesNotFailValueCapacityCheck() throws Exception {
     final String sql = "select * from cp.`store/json/value-capacity.json`";
     test(sql);
   }
 
   @Test
   public void testEnableAllTextMode() throws Exception {
-    testNoResult("alter session set `store.json.all_text_mode`= true");
+    runBoth(() -> doTestEnableAllTextMode());
+  }
+
+  private void doTestEnableAllTextMode() throws Exception {
+    alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
     test("select * from cp.`jsoninput/big_numeric.json`");
-    testNoResult("alter session set `store.json.all_text_mode`= false");
+    resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
   }
 
   @Test
   public void testExceptionHandling() throws Exception {
+    runBoth(this::doTestExceptionHandling);
+  }
+
+  private void doTestExceptionHandling() throws Exception {
     try {
       test("select * from cp.`jsoninput/DRILL-2350.json`");
     } catch (UserException e) {
@@ -96,8 +158,8 @@ public class TestJsonRecordReader extends BaseTestQuery {
           UserBitShared.DrillPBError.ErrorType.UNSUPPORTED_OPERATION, e
               .getOrCreatePBError(false).getErrorType());
       String s = e.getMessage();
-      assertEquals("Expected Unsupported Operation Exception.", true,
-          s.contains("Drill does not support lists of different types."));
+      assertTrue("Expected Unsupported Operation Exception.",
+        s.contains("Drill does not support lists of different types."));
     }
 
   }
@@ -106,6 +168,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   // DRILL-1832
   public void testJsonWithNulls1() throws Exception {
+    runBoth(() -> doTestJsonWithNulls1());
+  }
+
+  private void doTestJsonWithNulls1() throws Exception {
     final String query = "select * from cp.`jsoninput/twitter_43.json`";
     testBuilder().sqlQuery(query).unOrdered()
         .jsonBaselineFile("jsoninput/drill-1832-1-result.json").go();
@@ -115,70 +181,97 @@ public class TestJsonRecordReader extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   // DRILL-1832
   public void testJsonWithNulls2() throws Exception {
+    runBoth(() -> doTestJsonWithNulls2());
+  }
+
+  private void doTestJsonWithNulls2() throws Exception {
     final String query = "select SUM(1) as `sum_Number_of_Records_ok` from cp.`jsoninput/twitter_43.json` having (COUNT(1) > 0)";
     testBuilder().sqlQuery(query).unOrdered()
         .jsonBaselineFile("jsoninput/drill-1832-2-result.json").go();
   }
 
+  // V1-only test. In V2, this works. See TestJsonReaderQueries.
+
   @Test
   public void testMixedNumberTypes() throws Exception {
     try {
+      enableV2Reader(false);
       testBuilder()
           .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
           .unOrdered().jsonBaselineFile("jsoninput/mixed_number_types.json")
           .build().run();
+      fail("Mixed number types verification failed, expected failure on conflicting number types.");
     } catch (Exception ex) {
+      // this indicates successful completion of the test
       assertTrue(ex
           .getMessage()
           .contains(
               "You tried to write a BigInt type when you are using a ValueWriter of type NullableFloat8WriterImpl."));
-      // this indicates successful completion of the test
-      return;
+    } finally {
+      resetV2Reader();
     }
-    throw new Exception(
-        "Mixed number types verification failed, expected failure on conflicting number types.");
   }
 
   @Test
   public void testMixedNumberTypesInAllTextMode() throws Exception {
-    testNoResult("alter session set `store.json.all_text_mode`= true");
-    testBuilder()
-        .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
-        .unOrdered().baselineColumns("a").baselineValues("5.2")
-        .baselineValues("6").build().run();
+    runBoth(() -> doTestMixedNumberTypesInAllTextMode());
+  }
+
+  private void doTestMixedNumberTypesInAllTextMode() throws Exception {
+    try {
+      alterSession("store.json.all_text_mode", true);
+      testBuilder()
+          .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
+          .unOrdered().baselineColumns("a").baselineValues("5.2")
+          .baselineValues("6").build().run();
+    } finally {
+      resetSessionOption("store.json.all_text_mode");
+    }
   }
 
   @Test
   public void testMixedNumberTypesWhenReadingNumbersAsDouble() throws Exception {
     try {
-      testNoResult("alter session set `store.json.read_numbers_as_double`= true");
+      alterSession(ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE, true);
       testBuilder()
           .sqlQuery("select * from cp.`jsoninput/mixed_number_types.json`")
           .unOrdered().baselineColumns("a").baselineValues(5.2D)
           .baselineValues(6D).build().run();
     } finally {
-      testNoResult("alter session set `store.json.read_numbers_as_double`= false");
+      resetSessionOption(ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE);
     }
   }
 
   @Test
   public void drill_3353() throws Exception {
     try {
-      testNoResult("alter session set `store.json.all_text_mode` = true");
-      test("create table dfs.tmp.drill_3353 as select a from dfs.`jsoninput/drill_3353` where e = true");
-      String query = "select t.a.d cnt from dfs.tmp.drill_3353 t where t.a.d is not null";
-      test(query);
-      testBuilder().sqlQuery(query).unOrdered().baselineColumns("cnt")
-          .baselineValues("1").go();
+      alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
+      test("create table dfs.tmp.drill_3353 as select a from dfs.`jsoninput/drill_3353` where e = true");
+      runBoth(this::doDrill_3353);
     } finally {
-      testNoResult("alter session set `store.json.all_text_mode` = false");
+      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
     }
   }
 
+  private void doDrill_3353() throws Exception {
+    String query = "select t.a.d cnt from dfs.tmp.drill_3353 t where t.a.d is not null";
+    test(query);
+    testBuilder()
+      .sqlQuery(query)
+      .unOrdered()
+      .baselineColumns("cnt")
+      .baselineValues("1")
+      .go();
+  }
+
   @Test
   @Category(UnlikelyTest.class)
   // See DRILL-3476
   public void testNestedFilter() throws Exception {
+    runBoth(this::doTestNestedFilter);
+  }
+
+  private void doTestNestedFilter() throws Exception {
     String query = "select a from cp.`jsoninput/nestedFilter.json` t where t.a.b = 1";
     String baselineQuery = "select * from cp.`jsoninput/nestedFilter.json` t where t.a.b = 1";
     testBuilder().sqlQuery(query).unOrdered().sqlBaselineQuery(baselineQuery)
@@ -192,19 +285,22 @@ public class TestJsonRecordReader extends BaseTestQuery {
   public void testCountingQuerySkippingInvalidJSONRecords() throws Exception {
     try {
       String set = "alter session set `"
-          + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = true";
+        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = true";
       String set1 = "alter session set `"
-          + ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG
-          + "` = true";
+        + ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG
+        + "` = true";
       String query = "select count(*) from cp.`jsoninput/drill4653/file.json`";
 
       testNoResult(set);
       testNoResult(set1);
-      testBuilder().unOrdered().sqlQuery(query).sqlBaselineQuery(query).build()
-          .run();
+      testBuilder()
+        .unOrdered()
+        .sqlQuery(query)
+        .sqlBaselineQuery(query)
+        .go();
     } finally {
       String set = "alter session set `"
-          + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = false";
+        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = false";
       testNoResult(set);
     }
   }
@@ -214,6 +310,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
   // See DRILL-4653
   /* Test for CountingJSONReader */
   public void testCountingQueryNotSkippingInvalidJSONRecords() throws Exception {
+    runBoth(this::doTestCountingQueryNotSkippingInvalidJSONRecords);
+  }
+
+  private void doTestCountingQueryNotSkippingInvalidJSONRecords() throws Exception {
     try {
       String query = "select count(*) from cp.`jsoninput/drill4653/file.json`";
       testBuilder().unOrdered().sqlQuery(query).sqlBaselineQuery(query).build()
@@ -231,21 +331,23 @@ public class TestJsonRecordReader extends BaseTestQuery {
   /* Test for JSONReader */
   public void testNotCountingQuerySkippingInvalidJSONRecords() throws Exception {
     try {
-
       String set = "alter session set `"
-          + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = true";
+        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = true";
       String set1 = "alter session set `"
-          + ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG
-          + "` = true";
+        + ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG
+        + "` = true";
       String query = "select sum(balance) from cp.`jsoninput/drill4653/file.json`";
       testNoResult(set);
       testNoResult(set1);
-      testBuilder().unOrdered().sqlQuery(query).sqlBaselineQuery(query).build()
-          .run();
+      testBuilder()
+        .unOrdered()
+        .sqlQuery(query)
+        .sqlBaselineQuery(query)
+        .go();
     }
     finally {
       String set = "alter session set `"
-          + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = false";
+        + ExecConstants.JSON_READER_SKIP_INVALID_RECORDS_FLAG + "` = false";
       testNoResult(set);
     }
   }
@@ -256,6 +358,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
   /* Test for JSONReader */
   public void testNotCountingQueryNotSkippingInvalidJSONRecords()
       throws Exception {
+    runBoth(this::doTestNotCountingQueryNotSkippingInvalidJSONRecords);
+  }
+
+  private void doTestNotCountingQueryNotSkippingInvalidJSONRecords() throws Exception {
     try {
       String query = "select sum(balance) from cp.`jsoninput/drill4653/file.json`";
       testBuilder().unOrdered().sqlQuery(query).sqlBaselineQuery(query).build()
@@ -272,6 +378,10 @@ public class TestJsonRecordReader extends BaseTestQuery {
   // See DRILL-7362
   /* Test for CountingJSONReader */
   public void testContainingArrayCount() throws Exception {
+    runBoth(this::doTestContainingArrayCount);
+  }
+
+  private void doTestContainingArrayCount() throws Exception {
     testBuilder()
       .sqlQuery("select count(*) as cnt from cp.`store/json/listdoc.json`")
       .unOrdered()
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonScanOp.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonScanOp.java
new file mode 100644
index 0000000000..8f03dd53e7
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonScanOp.java
@@ -0,0 +1,275 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.json;
+
+import static org.apache.drill.test.rowSet.RowSetUtilities.longArray;
+import static org.apache.drill.test.rowSet.RowSetUtilities.mapArray;
+import static org.apache.drill.test.rowSet.RowSetUtilities.mapValue;
+import static org.apache.drill.test.rowSet.RowSetUtilities.singleMap;
+import static org.apache.drill.test.rowSet.RowSetUtilities.strArray;
+import static org.junit.Assert.assertTrue;
+
+import java.io.BufferedInputStream;
+import java.io.InputStream;
+
+import org.apache.drill.categories.RowSetTest;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.physical.impl.scan.BaseScanOperatorExecTest.BaseScanFixtureBuilder;
+import org.apache.drill.exec.physical.impl.scan.ScanOperatorExec;
+import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
+import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
+import org.apache.drill.exec.physical.impl.scan.framework.SchemaNegotiator;
+import org.apache.drill.exec.physical.rowSet.RowSet;
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoader;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder;
+import org.apache.drill.exec.store.easy.json.loader.JsonLoaderOptions;
+import org.apache.drill.test.SubOperatorTest;
+import org.apache.drill.test.rowSet.RowSetUtilities;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category(RowSetTest.class)
+public class TestJsonScanOp extends SubOperatorTest {
+
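+  /**
+   * Minimal ManagedReader fixture: opens the test file from the classpath,
+   * builds an EVF JsonLoader over the stream using the negotiated result
+   * set loader, delegates batch reads to the loader, and releases it on
+   * close.
+   */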
+  private static class JsonReaderFixture implements ManagedReader<SchemaNegotiator> {
+
+    private final String filePath;
+    private InputStream stream;
+    private JsonLoader jsonLoader;
+    private final JsonLoaderOptions options;
+
+    public JsonReaderFixture(String filePath, JsonLoaderOptions options) {
+      this.filePath = filePath;
+      this.options = options;
+    }
+
+    @Override
+    public boolean open(SchemaNegotiator negotiator) {
+      stream = new BufferedInputStream(getClass().getResourceAsStream(filePath));
+      jsonLoader = new JsonLoaderBuilder()
+          .resultSetLoader(negotiator.build())
+          .options(options)
+          .fromStream(stream)
+          .build();
+      return true;
+    }
+
+    @Override
+    public boolean next() {
+      return jsonLoader.readBatch();
+    }
+
+    @Override
+    public void close() {
+      if (jsonLoader != null) {
+        jsonLoader.close();
+        jsonLoader = null;
+      }
+    }
+  }
+
+  /**
+   * Test the case where the reader does not play the "first batch contains
+   * only schema" game, and instead returns data. The Scan operator will
+   * split the first batch into two: one with schema only, another with
+   * data.
+   */
+
+  @Test
+  public void testScanOperator() {
+
+    BaseScanFixtureBuilder builder = new BaseScanFixtureBuilder();
+    JsonLoaderOptions options = new JsonLoaderOptions();
+    options.allTextMode = true;
+    builder.addReader(new JsonReaderFixture("/store/json/schema_change_int_to_string.json", options));
+    builder.setProjection("field_3", "field_5");
+    ScanFixture scanFixture = builder.build();
+    ScanOperatorExec scanOp = scanFixture.scanOp;
+
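+    // The scan operator splits the reader's first batch in two: buildSchema()
+    // returns the schema-only batch (discarded here), then next() delivers
+    // the actual data rows.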
+    assertTrue(scanOp.buildSchema());
+    RowSet result = fixture.wrap(scanOp.batchAccessor().container());
+    result.clear();
+    assertTrue(scanOp.next());
+    result = fixture.wrap(scanOp.batchAccessor().container());
+
+    TupleMetadata expectedSchema = new SchemaBuilder()
+        .addMap("field_3")
+          .addNullable("inner_1", MinorType.VARCHAR)
+          .addNullable("inner_2", MinorType.VARCHAR)
+          .addMapArray("inner_3")
+            .addNullable("inner_object_field_1", MinorType.VARCHAR)
+            .resumeMap()
+          .resumeSchema()
+        .addMapArray("field_5")
+          .addArray("inner_list", MinorType.VARCHAR)
+          .addArray("inner_list_2", MinorType.VARCHAR)
+          .resumeSchema()
+        .buildSchema();
+
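+    // All-text mode is enabled above, so every scalar value below is
+    // expected as a VARCHAR string.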
+    RowSet expected = fixture.rowSetBuilder(expectedSchema)
+        .addRow(mapValue(null, null, mapArray()),
+                mapArray())
+        .addRow(mapValue("2", null, mapArray()),
+                mapArray(
+                  mapValue(strArray("1", "", "6"), strArray()),
+                  mapValue(strArray("3", "8"), strArray()),
+                  mapValue(strArray("12", "", "4", "null", "5"), strArray())))
+        .addRow(mapValue("5", "3", mapArray(singleMap(null), singleMap("10"))),
+            mapArray(
+                mapValue(strArray("5", "", "6.0", "1234"), strArray()),
+                mapValue(strArray("7", "8.0", "12341324"),
+                         strArray("1", "2", "2323.443e10", "hello there")),
+                mapValue(strArray("3", "4", "5"), strArray("10", "11", "12"))))
+        .build();
+
+    RowSetUtilities.verify(expected, result);
+    scanFixture.close();
+  }
+
+  @Test
+  public void testScanProjectMapSubset() {
+
+    BaseScanFixtureBuilder builder = new BaseScanFixtureBuilder();
+    JsonLoaderOptions options = new JsonLoaderOptions();
+    builder.addReader(new JsonReaderFixture("/store/json/schema_change_int_to_string.json", options));
+    builder.setProjection("field_3.inner_1", "field_3.inner_2");
+    ScanFixture scanFixture = builder.build();
+    ScanOperatorExec scanOp = scanFixture.scanOp;
+
+    assertTrue(scanOp.buildSchema());
+    RowSet result = fixture.wrap(scanOp.batchAccessor().container());
+    assertTrue(scanOp.next());
+    result = fixture.wrap(scanOp.batchAccessor().container());
+
+    TupleMetadata schema = new SchemaBuilder()
+        .addMap("field_3")
+          .addNullable("inner_1", MinorType.BIGINT)
+          .addNullable("inner_2", MinorType.BIGINT)
+          .resumeSchema()
+        .build();
+
+    RowSet expected = fixture.rowSetBuilder(schema)
+        .addSingleCol(mapValue(null, null))
+        .addSingleCol(mapValue(2L, null))
+        .addSingleCol(mapValue(5L, 3L))
+        .build();
+    RowSetUtilities.verify(expected, result);
+    scanFixture.close();
+  }
+
+  @Test
+  public void testScanProjectMapArraySubsetAndNull() {
+
+    BaseScanFixtureBuilder builder = new BaseScanFixtureBuilder();
+    JsonLoaderOptions options = new JsonLoaderOptions();
+    options.allTextMode = true;
+    builder.addReader(new JsonReaderFixture("/store/json/schema_change_int_to_string.json", options));
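+    // Projects one real member and one missing member of the repeated map;
+    // the missing `dummy` column is materialized using the null type set
+    // below (nullable VARCHAR).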
+    builder.setProjection("field_5.inner_list", "field_5.dummy");
+    builder.builder().nullType(Types.optional(MinorType.VARCHAR));
+    ScanFixture scanFixture = builder.build();
+    ScanOperatorExec scanOp = scanFixture.scanOp;
+
+    assertTrue(scanOp.buildSchema());
+    RowSet result = fixture.wrap(scanOp.batchAccessor().container());
+    assertTrue(scanOp.next());
+    result = fixture.wrap(scanOp.batchAccessor().container());
+
+    TupleMetadata schema = new SchemaBuilder()
+        .addMapArray("field_5")
+          .addArray("inner_list", MinorType.VARCHAR)
+          .addNullable("dummy", MinorType.VARCHAR)
+          .resumeSchema()
+        .build();
+
+    RowSet expected = fixture.rowSetBuilder(schema)
+        .addSingleCol(mapArray())
+        .addSingleCol(mapArray(
+            mapValue(strArray("1", "", "6"), null),
+            mapValue(strArray("3", "8"), null),
+            mapValue(strArray("12", "", "4", "null", "5"), null)))
+        .addSingleCol(mapArray(
+            mapValue(strArray("5", "", "6.0", "1234"), null),
+            mapValue(strArray("7", "8.0", "12341324"), null),
+            mapValue(strArray("3", "4", "5"), null)))
+        .build();
+    RowSetUtilities.verify(expected, result);
+    scanFixture.close();
+  }
+
+  @Test
+  public void testScanProject() {
+
+    BaseScanFixtureBuilder builder = new BaseScanFixtureBuilder();
+    JsonLoaderOptions options = new JsonLoaderOptions();
+    builder.addReader(new JsonReaderFixture("/store/json/schema_change_int_to_string.json", options));
+
+    // Projection omits field_2 which has an ambiguous type. Since
+    // the field is not materialized, the ambiguity is benign.
+    // (If this test triggers an error, perhaps a change has caused
+    // the column to become materialized.)
+
+    builder.setProjection("field_1", "field_3.inner_1", "field_3.inner_2", "field_4.inner_1",
+        "non_existent_at_root", "non_existent.nested.field");
+    builder.builder().nullType(Types.optional(MinorType.VARCHAR));
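+    // The two non-existent columns are materialized from the null type set
+    // above: a nullable VARCHAR at the root, and empty map levels around a
+    // nullable VARCHAR leaf for the nested path.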
+    ScanFixture scanFixture = builder.build();
+    ScanOperatorExec scanOp = scanFixture.scanOp;
+
+    assertTrue(scanOp.buildSchema());
+    RowSet result = fixture.wrap(scanOp.batchAccessor().container());
+    assertTrue(scanOp.next());
+    result = fixture.wrap(scanOp.batchAccessor().container());
+
+    // Projects all columns (since the revised scan operator handles
+    // missing-column projection). Note that the result spans two batches:
+    // the initial empty schema batch and the data batch.
+
+    TupleMetadata schema = new SchemaBuilder()
+        .addArray("field_1", MinorType.BIGINT)
+        .addMap("field_3")
+          .addNullable("inner_1", MinorType.BIGINT)
+          .addNullable("inner_2", MinorType.BIGINT)
+          .resumeSchema()
+        .addMap("field_4")
+          .addArray("inner_1", MinorType.BIGINT)
+          .resumeSchema()
+        .addNullable("non_existent_at_root", MinorType.VARCHAR)
+        .addMap("non_existent")
+          .addMap("nested")
+            .addNullable("field", MinorType.VARCHAR)
+            .resumeMap()
+          .resumeSchema()
+        .build();
+
+    Object nullMap = singleMap(singleMap(null));
+    RowSet expected = fixture.rowSetBuilder(schema)
+        .addRow(longArray(1L), mapValue(null, null), singleMap(longArray()), null, nullMap)
+        .addRow(longArray(5L), mapValue(2L, null), singleMap(longArray(1L, 2L, 3L)), null, nullMap)
+        .addRow(longArray(5L, 10L, 15L), mapValue(5L, 3L), singleMap(longArray(4L, 5L, 6L)), null, nullMap)
+        .build();
+    RowSetUtilities.verify(expected, result);
+    scanFixture.close();
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
index 0ba1f2261a..02f5d8209c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
@@ -25,7 +25,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -53,7 +53,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestLogReader extends ClusterTest {
 
   public static final String DATE_ONLY_PATTERN = "(\\d\\d\\d\\d)-(\\d\\d)-(\\d\\d) .*";
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockPlugin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockPlugin.java
index c7ddd9eabf..65cb0ab8c4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockPlugin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockPlugin.java
@@ -20,7 +20,7 @@ package org.apache.drill.exec.store.mock;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
@@ -45,7 +45,7 @@ import org.junit.experimental.categories.Category;
  * tested, where needed in unit tests.
  */
 
-@Category({RowSetTests.class, UnlikelyTest.class})
+@Category({RowSetTest.class, UnlikelyTest.class})
 public class TestMockPlugin extends ClusterTest {
 
   @BeforeClass
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockRowReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockRowReader.java
index ad207ef0e4..45587a84b5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockRowReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/mock/TestMockRowReader.java
@@ -27,7 +27,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.DataMode;
@@ -52,7 +52,7 @@ import org.junit.experimental.categories.Category;
  * scan operator, without the rest of Drill. A side effect is that this
  * also tests the scan mechanism itself.
  */
-@Category({RowSetTests.class, UnlikelyTest.class})
+@Category({RowSetTest.class, UnlikelyTest.class})
 public class TestMockRowReader extends SubOperatorTest {
 
   private static ScanFixture buildScan(MockSubScanPOP config,
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/sequencefile/TestSequenceFileReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/sequencefile/TestSequenceFileReader.java
index fd72e8ef26..f989a3f41d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/sequencefile/TestSequenceFileReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/sequencefile/TestSequenceFileReader.java
@@ -23,7 +23,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.nio.file.Paths;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
@@ -39,7 +39,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestSequenceFileReader extends ClusterTest {
 
   @BeforeClass
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java
index e80b9f06d7..5ab2cfeb1b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java
@@ -25,13 +25,13 @@ public class TestComplexTypeWriter  extends BaseTestQuery {
   @Test
   //basic case. convert varchar into json.
   public void testA0() throws Exception{
-    test(" select convert_from('{x:100, y:215.6}' ,'JSON') as mycol from cp.`tpch/nation.parquet`;");
+    test("select convert_from('{x:100, y:215.6}' ,'JSON') as mycol from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //map contains int, float, repeated list , repeated map, nested repeated map, etc.
   public void testA1() throws Exception{
-    test(" select convert_from('{x:100, y:215.6, z: [1, 2, 3], s : [[5, 6, 7], [8, 9]], " +
+    test("select convert_from('{x:100, y:215.6, z: [1, 2, 3], s : [[5, 6, 7], [8, 9]], " +
                                 " t : [{a : 100, b: 200}, {a:300, b: 400}], " +
                                 " nrmp: [ { x: [{ id: 123}], y: { y : \"SQL\"} }] }' ,'JSON') " +
                                 " as mycol from cp.`tpch/nation.parquet`;");
@@ -40,55 +40,55 @@ public class TestComplexTypeWriter  extends BaseTestQuery {
   @Test
   //two convert functions.
   public void testA2() throws Exception{
-    test(" select convert_from('{x:100, y:215.6}' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
+    test("select convert_from('{x:100, y:215.6}' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //two convert functions.  One convert's input comes from a string concat function.
   public void testA3() throws Exception{
-    test(" select convert_from(concat('{x:100,',  'y:215.6}') ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
+    test("select convert_from(concat('{x:100,',  'y:215.6}') ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //two convert functions. One's input is an empty map.
   public void testA4() throws Exception{
-    test(" select convert_from('{}' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
+    test("select convert_from('{}' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //two convert functions. One's input is an empty list ( ok to have null in the result?)
   public void testA5() throws Exception{
-    test(" select convert_from('[]' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
+    test("select convert_from('[]' ,'JSON') as mycol1, convert_from('{x:100, y:215.6}' ,'JSON') as mycol2 from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //input is a list of BigInt. Output will be a repeated list vector.
   public void testA6() throws Exception{
-    test(" select convert_from('[1, 2, 3]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
+    test("select convert_from('[1, 2, 3]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //input is a list of float. Output will be a repeated list vector.
   public void testA7() throws Exception{
-    test(" select convert_from('[1.2, 2.3, 3.5]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
+    test("select convert_from('[1.2, 2.3, 3.5]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //input is a list of list of big int. Output will be a repeated list vector.
   public void testA8() throws Exception{
-    test(" select convert_from('[ [1, 2], [3, 4], [5]]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
+    test("select convert_from('[ [1, 2], [3, 4], [5]]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //input is a list of map. Output will be a repeated list vector.
   public void testA9() throws Exception{
-    test(" select convert_from('[{a : 100, b: 200}, {a:300, b: 400}]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
+    test("select convert_from('[{a : 100, b: 200}, {a:300, b: 400}]' ,'JSON') as mycol1  from cp.`tpch/nation.parquet`;");
   }
 
   @Test
   //two convert functions, one regular nest functions, used with Filter op.
   public void testA10() throws Exception{
-    test(" select convert_from('{x:100, y:215.6}' ,'JSON') as mycol1, " +
+    test("select convert_from('{x:100, y:215.6}' ,'JSON') as mycol1, " +
          "        convert_from('{x:200, y:678.9}' ,'JSON') as mycol2, " +
          "        1 + 2 * 3 as numvalue " +
          " from cp.`tpch/nation.parquet` where n_nationkey > 5;");
@@ -97,15 +97,14 @@ public class TestComplexTypeWriter  extends BaseTestQuery {
   @Test
   //convert from string constructed from columns in parquet file.
   public void testA11() throws Exception{
-    test(" select convert_from(concat(concat('{ NationName: \"', N_NAME) , '\"}'), 'JSON')" +
+    test("select convert_from(concat(concat('{ NationName: \"', N_NAME) , '\"}'), 'JSON')" +
          " from cp.`tpch/nation.parquet` where n_nationkey > 5;");
   }
 
   @Test
   //Test multiple batches creation ( require multiple alloc for complex writer during Project ).
   public void testA100() throws Exception{
-    test(" select convert_from(concat(concat('{ Price : ', L_EXTENDEDPRICE) , '}') , 'JSON') " +
+    test("select convert_from(concat(concat('{ Price : ', L_EXTENDEDPRICE) , '}') , 'JSON') " +
          " from cp.`tpch/lineitem.parquet` limit 10; ");
   }
-
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
index bc27e88044..cb205453b1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
@@ -22,15 +22,17 @@ import static org.junit.Assert.assertEquals;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.List;
+import java.util.TimeZone;
 
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
-import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+// TODO: Move to JSON reader package after code review
 public class TestExtendedTypes extends BaseTestQuery {
+
   @BeforeClass
   public static void setupTestFiles() {
     dirTestWatcher.copyResourceToRoot(Paths.get("vector", "complex"));
@@ -38,14 +40,19 @@ public class TestExtendedTypes extends BaseTestQuery {
 
   @Test
   public void checkReadWriteExtended() throws Exception {
-    mockUtcDateTimeZone();
+    runBoth(() -> doCheckReadWriteExtended());
+  }
+
+  private void doCheckReadWriteExtended() throws Exception {
 
     final String originalFile = "vector/complex/extended.json";
     final String newTable = "TestExtendedTypes/newjson";
 
+    TimeZone origZone = TimeZone.getDefault();
     try {
-      testNoResult(String.format("ALTER SESSION SET `%s` = 'json'", ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName()));
-      testNoResult(String.format("ALTER SESSION SET `%s` = true", ExecConstants.JSON_EXTENDED_TYPES.getOptionName()));
+      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+      alterSession(ExecConstants.OUTPUT_FORMAT_OPTION, "json");
+      alterSession(ExecConstants.JSON_EXTENDED_TYPES_KEY, true);
 
       // create table
       test("create table dfs.tmp.`%s` as select * from cp.`%s`", newTable, originalFile);
@@ -57,18 +64,26 @@ public class TestExtendedTypes extends BaseTestQuery {
       final byte[] newData = Files.readAllBytes(dirTestWatcher.getDfsTestTmpDir().toPath().resolve(Paths.get(newTable, "0_0_0.json")));
       assertEquals(new String(originalData), new String(newData));
     } finally {
-      resetSessionOption(ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName());
-      resetSessionOption(ExecConstants.JSON_EXTENDED_TYPES.getOptionName());
+      TimeZone.setDefault(origZone);
+      resetSessionOption(ExecConstants.OUTPUT_FORMAT_OPTION);
+      resetSessionOption(ExecConstants.JSON_EXTENDED_TYPES_KEY);
+      test("DROP TABLE IF EXISTS dfs.tmp.`%s`", newTable);
     }
   }
 
   @Test
   public void testMongoExtendedTypes() throws Exception {
+    runBoth(() -> doTestMongoExtendedTypes());
+  }
+
+  private void doTestMongoExtendedTypes() throws Exception {
     final String originalFile = "vector/complex/mongo_extended.json";
 
+    TimeZone origZone = TimeZone.getDefault();
     try {
-      testNoResult(String.format("ALTER SESSION SET `%s` = 'json'", ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName()));
-      testNoResult(String.format("ALTER SESSION SET `%s` = true", ExecConstants.JSON_EXTENDED_TYPES.getOptionName()));
+      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+      alterSession(ExecConstants.OUTPUT_FORMAT_OPTION, "json");
+      alterSession(ExecConstants.JSON_EXTENDED_TYPES_KEY, true);
 
       int actualRecordCount = testSql(String.format("select * from cp.`%s`", originalFile));
       assertEquals(
@@ -78,10 +93,34 @@ public class TestExtendedTypes extends BaseTestQuery {
       List<QueryDataBatch> resultList = testSqlWithResults(String.format("select * from dfs.`%s`", originalFile));
       String actual = getResultString(resultList, ",");
       String expected = "drill_timestamp_millies,bin,bin1\n2015-07-07 03:59:43.488,drill,drill\n";
-      Assert.assertEquals(expected, actual);
+      assertEquals(expected, actual);
+    } finally {
+      TimeZone.setDefault(origZone);
+      resetSessionOption(ExecConstants.OUTPUT_FORMAT_OPTION);
+      resetSessionOption(ExecConstants.JSON_EXTENDED_TYPES_KEY);
+    }
+  }
+
+  private void enableV2Reader(boolean enable) throws Exception {
+    alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() throws Exception {
+    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
+
+  public interface TestWrapper {
+    void apply() throws Exception;
+  }
+
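+  // Runs the wrapped test body twice: once with the original JSON reader
+  // and once with the V2 (EVF-based) reader, resetting the option afterward.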
+  public void runBoth(TestWrapper wrapper) throws Exception {
+    try {
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
     } finally {
-      resetSessionOption(ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName());
-      resetSessionOption(ExecConstants.JSON_EXTENDED_TYPES.getOptionName());
+      resetV2Reader();
     }
   }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonEscapeAnyChar.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonEscapeAnyChar.java
index 314a328559..dcd65a6f92 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonEscapeAnyChar.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonEscapeAnyChar.java
@@ -20,6 +20,7 @@ package org.apache.drill.exec.vector.complex.writer;
 import org.apache.commons.io.FileUtils;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.vector.complex.writer.TestJsonReader.TestWrapper;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterTest;
 import org.junit.After;
@@ -45,9 +46,23 @@ public class TestJsonEscapeAnyChar extends ClusterTest {
     FileUtils.writeStringToFile(testFile, JSON_DATA);
   }
 
+  public void runBoth(TestWrapper wrapper) throws Exception {
+    try {
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
+    } finally {
+      resetV2Reader();
+    }
+  }
+
   @Test
   public void testwithOptionEnabled() throws Exception {
+    runBoth(() -> doTestWithOptionEnabled());
+  }
 
+  private void doTestWithOptionEnabled() throws Exception {
     try {
       enableJsonReaderEscapeAnyChar();
       testBuilder()
@@ -61,9 +76,12 @@ public class TestJsonEscapeAnyChar extends ClusterTest {
       resetJsonReaderEscapeAnyChar();
     }
   }
-
   @Test
   public void testwithOptionDisabled() throws Exception {
+    runBoth(() -> doTestWithOptionDisabled());
+  }
+
+  private void doTestWithOptionDisabled() throws Exception {
     try {
       queryBuilder().sql(QUERY)
         .run();
@@ -80,6 +98,14 @@ public class TestJsonEscapeAnyChar extends ClusterTest {
     client.alterSession(ExecConstants.JSON_READER_ESCAPE_ANY_CHAR, false);
   }
 
+  private void enableV2Reader(boolean enable) {
+    client.alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() {
+    client.resetSession(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
+
   @After
   public void teardown() throws Exception {
     FileUtils.deleteQuietly(testFile);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
index 9b32b465ff..5b440a740b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonNanInf.java
@@ -22,6 +22,7 @@ import static org.hamcrest.CoreMatchers.containsString;
 import static org.junit.Assert.assertFalse;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.util.List;
@@ -35,15 +36,32 @@ import org.apache.drill.exec.record.RecordBatchLoader;
 import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.vector.VarCharVector;
+import org.apache.drill.exec.vector.complex.writer.TestJsonReader.TestWrapper;
 import org.apache.drill.test.BaseTestQuery;
 import org.junit.Ignore;
 import org.junit.Test;
 
+// TODO: Move to JSON reader package after code review
+// TODO: Split or rename: this tests more than NanInf
 public class TestJsonNanInf extends BaseTestQuery {
 
+  public void runBoth(TestWrapper wrapper) throws Exception {
+    try {
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
+    } finally {
+      resetV2Reader();
+    }
+  }
 
   @Test
   public void testNanInfSelect() throws Exception {
+    runBoth(() -> doTestNanInfSelect());
+  }
+
+  private void doTestNanInfSelect() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "{\"nan_col\":NaN, \"inf_col\":Infinity}";
@@ -65,6 +83,10 @@ public class TestJsonNanInf extends BaseTestQuery {
   @Test
   @Ignore // see DRILL-6018
   public void testExcludePositiveInfinity() throws Exception {
+    runBoth(() -> doTestExcludePositiveInfinity());
+  }
+
+  private void doTestExcludePositiveInfinity() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "[{\"nan_col\":NaN, \"inf_col\":Infinity}," +
@@ -87,6 +109,10 @@ public class TestJsonNanInf extends BaseTestQuery {
   @Test
   @Ignore // see DRILL-6018
   public void testExcludeNegativeInfinity() throws Exception {
+    runBoth(() -> doTestExcludeNegativeInfinity());
+  }
+
+  private void doTestExcludeNegativeInfinity() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "[{\"nan_col\":NaN, \"inf_col\":-Infinity}," +
@@ -109,6 +135,10 @@ public class TestJsonNanInf extends BaseTestQuery {
   @Test
   @Ignore // see DRILL-6018
   public void testIncludePositiveInfinity() throws Exception {
+    runBoth(() -> doTestIncludePositiveInfinity());
+  }
+
+  private void doTestIncludePositiveInfinity() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "[{\"nan_col\":NaN, \"inf_col\":Infinity}," +
@@ -130,6 +160,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testExcludeNan() throws Exception {
+    runBoth(() -> doTestExcludeNan());
+  }
+
+  private void doTestExcludeNan() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "[{\"nan_col\":NaN, \"inf_col\":-Infinity}," +
@@ -149,9 +183,12 @@ public class TestJsonNanInf extends BaseTestQuery {
     }
   }
 
-
   @Test
   public void testIncludeNan() throws Exception {
+    runBoth(() -> doTestIncludeNan());
+  }
+
+  private void doTestIncludeNan() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "[{\"nan_col\":NaN, \"inf_col\":-Infinity}," +
@@ -171,8 +208,12 @@ public class TestJsonNanInf extends BaseTestQuery {
     }
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testNanInfFailure() throws Exception {
+    runBoth(() -> doTestNanInfFailure());
+  }
+
+  private void doTestNanInfFailure() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     test("alter session set `%s` = false", ExecConstants.JSON_READER_NAN_INF_NUMBERS);
@@ -180,9 +221,9 @@ public class TestJsonNanInf extends BaseTestQuery {
     try {
       FileUtils.writeStringToFile(file, json);
       test("select * from dfs.`%s`;", table);
+      fail("Expected the query to fail: NaN/Infinity support is disabled");
     } catch (UserRemoteException e) {
       assertThat(e.getMessage(), containsString("Error parsing JSON"));
-      throw e;
     } finally {
       resetSessionOption(ExecConstants.JSON_READER_NAN_INF_NUMBERS);
       FileUtils.deleteQuietly(file);
@@ -191,6 +232,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testCreateTableNanInf() throws Exception {
+    runBoth(() -> doTestCreateTableNanInf());
+  }
+
+  private void doTestCreateTableNanInf() throws Exception {
     String table = "nan_test.json";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String json = "{\"nan_col\":NaN, \"inf_col\":Infinity}";
@@ -217,6 +262,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testConvertFromJsonFunction() throws Exception {
+    runBoth(() -> doTestConvertFromJsonFunction());
+  }
+
+  private void doTestConvertFromJsonFunction() throws Exception {
     String table = "nan_test.csv";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String csv = "col_0, {\"nan_col\":NaN}";
@@ -234,10 +283,12 @@ public class TestJsonNanInf extends BaseTestQuery {
     }
   }
 
-
-
   @Test
   public void testLargeStringBinary() throws Exception {
+    runBoth(() -> doTestLargeStringBinary());
+  }
+
+  private void doTestLargeStringBinary() throws Exception {
     String chunk = "0123456789";
     StringBuilder builder = new StringBuilder();
     for (int i = 0; i < 1000; i++) {
@@ -249,6 +300,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testConvertToJsonFunction() throws Exception {
+    runBoth(() -> doTestConvertToJsonFunction());
+  }
+
+  private void doTestConvertToJsonFunction() throws Exception {
     String table = "nan_test.csv";
     File file = new File(dirTestWatcher.getRootDir(), table);
     String csv = "col_0, {\"nan_col\":NaN}";
@@ -290,6 +345,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testOrderByWithNaN() throws Exception {
+    runBoth(() -> doTestOrderByWithNaN());
+  }
+
+  private void doTestOrderByWithNaN() throws Exception {
     String table_name = "nan_test.json";
     String json = "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":3, \"attr4\":NaN}\n" +
         "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":4, \"attr4\":Infinity}\n" +
@@ -319,6 +378,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testNestedLoopJoinWithNaN() throws Exception {
+    runBoth(() -> doTestNestedLoopJoinWithNaN());
+  }
+
+  private void doTestNestedLoopJoinWithNaN() throws Exception {
     String table_name = "nan_test.json";
     String json = "{\"name\":\"object1\", \"attr1\":1, \"attr2\":2, \"attr3\":3, \"attr4\":NaN}\n" +
             "{\"name\":\"object1\", \"attr1\":1, \"attr2\":2, \"attr3\":3, \"attr4\":NaN}\n" +
@@ -358,6 +421,10 @@ public class TestJsonNanInf extends BaseTestQuery {
 
   @Test
   public void testHashJoinWithNaN() throws Exception {
+    runBoth(() -> doTestHashJoinWithNaN());
+  }
+
+  private void doTestHashJoinWithNaN() throws Exception {
     String table_name = "nan_test.json";
     String json = "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":3, \"attr4\":NaN}\n" +
             "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":4, \"attr4\":Infinity}\n" +
@@ -386,9 +453,12 @@ public class TestJsonNanInf extends BaseTestQuery {
     }
   }
 
-
   @Test
   public void testMergeJoinWithNaN() throws Exception {
+    runBoth(() -> doTestMergeJoinWithNaN());
+  }
+
+  private void doTestMergeJoinWithNaN() throws Exception {
     String table_name = "nan_test.json";
     String json = "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":3, \"attr4\":NaN}\n" +
             "{\"name\":\"obj1\", \"attr1\":1, \"attr2\":2, \"attr3\":4, \"attr4\":Infinity}\n" +
@@ -417,4 +487,11 @@ public class TestJsonNanInf extends BaseTestQuery {
     }
   }
 
+  private void enableV2Reader(boolean enable) throws Exception {
+    alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
+
+  private void resetV2Reader() throws Exception {
+    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
+  }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
index 0643e22e44..bd2517ceea 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
@@ -20,42 +20,41 @@ package org.apache.drill.exec.vector.complex.writer;
 import static org.apache.drill.test.TestBuilder.listOf;
 import static org.apache.drill.test.TestBuilder.mapOf;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
 
 import java.io.BufferedOutputStream;
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
 import java.nio.file.Paths;
-import java.util.List;
-import java.util.zip.GZIPOutputStream;
 
-import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.proto.UserBitShared;
-import org.apache.drill.exec.record.RecordBatchLoader;
-import org.apache.drill.exec.record.VectorWrapper;
-import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.store.easy.json.JSONRecordReader;
 import org.apache.drill.exec.util.JsonStringHashMap;
 import org.apache.drill.exec.util.Text;
-import org.apache.drill.exec.vector.IntVector;
-import org.apache.drill.exec.vector.RepeatedBigIntVector;
 import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
 import org.apache.drill.shaded.guava.com.google.common.io.Files;
 import org.apache.drill.test.BaseTestQuery;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * Some tests previously here have been moved and rewritten to use
+ * the newer test framework. Find them in
+ * <tt>org.apache.drill.exec.store.json</tt>:
+ * <ul>
+ * <li><tt>TestJsonReaderFns</tt></li>
+ * <li><tt>TestJsonReaderQueries</tt></li>
+ * </ul>
+ */
+// TODO: Move to JSON reader package after code review
+@Category(RowSetTest.class)
 public class TestJsonReader extends BaseTestQuery {
   private static final Logger logger = LoggerFactory.getLogger(TestJsonReader.class);
 
@@ -65,60 +64,44 @@ public class TestJsonReader extends BaseTestQuery {
     dirTestWatcher.copyResourceToRoot(Paths.get("vector","complex", "writer"));
   }
 
-  @Test
-  public void testEmptyList() throws Exception {
-    final String root = "store/json/emptyLists";
+  private void enableV2Reader(boolean enable) throws Exception {
+    alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable);
+  }
 
-    testBuilder()
-        .sqlQuery("select count(a[0]) as ct from dfs.`%s`", root, root)
-        .ordered()
-        .baselineColumns("ct")
-        .baselineValues(6l)
-        .build()
-        .run();
+  private void resetV2Reader() throws Exception {
+    resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY);
   }
 
-  @Test
-  public void schemaChange() throws Exception {
-    // Verifies that the schema change does not cause a
-    // crash. A pretty minimal test.
-    // TODO: Verify actual results.
-    test("select b from dfs.`vector/complex/writer/schemaChange/`");
+  public interface TestWrapper {
+    void apply() throws Exception;
   }
 
-  @Test
-  public void testFieldSelectionBug() throws Exception {
+  public void runBoth(TestWrapper wrapper) throws Exception {
     try {
-      testBuilder()
-          .sqlQuery("select t.field_4.inner_3 as col_1, t.field_4 as col_2 from cp.`store/json/schema_change_int_to_string.json` t")
-          .unOrdered()
-          .optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
-          .baselineColumns("col_1", "col_2")
-          .baselineValues(
-              mapOf(),
-              mapOf(
-                  "inner_1", listOf(),
-                  "inner_3", mapOf()))
-          .baselineValues(
-              mapOf("inner_object_field_1", "2"),
-              mapOf(
-                  "inner_1", listOf("1", "2", "3"),
-                  "inner_2", "3",
-                  "inner_3", mapOf("inner_object_field_1", "2")))
-          .baselineValues(
-              mapOf(),
-              mapOf(
-                  "inner_1", listOf("4", "5", "6"),
-                  "inner_2", "3",
-                  "inner_3", mapOf()))
-          .go();
+      enableV2Reader(false);
+      wrapper.apply();
+      enableV2Reader(true);
+      wrapper.apply();
     } finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
+      resetV2Reader();
     }
   }
 
+  @Test
+  public void schemaChange() throws Exception {
+    runBoth(() -> doSchemaChange());
+  }
+
+  private void doSchemaChange() throws Exception {
+    test("select b from dfs.`vector/complex/writer/schemaChange/`");
+  }
+
   @Test
   public void testSplitAndTransferFailure() throws Exception {
+    runBoth(() -> doTestSplitAndTransferFailure());
+  }
+
+  private void doTestSplitAndTransferFailure() throws Exception {
     final String testVal = "a string";
     testBuilder()
         .sqlQuery("select flatten(config) as flat from cp.`store/json/null_list.json`")
@@ -148,6 +131,10 @@ public class TestJsonReader extends BaseTestQuery {
 
   @Test // DRILL-1824
   public void schemaChangeValidate() throws Exception {
+    runBoth(() -> doSchemaChangeValidate());
+  }
+
+  private void doSchemaChangeValidate() throws Exception {
     testBuilder()
       .sqlQuery("select b from dfs.`vector/complex/writer/schemaChange/`")
       .unOrdered()
@@ -183,251 +170,7 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
-  @Test
-  public void testReadCompressed() throws Exception {
-    String filepath = "compressed_json.json";
-    File f = new File(dirTestWatcher.getRootDir(), filepath);
-    PrintWriter out = new PrintWriter(f);
-    out.println("{\"a\" :5}");
-    out.close();
-
-    gzipIt(f);
-    testBuilder()
-        .sqlQuery("select * from dfs.`%s.gz`", filepath)
-        .unOrdered()
-        .baselineColumns("a")
-        .baselineValues(5l)
-        .build().run();
-
-    // test reading the uncompressed version as well
-    testBuilder()
-        .sqlQuery("select * from dfs.`%s`", filepath)
-        .unOrdered()
-        .baselineColumns("a")
-        .baselineValues(5l)
-        .build().run();
-  }
-
-  public static void gzipIt(File sourceFile) throws IOException {
-
-    // modified from: http://www.mkyong.com/java/how-to-compress-a-file-in-gzip-format/
-    byte[] buffer = new byte[1024];
-    GZIPOutputStream gzos =
-        new GZIPOutputStream(new FileOutputStream(sourceFile.getPath() + ".gz"));
-
-    FileInputStream in =
-        new FileInputStream(sourceFile);
-
-    int len;
-    while ((len = in.read(buffer)) > 0) {
-      gzos.write(buffer, 0, len);
-    }
-    in.close();
-    gzos.finish();
-    gzos.close();
-  }
-
-  @Test
-  public void testDrill_1419() throws Exception {
-    String[] queries = {"select t.trans_id, t.trans_info.prod_id[0],t.trans_info.prod_id[1] from cp.`store/json/clicks.json` t limit 5"};
-    long[] rowCounts = {5};
-    String filename = "/store/json/clicks.json";
-    runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-  }
-
-  @Test
-  public void testRepeatedCount() throws Exception {
-    test("select repeated_count(str_list) from cp.`store/json/json_basic_repeated_varchar.json`");
-    test("select repeated_count(INT_col) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_count(FLOAT4_col) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_count(VARCHAR_col) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_count(BIT_col) from cp.`parquet/alltypes_repeated.json`");
-  }
-
-  @Test
-  public void testRepeatedContains() throws Exception {
-    test("select repeated_contains(str_list, 'asdf') from cp.`store/json/json_basic_repeated_varchar.json`");
-    test("select repeated_contains(INT_col, -2147483648) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_contains(FLOAT4_col, -1000000000000.0) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_contains(VARCHAR_col, 'qwerty' ) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_contains(BIT_col, true) from cp.`parquet/alltypes_repeated.json`");
-    test("select repeated_contains(BIT_col, false) from cp.`parquet/alltypes_repeated.json`");
-  }
-
-  @Test
-  public void testSingleColumnRead_vector_fill_bug() throws Exception {
-    String[] queries = {"select * from cp.`store/json/single_column_long_file.json`"};
-    long[] rowCounts = {13512};
-    String filename = "/store/json/single_column_long_file.json";
-    runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-  }
-
-  @Test
-  public void testNonExistentColumnReadAlone() throws Exception {
-    String[] queries = {"select non_existent_column from cp.`store/json/single_column_long_file.json`"};
-    long[] rowCounts = {13512};
-    String filename = "/store/json/single_column_long_file.json";
-    runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-  }
-
-  @Test
-  public void testAllTextMode() throws Exception {
-    try {
-      alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
-      String[] queries = {"select * from cp.`store/json/schema_change_int_to_string.json`"};
-      long[] rowCounts = {3};
-      String filename = "/store/json/schema_change_int_to_string.json";
-      runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-    } finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
-    }
-  }
-
-  @Test
-  public void readComplexWithStar() throws Exception {
-    List<QueryDataBatch> results = testSqlWithResults("select * from cp.`store/json/test_complex_read_with_star.json`");
-    assertEquals(1, results.size());
-
-    RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
-    QueryDataBatch batch = results.get(0);
-
-    assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
-    assertEquals(3, batchLoader.getSchema().getFieldCount());
-    testExistentColumns(batchLoader);
-
-    batch.release();
-    batchLoader.clear();
-  }
-
-  @Test
-  public void testNullWhereListExpected() throws Exception {
-    try {
-      alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
-      String[] queries = {"select * from cp.`store/json/null_where_list_expected.json`"};
-      long[] rowCounts = {3};
-      String filename = "/store/json/null_where_list_expected.json";
-      runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-    }
-    finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
-    }
-  }
-
-  @Test
-  public void testNullWhereMapExpected() throws Exception {
-    try {
-      alterSession(ExecConstants.JSON_ALL_TEXT_MODE, true);
-      String[] queries = {"select * from cp.`store/json/null_where_map_expected.json`"};
-      long[] rowCounts = {3};
-      String filename = "/store/json/null_where_map_expected.json";
-      runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
-    }
-    finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
-    }
-  }
-
-  @Test
-  public void ensureProjectionPushdown() throws Exception {
-    try {
-      // Tests to make sure that we are correctly eliminating schema changing
-      // columns. If completes, means that the projection pushdown was
-      // successful.
-      test("alter system set `store.json.all_text_mode` = false; "
-          + "select  t.field_1, t.field_3.inner_1, t.field_3.inner_2, t.field_4.inner_1 "
-          + "from cp.`store/json/schema_change_int_to_string.json` t");
-    } finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
-    }
-  }
-
-  // The project pushdown rule is correctly adding the projected columns to the
-  // scan, however it is not removing the redundant project operator after the
-  // scan, this tests runs a physical plan generated from one of the tests to
-  // ensure that the project is filtering out the correct data in the scan alone.
-  @Test
-  public void testProjectPushdown() throws Exception {
-    try {
-      String[] queries = {Files.asCharSource(DrillFileUtils.getResourceAsFile(
-          "/store/json/project_pushdown_json_physical_plan.json"), Charsets.UTF_8).read()};
-      String filename = "/store/json/schema_change_int_to_string.json";
-      alterSession(ExecConstants.JSON_ALL_TEXT_MODE, false);
-      long[] rowCounts = {3};
-      runTestsOnFile(filename, UserBitShared.QueryType.PHYSICAL, queries, rowCounts);
-
-      List<QueryDataBatch> results = testPhysicalWithResults(queries[0]);
-      assertEquals(1, results.size());
-      // "`field_1`", "`field_3`.`inner_1`", "`field_3`.`inner_2`", "`field_4`.`inner_1`"
-
-      RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
-      QueryDataBatch batch = results.get(0);
-      assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
-
-      // this used to be five. It is now four. This is because the plan doesn't
-      // have a project. Scanners are not responsible for projecting non-existent
-      // columns (as long as they project one column)
-      //
-      // That said, the JSON format plugin does claim it can do project
-      // push-down, which means it will ensure columns for any column
-      // mentioned in the project list, in a form consistent with the schema
-      // path. In this case, `non_existent`.`nested`.`field` appears in
-      // the query. But, even more oddly, the missing field is inserted only
-      // if all text mode is true, omitted if all text mode is false.
-      // Seems overly complex.
-      assertEquals(3, batchLoader.getSchema().getFieldCount());
-      testExistentColumns(batchLoader);
-
-      batch.release();
-      batchLoader.clear();
-    } finally {
-      resetSessionOption(ExecConstants.JSON_ALL_TEXT_MODE);
-    }
-  }
-
-  @Test
-  public void testJsonDirectoryWithEmptyFile() throws Exception {
-    testBuilder()
-        .sqlQuery("select * from dfs.`store/json/jsonDirectoryWithEmpyFile`")
-        .unOrdered()
-        .baselineColumns("a")
-        .baselineValues(1l)
-        .build()
-        .run();
-  }
-
-  private void testExistentColumns(RecordBatchLoader batchLoader) throws SchemaChangeException {
-    VectorWrapper<?> vw = batchLoader.getValueAccessorById(
-        RepeatedBigIntVector.class,
-        batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_1")).getFieldIds()
-    );
-    assertEquals("[1]", vw.getValueVector().getAccessor().getObject(0).toString());
-    assertEquals("[5]", vw.getValueVector().getAccessor().getObject(1).toString());
-    assertEquals("[5,10,15]", vw.getValueVector().getAccessor().getObject(2).toString());
-
-    vw = batchLoader.getValueAccessorById(
-        IntVector.class,
-        batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_3", "inner_1")).getFieldIds()
-    );
-    assertNull(vw.getValueVector().getAccessor().getObject(0));
-    assertEquals(2l, vw.getValueVector().getAccessor().getObject(1));
-    assertEquals(5l, vw.getValueVector().getAccessor().getObject(2));
-
-    vw = batchLoader.getValueAccessorById(
-        IntVector.class,
-        batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_3", "inner_2")).getFieldIds()
-    );
-    assertNull(vw.getValueVector().getAccessor().getObject(0));
-    assertNull(vw.getValueVector().getAccessor().getObject(1));
-    assertEquals(3l, vw.getValueVector().getAccessor().getObject(2));
-
-    vw = batchLoader.getValueAccessorById(
-        RepeatedBigIntVector.class,
-        batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_4", "inner_1")).getFieldIds()
-    );
-    assertEquals("[]", vw.getValueVector().getAccessor().getObject(0).toString());
-    assertEquals("[1,2,3]", vw.getValueVector().getAccessor().getObject(1).toString());
-    assertEquals("[4,5,6]", vw.getValueVector().getAccessor().getObject(2).toString());
-  }
+  // TODO: Union not yet supported in V2.
 
   @Test
   public void testSelectStarWithUnionType() throws Exception {
@@ -481,6 +224,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testSelectFromListWithCase() throws Exception {
     try {
@@ -498,6 +243,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testTypeCase() throws Exception {
     try {
@@ -518,6 +265,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testSumWithTypeCase() throws Exception {
     try {
@@ -536,6 +285,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testUnionExpressionMaterialization() throws Exception {
     try {
@@ -553,6 +304,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testSumMultipleBatches() throws Exception {
     File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("multi_batch"));
@@ -577,6 +330,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test
   public void testSumFilesWithDifferentSchema() throws Exception {
     File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("multi_file"));
@@ -606,6 +361,8 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  // V1 version of the test. See TestJsonReaderQueries for the V2 version.
+
   @Test
   public void drill_4032() throws Exception {
     File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4032"));
@@ -625,19 +382,23 @@ public class TestJsonReader extends BaseTestQuery {
 
   @Test
   public void drill_4479() throws Exception {
-    try {
-      File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4479"));
-      table_dir.mkdir();
-      BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "mostlynulls.json")));
-      // Create an entire batch of null values for 3 columns
-      for (int i = 0; i < JSONRecordReader.DEFAULT_ROWS_PER_BATCH; i++) {
-        os.write("{\"a\": null, \"b\": null, \"c\": null}".getBytes());
-      }
-      // Add a row with {bigint,  float, string} values
-      os.write("{\"a\": 123456789123, \"b\": 99.999, \"c\": \"Hello World\"}".getBytes());
-      os.flush();
-      os.close();
+    File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4479"));
+    table_dir.mkdir();
+    BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "mostlynulls.json")));
+    // Create an entire batch of null values for 3 columns
+    for (int i = 0; i < JSONRecordReader.DEFAULT_ROWS_PER_BATCH; i++) {
+      os.write("{\"a\": null, \"b\": null, \"c\": null}".getBytes());
+    }
+    // Add a row with {bigint, float, string} values
+    os.write("{\"a\": 123456789123, \"b\": 99.999, \"c\": \"Hello World\"}".getBytes());
+    os.flush();
+    os.close();
+
+    runBoth(() -> doDrill_4479());
+  }
 
+  private void doDrill_4479() throws Exception {
+    try {
       testBuilder()
         .sqlQuery("select c, count(*) as cnt from dfs.tmp.drill_4479 t group by c")
         .ordered()
@@ -675,6 +436,10 @@ public class TestJsonReader extends BaseTestQuery {
       writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
     }
 
+    runBoth(() -> doTestFlattenEmptyArrayWithAllTextMode());
+  }
+
+  private void doTestFlattenEmptyArrayWithAllTextMode() throws Exception {
     try {
       String query = "select flatten(t.a.b.c) as c from dfs.`empty_array_all_text_mode.json` t";
 
@@ -703,6 +468,10 @@ public class TestJsonReader extends BaseTestQuery {
       writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
     }
 
+    runBoth(() -> doTestFlattenEmptyArrayWithUnionType());
+  }
+
+  private void doTestFlattenEmptyArrayWithUnionType() throws Exception {
     try {
       String query = "select flatten(t.a.b.c) as c from dfs.`empty_array.json` t";
 
@@ -734,6 +503,10 @@ public class TestJsonReader extends BaseTestQuery {
       writer.write("{\"rk\": \"a\", \"m\": {\"a\":\"1\"}}");
     }
 
+    runBoth(() -> doTestKvgenWithUnionAll(fileName));
+  }
+
+  private void doTestKvgenWithUnionAll(String fileName) throws Exception {
     String query = String.format("select kvgen(m) as res from (select m from dfs.`%s` union all " +
         "select convert_from('{\"a\" : null}' ,'json') as m from (values(1)))", fileName);
     assertEquals("Row count should match", 2, testSql(query));
@@ -746,6 +519,10 @@ public class TestJsonReader extends BaseTestQuery {
       writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
     }
 
+    runBoth(() -> doTestFieldWithDots(fileName));
+  }
+
+  private void doTestFieldWithDots(String fileName) throws Exception {
     testBuilder()
       .sqlQuery("select t.m.`a.b` as a,\n" +
         "t.m.a.b as b,\n" +
@@ -759,6 +536,8 @@ public class TestJsonReader extends BaseTestQuery {
       .go();
   }
 
+  // TODO: Union not yet supported in V2.
+
   @Test // DRILL-6020
   public void testUntypedPathWithUnion() throws Exception {
     String fileName = "table.json";
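
Throughout the TestJsonReader changes above, test bodies are split into
private do*() methods invoked through a runBoth(...) helper so that each
set of assertions runs against both the legacy V1 reader and the EVF-based
V2 reader. The helper itself is outside this diff; a minimal sketch of the
idea, in which the session option name, the test(...) call and the
TestRunner interface are assumptions rather than the actual implementation,
could be:

    // Sketch only; not the actual TestJsonReader code. The option name
    // and helper signatures below are assumptions.
    interface TestRunner {
      void run() throws Exception;
    }

    private void runBoth(TestRunner runner) throws Exception {
      try {
        // Pass 1: legacy V1 JSON reader.
        test("ALTER SESSION SET `store.json.enable_v2_reader` = false");
        runner.run();
        // Pass 2: EVF-based V2 JSON reader.
        test("ALTER SESSION SET `store.json.enable_v2_reader` = true");
        runner.run();
      } finally {
        test("ALTER SESSION RESET `store.json.enable_v2_reader`");
      }
    }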
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRowSetComparison.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRowSetComparison.java
index 0326612428..cb67b2c8d7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRowSetComparison.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRowSetComparison.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.test.rowSet.test;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.memory.RootAllocator;
@@ -32,7 +32,7 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestRowSetComparison extends BaseTest {
   private BufferAllocator allocator;
 
diff --git a/exec/vector/src/main/codegen/templates/HolderReaderImpl.java b/exec/vector/src/main/codegen/templates/HolderReaderImpl.java
index dd0c3f8a3f..62b2fd44c8 100644
--- a/exec/vector/src/main/codegen/templates/HolderReaderImpl.java
+++ b/exec/vector/src/main/codegen/templates/HolderReaderImpl.java
@@ -100,9 +100,9 @@ public class ${holderMode}${name}HolderReaderImpl extends AbstractFieldReader {
     return BasicTypeHelper.getType(holder);
 <#else>
   <#if holderMode == "Repeated">
-    return repeatedHolder.TYPE;
+    return ${holderMode}${name}Holder.TYPE;
   <#else>
-    return holder.TYPE;
+    return ${nullMode}${name}Holder.TYPE;
   </#if>
 </#if>
   }
@@ -311,6 +311,7 @@ public void copyAsField(String name, MapWriter writer) {
 </#if>
     impl.vector.getMutator().setSafe(impl.idx(), repeatedHolder);
   }
+
 <#else>
   <#if !(minor.class == "Decimal9" || minor.class == "Decimal18")>
   public void copyAsValue(${minor.class?cap_first}Writer writer) {
@@ -318,7 +319,6 @@ public void copyAsField(String name, MapWriter writer) {
       writer.write${minor.class}(<#list fields as field>holder.${field.name}<#if field_has_next>, </#if></#list>);
     }
   }
-
     <#if minor.class == "VarDecimal">
   public void copyAsField(String name, MapWriter writer, int precision, int scale) {
     ${minor.class?cap_first}Writer impl = writer.${lowerName}(name, precision, scale);
@@ -330,6 +330,7 @@ public void copyAsField(String name, MapWriter writer) {
       impl.write${minor.class}(<#list fields as field>holder.${field.name}<#if field_has_next>,</#if></#list>);
     }
   }
+
   </#if>
 </#if>
 }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java
index 5bd6a7e5e4..98b5ab5cce 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java
@@ -105,7 +105,5 @@ public class SingleMapReaderImpl extends AbstractFieldReader {
     SingleMapWriter impl = (SingleMapWriter) writer.map(name);
     impl.container.copyFromSafe(idx(), impl.idx(), vector);
   }
-
-
 }
 
diff --git a/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java
index 6ffd170993..1e889da2cb 100644
--- a/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java
+++ b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java
@@ -26,7 +26,7 @@ import static org.junit.Assert.assertTrue;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.expr.BasicTypeHelper;
@@ -34,7 +34,7 @@ import org.apache.drill.test.BaseTest;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestMetadataProperties extends BaseTest {
 
   @Test
diff --git a/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
index f8e489b6c8..c66dd92673 100644
--- a/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
+++ b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
@@ -32,7 +32,7 @@ import java.util.List;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.categories.RowSetTests;
+import org.apache.drill.categories.RowSetTest;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.metadata.ColumnMetadata.StructureType;
@@ -43,7 +43,7 @@ import org.junit.experimental.categories.Category;
 /**
  * Test the tuple and column metadata, including extended attributes.
  */
-@Category(RowSetTests.class)
+@Category(RowSetTest.class)
 public class TestTupleSchema extends BaseTest {
 
   /**


[drill] 03/03: DRILL-8195: Add Timestamp Zone offset ISO-8601 format for JSON EVF

Posted by vi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

vitalii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit ead453c9848b8131203dd10724100b252c16b841
Author: Vitalii Diravka <vi...@apache.org>
AuthorDate: Tue Apr 26 17:09:02 2022 +0300

    DRILL-8195: Add Timestamp Zone offset ISO-8601 format for JSON EVF
---
 .../drill/common/util/DrillDateTimeFormatter.java  | 34 ++++++++++++++++++++++
 .../json/values/UtcTimestampValueListener.java     |  6 ++--
 .../drill/exec/vector/complex/fn/VectorOutput.java | 14 +++------
 3 files changed, 42 insertions(+), 12 deletions(-)

diff --git a/common/src/main/java/org/apache/drill/common/util/DrillDateTimeFormatter.java b/common/src/main/java/org/apache/drill/common/util/DrillDateTimeFormatter.java
new file mode 100644
index 0000000000..30786fc1cd
--- /dev/null
+++ b/common/src/main/java/org/apache/drill/common/util/DrillDateTimeFormatter.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.common.util;
+
+import java.time.format.DateTimeFormatterBuilder;
+
+
+/**
+ * Extends regular {@link java.time.Instant#parse} with more formats.
+ * By default, {@link java.time.format.DateTimeFormatter#ISO_INSTANT} used.
+ */
+public class DrillDateTimeFormatter {
+  public static final java.time.format.DateTimeFormatter ISO_DATETIME_FORMATTER =
+    new DateTimeFormatterBuilder().append(java.time.format.DateTimeFormatter.ISO_LOCAL_DATE_TIME)
+    .optionalStart().appendOffset("+HH:MM", "+00:00").optionalEnd()
+    .optionalStart().appendOffset("+HHMM", "+0000").optionalEnd()
+    .optionalStart().appendOffset("+HH", "Z").optionalEnd()
+    .toFormatter();
+}
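
The formatter above accepts timestamps carrying a +HH:MM, +HHMM, +HH or Z
offset, since each offset form is wrapped in an optional section. A small
standalone sketch (the demo class and sample timestamp strings are
illustrative, not part of the commit):

    import java.time.Instant;

    import static org.apache.drill.common.util.DrillDateTimeFormatter.ISO_DATETIME_FORMATTER;

    public class OffsetParseDemo {
      public static void main(String[] args) {
        // All three strings denote the same point in time; only the
        // offset spelling differs across the optional sections.
        Instant a = ISO_DATETIME_FORMATTER.parse("2022-04-26T17:09:02+03:00", Instant::from);
        Instant b = ISO_DATETIME_FORMATTER.parse("2022-04-26T17:09:02+0300", Instant::from);
        Instant c = ISO_DATETIME_FORMATTER.parse("2022-04-26T14:09:02Z", Instant::from);
        System.out.println(a.equals(b) && b.equals(c)); // true
      }
    }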
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/values/UtcTimestampValueListener.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/values/UtcTimestampValueListener.java
index 662e87dbec..3d4b917cb5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/values/UtcTimestampValueListener.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/values/UtcTimestampValueListener.java
@@ -26,6 +26,8 @@ import org.apache.drill.exec.vector.accessor.ScalarWriter;
 
 import com.fasterxml.jackson.core.JsonToken;
 
+import static org.apache.drill.common.util.DrillDateTimeFormatter.ISO_DATETIME_FORMATTER;
+
 /**
  * Per the <a href="https://docs.mongodb.com/manual/reference/mongodb-extended-json-v1/#bson.data_date">
  * V1 docs</a>:
@@ -59,7 +61,7 @@ public class UtcTimestampValueListener extends ScalarListener {
         break;
       case VALUE_STRING:
         try {
-          instant = Instant.parse(tokenizer.stringValue());
+          instant = ISO_DATETIME_FORMATTER.parse(tokenizer.stringValue(), Instant::from);
         } catch (Exception e) {
           throw loader.dataConversionError(schema(), "date", tokenizer.stringValue());
         }
@@ -67,6 +69,6 @@ public class UtcTimestampValueListener extends ScalarListener {
       default:
         throw tokenizer.invalidValue(token);
     }
-    writer.setLong(instant.toEpochMilli() + LOCAL_ZONE_ID.getRules().getOffset(instant).getTotalSeconds() * 1000);
+    writer.setLong(instant.toEpochMilli() + LOCAL_ZONE_ID.getRules().getOffset(instant).getTotalSeconds() * 1000L);
   }
 }
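
Note the long-typed multiplier in the fixed line above: the stored value is
the UTC epoch millis shifted by the offset of the listener's LOCAL_ZONE_ID,
since Drill timestamps are stored in the server's local time. A standalone
sketch of that arithmetic (the fixed zone and demo class are illustrative;
the listener itself resolves the local zone):

    import java.time.Instant;
    import java.time.ZoneId;

    public class LocalShiftDemo {
      public static void main(String[] args) {
        // A fixed zone is used here only so the arithmetic is reproducible.
        ZoneId localZone = ZoneId.of("Europe/Kiev");
        Instant instant = Instant.parse("2022-04-26T14:09:02Z");
        int offsetSec = localZone.getRules().getOffset(instant).getTotalSeconds(); // 10800 (UTC+3)
        long stored = instant.toEpochMilli() + offsetSec * 1000L;
        System.out.println(stored - instant.toEpochMilli()); // 10800000
      }
    }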
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
index 040679a2c3..2ed52fd9c1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
@@ -25,8 +25,6 @@ import java.time.OffsetDateTime;
 import java.time.OffsetTime;
 import java.time.ZoneId;
 import java.time.ZoneOffset;
-import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeFormatterBuilder;
 
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.expr.fn.impl.DateUtility;
@@ -58,6 +56,8 @@ import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonToken;
 
+import static org.apache.drill.common.util.DrillDateTimeFormatter.ISO_DATETIME_FORMATTER;
+
 abstract class VectorOutput {
 
   private static final Logger logger = LoggerFactory.getLogger(VectorOutput.class);
@@ -74,12 +74,6 @@ abstract class VectorOutput {
   protected final WorkingBuffer work;
   protected JsonParser parser;
 
-  protected DateTimeFormatter isoDateTimeFormatter = new DateTimeFormatterBuilder().append(DateTimeFormatter.ISO_LOCAL_DATE_TIME)
-      .optionalStart().appendOffset("+HH:MM", "+00:00").optionalEnd()
-      .optionalStart().appendOffset("+HHMM", "+0000").optionalEnd()
-      .optionalStart().appendOffset("+HH", "Z").optionalEnd()
-      .toFormatter();
-
   public VectorOutput(WorkingBuffer work) {
     this.work = work;
   }
@@ -260,7 +254,7 @@ abstract class VectorOutput {
           // See the mongo specs and the Drill handler (in new JSON loader) :
           // 1. https://docs.mongodb.com/manual/reference/mongodb-extended-json
           // 2. org.apache.drill.exec.store.easy.json.values.UtcTimestampValueListener
-          Instant instant = isoDateTimeFormatter.parse(parser.getValueAsString(), Instant::from);
+          Instant instant = ISO_DATETIME_FORMATTER.parse(parser.getValueAsString(), Instant::from);
           long offset = ZoneId.systemDefault().getRules().getOffset(instant).getTotalSeconds() * 1000L;
           ts.writeTimeStamp(instant.toEpochMilli() + offset);
           break;
@@ -366,7 +360,7 @@ abstract class VectorOutput {
           // See the mongo specs and the Drill handler (in new JSON loader) :
           // 1. https://docs.mongodb.com/manual/reference/mongodb-extended-json
           // 2. org.apache.drill.exec.store.easy.json.values.UtcTimestampValueListener
-          Instant instant = isoDateTimeFormatter.parse(parser.getValueAsString(), Instant::from);
+          Instant instant = ISO_DATETIME_FORMATTER.parse(parser.getValueAsString(), Instant::from);
           long offset = ZoneId.systemDefault().getRules().getOffset(instant).getTotalSeconds() * 1000L;
           ts.writeTimeStamp(instant.toEpochMilli() + offset);
           break;
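
For context on the two hunks above: they sit in the extended JSON handling
of VectorOutput, so the string being parsed is a Mongo extended JSON V1
date value. Per the spec linked in the comments, such a record would look
like the following (the field name is illustrative):

    {"ts": {"$date": "2022-04-26T17:09:02+03:00"}}

With DrillDateTimeFormatter now shared between this legacy path and the V2
UtcTimestampValueListener, both readers accept the +HH:MM, +HHMM, +HH and Z
offset spellings.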