You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@drill.apache.org by ar...@apache.org on 2019/06/29 17:02:41 UTC

[drill] branch master updated (dc865e8 -> f3d6b69)

This is an automated email from the ASF dual-hosted git repository.

arina pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git.


    from dc865e8  DRILL-7271: Refactor Metadata interfaces and classes to contain all needed information for the File based Metastore
     new e8d9b0a  DRILL-6711: Use jitpack repository for Drill Calcite project artifacts instead of repository.mapr.com
     new f3d6b69  DRILL-7310: Move schema-related classes from exec module to be able to use them in metastore module

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 common/pom.xml                                     |  2 +-
 contrib/storage-hive/core/pom.xml                  | 14 +++-
 contrib/storage-hive/hive-exec-shade/pom.xml       |  4 +-
 docs/dev/Calcite.md                                | 26 ++++++
 exec/java-exec/pom.xml                             | 25 +-----
 .../exec/physical/rowSet/impl/ColumnBuilder.java   | 13 +--
 .../drill/exec/record/BatchSchemaBuilder.java      | 57 +++++++++++++
 .../org/apache/drill/exec/record/SchemaUtil.java   | 11 +++
 .../store/parquet/BaseParquetMetadataProvider.java |  5 +-
 .../store/parquet/ParquetTableMetadataUtils.java   |  4 +-
 .../java/org/apache/drill/TestFunctionsQuery.java  |  7 +-
 .../java/org/apache/drill/TestStarQueries.java     | 19 +++--
 .../test/java/org/apache/drill/TestUnionAll.java   |  7 +-
 .../java/org/apache/drill/TestUnionDistinct.java   |  7 +-
 .../java/org/apache/drill/TestUntypedNull.java     | 19 +++--
 .../org/apache/drill/exec/TestEmptyInputSql.java   | 61 +++++++++-----
 .../drill/exec/cache/TestBatchSerialization.java   | 31 ++++---
 .../drill/exec/fn/impl/TestCastFunctions.java      |  7 +-
 .../impl/agg/TestStreamingAggEmitOutcome.java      |  2 -
 .../physical/impl/join/TestHashJoinAdvanced.java   |  7 +-
 .../physical/impl/join/TestHashJoinOutcome.java    |  4 +-
 .../impl/protocol/TestOperatorRecordBatch.java     | 32 +++++---
 .../impl/scan/BaseScanOperatorExecTest.java        |  5 +-
 .../physical/impl/scan/TestFileScanFramework.java  | 30 ++++---
 .../physical/impl/scan/TestScanBatchWriters.java   |  6 +-
 .../impl/scan/TestScanOperExecSmoothing.java       |  5 +-
 .../impl/scan/TestScanOrchestratorEarlySchema.java | 24 +++---
 .../impl/scan/TestScanOrchestratorLateSchema.java  |  5 +-
 .../impl/scan/TestScanOrchestratorMetadata.java    | 17 ++--
 .../scan/project/TestConstantColumnLoader.java     | 10 +--
 .../impl/scan/project/TestRowBatchMerger.java      | 46 +++++------
 .../impl/svremover/AbstractGenericCopierTest.java  |  7 +-
 .../physical/impl/validate/TestBatchValidator.java | 42 +++++-----
 .../exec/physical/impl/xsort/TestExternalSort.java | 26 +++---
 .../physical/impl/xsort/managed/TestCopier.java    |  7 +-
 .../impl/xsort/managed/TestShortArrays.java        |  6 +-
 .../physical/impl/xsort/managed/TestSortImpl.java  |  3 +-
 .../physical/impl/xsort/managed/TestSorter.java    | 14 ++--
 .../impl/TestResultSetLoaderOmittedValues.java     |  5 +-
 .../rowSet/impl/TestResultSetLoaderProjection.java | 13 ++-
 .../rowSet/impl/TestResultSetSchemaChange.java     | 20 +++--
 .../drill/exec/physical/unit/TestMiniPlan.java     | 19 +++--
 .../exec/physical/unit/TestNullInputMiniPlan.java  | 69 ++++++++++------
 .../drill/exec/record/TestRecordBatchSizer.java    | 95 +++++++++++++++-------
 .../drill/exec/record/TestVectorContainer.java     | 13 +--
 .../exec/record/metadata/TestTupleSchema.java      |  6 +-
 .../apache/drill/exec/record/vector/TestLoad.java  | 76 +++++++++++------
 .../drill/exec/store/TestImplicitFileColumns.java  | 19 +++--
 .../drill/exec/store/httpd/TestHTTPDLogReader.java |  6 +-
 .../apache/drill/exec/store/log/TestLogReader.java | 54 ++++++------
 .../store/parquet/TestParquetMetadataCache.java    |  9 +-
 .../java/org/apache/drill/test/ClientFixture.java  |  7 +-
 .../java/org/apache/drill/test/ExampleTest.java    |  6 +-
 .../rowSet/test/TestRepeatedListAccessors.java     | 21 +++--
 .../test/rowSet/test/TestVariantAccessors.java     |  5 +-
 exec/jdbc-all/pom.xml                              |  8 +-
 exec/vector/pom.xml                                | 21 +++++
 .../record/metadata/schema/parser/SchemaLexer.g4   |  0
 .../record/metadata/schema/parser/SchemaParser.g4  |  0
 .../record/metadata/AbstractColumnMetadata.java    |  0
 .../drill/exec/record/metadata/ColumnBuilder.java  |  0
 .../drill/exec/record/metadata/MapBuilder.java     |  0
 .../exec/record/metadata/MapColumnMetadata.java    |  0
 .../drill/exec/record/metadata/MetadataUtils.java  | 45 ----------
 .../record/metadata/PrimitiveColumnMetadata.java   |  6 +-
 .../exec/record/metadata/RepeatedListBuilder.java  |  0
 .../metadata/RepeatedListColumnMetadata.java       |  0
 .../drill/exec/record/metadata/SchemaBuilder.java  | 23 ------
 .../exec/record/metadata/SchemaContainer.java      |  0
 .../drill/exec/record/metadata/TupleBuilder.java   |  6 --
 .../drill/exec/record/metadata/TupleSchema.java    | 10 ---
 .../drill/exec/record/metadata/UnionBuilder.java   |  0
 .../record/metadata/VariantColumnMetadata.java     |  0
 .../drill/exec/record/metadata/VariantSchema.java  |  0
 .../drill/exec/record/metadata/package-info.java   |  2 +-
 .../metadata/schema/parser/SchemaExprParser.java   |  0
 .../schema/parser/SchemaParsingException.java      |  0
 .../metadata/schema/parser/SchemaVisitor.java      |  0
 .../schema/parser/UpperCaseCharStream.java         |  0
 .../record/metadata/TestMetadataProperties.java    |  0
 .../schema/parser/TestParserErrorHandling.java     |  0
 .../metadata/schema/parser/TestSchemaParser.java   |  0
 logical/pom.xml                                    |  2 +-
 .../drill/metastore/util/SchemaPathUtils.java      | 37 +++++++++
 pom.xml                                            | 35 ++------
 85 files changed, 732 insertions(+), 523 deletions(-)
 create mode 100644 docs/dev/Calcite.md
 create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchemaBuilder.java
 rename exec/{java-exec => vector}/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaLexer.g4 (100%)
 rename exec/{java-exec => vector}/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaParser.g4 (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/ColumnBuilder.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/MapBuilder.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/MapColumnMetadata.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java (78%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java (98%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/RepeatedListBuilder.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/RepeatedListColumnMetadata.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/SchemaBuilder.java (91%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/SchemaContainer.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/TupleBuilder.java (95%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java (94%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/UnionBuilder.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/VariantColumnMetadata.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/VariantSchema.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/package-info.java (90%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaExprParser.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaParsingException.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaVisitor.java (100%)
 rename exec/{java-exec => vector}/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/UpperCaseCharStream.java (100%)
 rename exec/{java-exec => vector}/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java (100%)
 rename exec/{java-exec => vector}/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestParserErrorHandling.java (100%)
 rename exec/{java-exec => vector}/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java (100%)


[drill] 02/02: DRILL-7310: Move schema-related classes from exec module to be able to use them in metastore module

Posted by ar...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

arina pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit f3d6b69a6e7a86f51e74f84c99c84aab58ed663a
Author: Volodymyr Vysotskyi <vv...@gmail.com>
AuthorDate: Thu Jun 27 20:49:20 2019 +0300

    DRILL-7310: Move schema-related classes from exec module to be able to use them in metastore module
    
    closes #1816
---
 exec/java-exec/pom.xml                             | 21 -----
 .../exec/physical/rowSet/impl/ColumnBuilder.java   | 13 +--
 .../drill/exec/record/BatchSchemaBuilder.java      | 57 +++++++++++++
 .../org/apache/drill/exec/record/SchemaUtil.java   | 11 +++
 .../store/parquet/BaseParquetMetadataProvider.java |  5 +-
 .../store/parquet/ParquetTableMetadataUtils.java   |  4 +-
 .../java/org/apache/drill/TestFunctionsQuery.java  |  7 +-
 .../java/org/apache/drill/TestStarQueries.java     | 19 +++--
 .../test/java/org/apache/drill/TestUnionAll.java   |  7 +-
 .../java/org/apache/drill/TestUnionDistinct.java   |  7 +-
 .../java/org/apache/drill/TestUntypedNull.java     | 19 +++--
 .../org/apache/drill/exec/TestEmptyInputSql.java   | 61 +++++++++-----
 .../drill/exec/cache/TestBatchSerialization.java   | 31 ++++---
 .../drill/exec/fn/impl/TestCastFunctions.java      |  7 +-
 .../impl/agg/TestStreamingAggEmitOutcome.java      |  2 -
 .../physical/impl/join/TestHashJoinAdvanced.java   |  7 +-
 .../physical/impl/join/TestHashJoinOutcome.java    |  4 +-
 .../impl/protocol/TestOperatorRecordBatch.java     | 32 +++++---
 .../impl/scan/BaseScanOperatorExecTest.java        |  5 +-
 .../physical/impl/scan/TestFileScanFramework.java  | 30 ++++---
 .../physical/impl/scan/TestScanBatchWriters.java   |  6 +-
 .../impl/scan/TestScanOperExecSmoothing.java       |  5 +-
 .../impl/scan/TestScanOrchestratorEarlySchema.java | 24 +++---
 .../impl/scan/TestScanOrchestratorLateSchema.java  |  5 +-
 .../impl/scan/TestScanOrchestratorMetadata.java    | 17 ++--
 .../scan/project/TestConstantColumnLoader.java     | 10 +--
 .../impl/scan/project/TestRowBatchMerger.java      | 46 +++++------
 .../impl/svremover/AbstractGenericCopierTest.java  |  7 +-
 .../physical/impl/validate/TestBatchValidator.java | 42 +++++-----
 .../exec/physical/impl/xsort/TestExternalSort.java | 26 +++---
 .../physical/impl/xsort/managed/TestCopier.java    |  7 +-
 .../impl/xsort/managed/TestShortArrays.java        |  6 +-
 .../physical/impl/xsort/managed/TestSortImpl.java  |  3 +-
 .../physical/impl/xsort/managed/TestSorter.java    | 14 ++--
 .../impl/TestResultSetLoaderOmittedValues.java     |  5 +-
 .../rowSet/impl/TestResultSetLoaderProjection.java | 13 ++-
 .../rowSet/impl/TestResultSetSchemaChange.java     | 20 +++--
 .../drill/exec/physical/unit/TestMiniPlan.java     | 19 +++--
 .../exec/physical/unit/TestNullInputMiniPlan.java  | 69 ++++++++++------
 .../drill/exec/record/TestRecordBatchSizer.java    | 95 +++++++++++++++-------
 .../drill/exec/record/TestVectorContainer.java     | 13 +--
 .../exec/record/metadata/TestTupleSchema.java      |  6 +-
 .../apache/drill/exec/record/vector/TestLoad.java  | 76 +++++++++++------
 .../drill/exec/store/TestImplicitFileColumns.java  | 19 +++--
 .../drill/exec/store/httpd/TestHTTPDLogReader.java |  6 +-
 .../apache/drill/exec/store/log/TestLogReader.java | 54 ++++++------
 .../store/parquet/TestParquetMetadataCache.java    |  9 +-
 .../java/org/apache/drill/test/ClientFixture.java  |  7 +-
 .../java/org/apache/drill/test/ExampleTest.java    |  6 +-
 .../rowSet/test/TestRepeatedListAccessors.java     | 21 +++--
 .../test/rowSet/test/TestVariantAccessors.java     |  5 +-
 exec/vector/pom.xml                                | 21 +++++
 .../record/metadata/schema/parser/SchemaLexer.g4   |  0
 .../record/metadata/schema/parser/SchemaParser.g4  |  0
 .../record/metadata/AbstractColumnMetadata.java    |  0
 .../drill/exec/record/metadata/ColumnBuilder.java  |  0
 .../drill/exec/record/metadata/MapBuilder.java     |  0
 .../exec/record/metadata/MapColumnMetadata.java    |  0
 .../drill/exec/record/metadata/MetadataUtils.java  | 45 ----------
 .../record/metadata/PrimitiveColumnMetadata.java   |  6 +-
 .../exec/record/metadata/RepeatedListBuilder.java  |  0
 .../metadata/RepeatedListColumnMetadata.java       |  0
 .../drill/exec/record/metadata/SchemaBuilder.java  | 23 ------
 .../exec/record/metadata/SchemaContainer.java      |  0
 .../drill/exec/record/metadata/TupleBuilder.java   |  6 --
 .../drill/exec/record/metadata/TupleSchema.java    | 10 ---
 .../drill/exec/record/metadata/UnionBuilder.java   |  0
 .../record/metadata/VariantColumnMetadata.java     |  0
 .../drill/exec/record/metadata/VariantSchema.java  |  0
 .../drill/exec/record/metadata/package-info.java   |  2 +-
 .../metadata/schema/parser/SchemaExprParser.java   |  0
 .../schema/parser/SchemaParsingException.java      |  0
 .../metadata/schema/parser/SchemaVisitor.java      |  0
 .../schema/parser/UpperCaseCharStream.java         |  0
 .../record/metadata/TestMetadataProperties.java    |  0
 .../schema/parser/TestParserErrorHandling.java     |  0
 .../metadata/schema/parser/TestSchemaParser.java   |  0
 .../drill/metastore/util/SchemaPathUtils.java      | 37 +++++++++
 78 files changed, 675 insertions(+), 485 deletions(-)

diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index 4e42616..b2a1899 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -547,10 +547,6 @@
       <groupId>sqlline</groupId>
       <artifactId>sqlline</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.antlr</groupId>
-      <artifactId>antlr4-runtime</artifactId>
-    </dependency>
   </dependencies>
 
   <profiles>
@@ -800,23 +796,6 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
       </plugin>
-      <plugin>
-        <groupId>org.antlr</groupId>
-        <artifactId>antlr4-maven-plugin</artifactId>
-        <version>${antlr.version}</version>
-        <configuration>
-          <listener>false</listener>
-          <visitor>true</visitor>
-          <outputDirectory>${project.build.directory}/generated-sources</outputDirectory>
-        </configuration>
-        <executions>
-          <execution>
-            <goals>
-              <goal>antlr4</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
     </plugins>
     <pluginManagement>
       <plugins>
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnBuilder.java
index de66a38..fc7b837 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/rowSet/impl/ColumnBuilder.java
@@ -119,8 +119,8 @@ public class ColumnBuilder {
    * a vector, a writer, and the column state which binds the two together
    * and manages the column.
    *
-   * @param columnSchema schema of the new primitive column
-   * @param projType implied projection type for the column
+   * @param parent schema of the new primitive column
+   * @param colProj implied projection type for the column
    * @return column state for the new column
    */
 
@@ -179,7 +179,8 @@ public class ColumnBuilder {
    * here, instead we create a tuple state to hold the columns, and defer the
    * map vector (or vector container) until harvest time.
    *
-   * @param columnSchema description of the map column
+   * @param parent description of the map column
+   * @param colProj implied projection type for the column
    * @return column state for the map column
    */
 
@@ -295,7 +296,7 @@ public class ColumnBuilder {
    * does so. Unions are fully tested in the row set writer mechanism.
    *
    * @param parent container
-   * @param columnSchema column schema
+   * @param colProj column schema
    * @return column
    */
   private ColumnState buildUnion(ContainerState parent, ColumnReadProjection colProj) {
@@ -362,7 +363,7 @@ public class ColumnBuilder {
    * not support the <tt>ListVector</tt> type.
    *
    * @param parent the parent (tuple, union or list) that holds this list
-   * @param columnSchema metadata description of the list which must contain
+   * @param colProj metadata description of the list which must contain
    * exactly one subtype
    * @return the column state for the list
    */
@@ -421,7 +422,7 @@ public class ColumnBuilder {
    * not support the <tt>ListVector</tt> type.
    *
    * @param parent the parent (tuple, union or list) that holds this list
-   * @param columnSchema metadata description of the list (must be empty of
+   * @param colProj metadata description of the list (must be empty of
    * subtypes)
    * @return the column state for the list
    */
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchemaBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchemaBuilder.java
new file mode 100644
index 0000000..e21e818
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/BatchSchemaBuilder.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.record;
+
+import org.apache.drill.exec.record.metadata.SchemaBuilder;
+
+public class BatchSchemaBuilder {
+  private BatchSchema.SelectionVectorMode svMode = BatchSchema.SelectionVectorMode.NONE;
+  private SchemaBuilder schemaBuilder;
+
+  public BatchSchemaBuilder() {
+  }
+
+  /**
+   * Create a new schema starting with the base schema. Allows appending
+   * additional columns to an additional schema.
+   */
+  public BatchSchemaBuilder(BatchSchema baseSchema) {
+    schemaBuilder = new SchemaBuilder();
+    for (MaterializedField field : baseSchema) {
+      schemaBuilder.add(field);
+    }
+  }
+
+  public BatchSchemaBuilder withSVMode(BatchSchema.SelectionVectorMode svMode) {
+    this.svMode = svMode;
+    return this;
+  }
+
+  public BatchSchemaBuilder withSchemaBuilder(SchemaBuilder schemaBuilder) {
+    this.schemaBuilder = schemaBuilder;
+    return this;
+  }
+
+  public SchemaBuilder schemaBuilder() {
+    return schemaBuilder;
+  }
+
+  public BatchSchema build() {
+    return new BatchSchema(svMode, schemaBuilder.buildSchema().toFieldList());
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java
index 0deee65..56a781e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java
@@ -29,6 +29,9 @@ import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.ops.OperatorContext;
+import org.apache.drill.exec.record.metadata.MetadataUtils;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
+import org.apache.drill.exec.record.metadata.TupleSchema;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.complex.UnionVector;
 
@@ -176,4 +179,12 @@ public class SchemaUtil {
     Preconditions.checkState(vectorMap.size() == 0, "Leftover vector from incoming batch");
     return c;
   }
+
+  public static TupleMetadata fromBatchSchema(BatchSchema batchSchema) {
+    TupleSchema tuple = new TupleSchema();
+    for (MaterializedField field : batchSchema) {
+      tuple.add(MetadataUtils.fromView(field));
+    }
+    return tuple;
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/BaseParquetMetadataProvider.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/BaseParquetMetadataProvider.java
index ac6190b..cc5119a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/BaseParquetMetadataProvider.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/BaseParquetMetadataProvider.java
@@ -26,7 +26,6 @@ import org.apache.drill.metastore.metadata.TableInfo;
 import org.apache.drill.metastore.statistics.TableStatisticsKind;
 import org.apache.drill.metastore.statistics.Statistic;
 import org.apache.drill.exec.planner.common.DrillStatsTable;
-import org.apache.drill.exec.record.metadata.MetadataUtils;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.metastore.metadata.BaseMetadata;
 import org.apache.drill.metastore.metadata.NonInterestingColumnsMetadata;
@@ -212,12 +211,12 @@ public abstract class BaseParquetMetadataProvider implements ParquetMetadataProv
 
       if (this.schema == null) {
         schema = new TupleSchema();
-        fields.forEach((schemaPath, majorType) -> MetadataUtils.addColumnMetadata(schema, schemaPath, majorType));
+        fields.forEach((schemaPath, majorType) -> SchemaPathUtils.addColumnMetadata(schema, schemaPath, majorType));
       } else {
         // merges specified schema with schema from table
         fields.forEach((schemaPath, majorType) -> {
           if (SchemaPathUtils.getColumnMetadata(schemaPath, schema) == null) {
-            MetadataUtils.addColumnMetadata(schema, schemaPath, majorType);
+            SchemaPathUtils.addColumnMetadata(schema, schemaPath, majorType);
           }
         });
       }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetTableMetadataUtils.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetTableMetadataUtils.java
index 4f12230..b4d2bf2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetTableMetadataUtils.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetTableMetadataUtils.java
@@ -25,7 +25,6 @@ import org.apache.drill.metastore.metadata.MetadataType;
 import org.apache.drill.metastore.metadata.TableInfo;
 import org.apache.drill.metastore.statistics.TableStatisticsKind;
 import org.apache.drill.metastore.statistics.Statistic;
-import org.apache.drill.exec.record.metadata.MetadataUtils;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.record.metadata.TupleSchema;
 import org.apache.drill.exec.resolver.TypeCastRules;
@@ -42,6 +41,7 @@ import org.apache.drill.metastore.metadata.NonInterestingColumnsMetadata;
 import org.apache.drill.metastore.metadata.PartitionMetadata;
 import org.apache.drill.metastore.metadata.RowGroupMetadata;
 import org.apache.drill.metastore.statistics.StatisticsHolder;
+import org.apache.drill.metastore.util.SchemaPathUtils;
 import org.apache.drill.metastore.util.TableMetadataUtils;
 import org.apache.drill.metastore.statistics.ExactStatisticsConstants;
 import org.apache.drill.exec.expr.StatisticsProvider;
@@ -149,7 +149,7 @@ public class ParquetTableMetadataUtils {
     Map<SchemaPath, TypeProtos.MajorType> columns = getRowGroupFields(tableMetadata, rowGroupMetadata);
 
     TupleSchema schema = new TupleSchema();
-    columns.forEach((schemaPath, majorType) -> MetadataUtils.addColumnMetadata(schema, schemaPath, majorType));
+    columns.forEach((schemaPath, majorType) -> SchemaPathUtils.addColumnMetadata(schema, schemaPath, majorType));
 
     MetadataInfo metadataInfo = new MetadataInfo(MetadataType.ROW_GROUP, MetadataInfo.GENERAL_INFO_KEY, null);
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java b/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
index ae08d92..d22bdab 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
@@ -33,6 +33,7 @@ import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.test.BaseTestQuery;
 import org.hamcrest.CoreMatchers;
@@ -998,8 +999,10 @@ public class TestFunctionsQuery extends BaseTestQuery {
         "isdate(employee_id)",
         "NOT (employee_id IS NULL)");
 
-    BatchSchema expectedSchema = new SchemaBuilder()
-        .add("col1", TypeProtos.MinorType.BIT)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+        .add("col1", TypeProtos.MinorType.BIT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     for (String condition : conditions) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
index 8b062c7..d3cf0d8 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
@@ -23,6 +23,7 @@ import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.test.BaseTestQuery;
 import org.junit.BeforeClass;
@@ -524,11 +525,13 @@ public class TestStarQueries extends BaseTestQuery {
   @Test // DRILL-5845
   public void testSchemaForStarOrderByLimit() throws Exception {
     final String query = "select * from cp.`tpch/nation.parquet` order by n_name limit 1";
-    final BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .add("n_nationkey", TypeProtos.MinorType.INT)
-        .add("n_name",TypeProtos.MinorType.VARCHAR)
+        .add("n_name", TypeProtos.MinorType.VARCHAR)
         .add("n_regionkey", TypeProtos.MinorType.INT)
-        .add("n_comment", TypeProtos.MinorType.VARCHAR)
+        .add("n_comment", TypeProtos.MinorType.VARCHAR);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
@@ -540,11 +543,13 @@ public class TestStarQueries extends BaseTestQuery {
 
   @Test // DRILL-5822
   public void testSchemaForParallelizedStarOrderBy() throws Exception {
-    final String query = "select * from cp.`tpch/region.parquet` order by r_name";
-    final BatchSchema expectedSchema = new SchemaBuilder()
+    String query = "select * from cp.`tpch/region.parquet` order by r_name";
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .add("r_regionkey", TypeProtos.MinorType.INT)
-        .add("r_name",TypeProtos.MinorType.VARCHAR)
-        .add("r_comment", TypeProtos.MinorType.VARCHAR)
+        .add("r_name", TypeProtos.MinorType.VARCHAR)
+        .add("r_comment", TypeProtos.MinorType.VARCHAR);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java b/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
index c89e730..3523bdf 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill;
 
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 
 import org.apache.commons.lang3.tuple.Pair;
@@ -1250,8 +1251,10 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test
   public void testUnionAllBothEmptyDirs() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
-        .addNullable("key", TypeProtos.MinorType.INT)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+        .addNullable("key", TypeProtos.MinorType.INT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestUnionDistinct.java b/exec/java-exec/src/test/java/org/apache/drill/TestUnionDistinct.java
index bde7459..d3ec4e5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestUnionDistinct.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestUnionDistinct.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill;
 
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 
 import org.apache.commons.io.FileUtils;
@@ -826,8 +827,10 @@ public class TestUnionDistinct extends BaseTestQuery {
 
   @Test
   public void testUnionBothEmptyDirs() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
-        .addNullable("key", TypeProtos.MinorType.INT)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+        .addNullable("key", TypeProtos.MinorType.INT);
+    final BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java b/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java
index 521531c..edadcb2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestUntypedNull.java
@@ -22,6 +22,7 @@ import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterFixtureBuilder;
@@ -115,8 +116,10 @@ public class TestUntypedNull extends ClusterTest {
   @Test
   public void testCoalesceOnNotExistentColumns() throws Exception {
     String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` limit 5";
-    BatchSchema expectedSchema = new SchemaBuilder()
-        .add("coal", UNTYPED_NULL_TYPE)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+        .add("coal", UNTYPED_NULL_TYPE);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
@@ -135,8 +138,10 @@ public class TestUntypedNull extends ClusterTest {
   @Test
   public void testCoalesceOnNotExistentColumnsWithGroupBy() throws Exception {
     String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` group by 1";
-    BatchSchema expectedSchema = new SchemaBuilder()
-        .add("coal", UNTYPED_NULL_TYPE)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+        .add("coal", UNTYPED_NULL_TYPE);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
@@ -155,8 +160,10 @@ public class TestUntypedNull extends ClusterTest {
   @Test
   public void testCoalesceOnNotExistentColumnsWithOrderBy() throws Exception {
     String query = "select coalesce(unk1, unk2) as coal from cp.`tpch/nation.parquet` order by 1 limit 5";
-    BatchSchema expectedSchema = new SchemaBuilder()
-        .add("coal", UNTYPED_NULL_TYPE)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+        .add("coal", UNTYPED_NULL_TYPE);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java b/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
index b2a8357..8460152 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec;
 
 import org.apache.drill.PlanTestBase;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
@@ -55,9 +56,11 @@ public class TestEmptyInputSql extends BaseTestQuery {
    */
   @Test
   public void testQueryEmptyJson() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
-        .addNullable("key", TypeProtos.MinorType.INT)
-        .addNullable("key2", TypeProtos.MinorType.INT)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+      .addNullable("key", TypeProtos.MinorType.INT)
+      .addNullable("key2", TypeProtos.MinorType.INT);
+    final BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
@@ -73,7 +76,8 @@ public class TestEmptyInputSql extends BaseTestQuery {
    */
   @Test
   public void testQueryStarColEmptyJson() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
+    final BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(new SchemaBuilder())
         .build();
 
     testBuilder()
@@ -113,10 +117,12 @@ public class TestEmptyInputSql extends BaseTestQuery {
 
   @Test
   public void testQueryMapArrayEmptyJson() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("col1", TypeProtos.MinorType.INT)
         .addNullable("col2", TypeProtos.MinorType.INT)
-        .addNullable("col3", TypeProtos.MinorType.INT)
+        .addNullable("col3", TypeProtos.MinorType.INT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
@@ -136,10 +142,12 @@ public class TestEmptyInputSql extends BaseTestQuery {
   public void testQueryConstExprEmptyJson() throws Exception {
     try {
       alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, false);
-      BatchSchema expectedSchema = new SchemaBuilder()
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
           .add("key", TypeProtos.MinorType.FLOAT8)
           .add("name", TypeProtos.MinorType.VARCHAR, 100)
-          .addNullable("name2", TypeProtos.MinorType.VARCHAR, 100)
+          .addNullable("name2", TypeProtos.MinorType.VARCHAR, 100);
+      BatchSchema expectedSchema = new BatchSchemaBuilder()
+          .withSchemaBuilder(schemaBuilder)
           .build();
 
       testBuilder()
@@ -152,7 +160,7 @@ public class TestEmptyInputSql extends BaseTestQuery {
           .run();
 
       alterSession(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true);
-      expectedSchema = new SchemaBuilder()
+      schemaBuilder = new SchemaBuilder()
           .add("key",
               TypeProtos.MajorType.newBuilder()
                   .setMinorType(TypeProtos.MinorType.VARDECIMAL)
@@ -161,7 +169,9 @@ public class TestEmptyInputSql extends BaseTestQuery {
                   .setScale(1)
                   .build())
           .add("name", TypeProtos.MinorType.VARCHAR, 100)
-          .addNullable("name2", TypeProtos.MinorType.VARCHAR, 100)
+          .addNullable("name2", TypeProtos.MinorType.VARCHAR, 100);
+      expectedSchema = new BatchSchemaBuilder()
+          .withSchemaBuilder(schemaBuilder)
           .build();
 
       testBuilder()
@@ -182,7 +192,8 @@ public class TestEmptyInputSql extends BaseTestQuery {
    */
   @Test
   public void testQueryEmptyCsvH() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(new SchemaBuilder())
         .build();
 
     testBuilder()
@@ -198,8 +209,10 @@ public class TestEmptyInputSql extends BaseTestQuery {
    */
   @Test
   public void testQueryEmptyCsv() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
-        .addArray("columns", TypeProtos.MinorType.VARCHAR)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+      .addArray("columns", TypeProtos.MinorType.VARCHAR);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
@@ -211,7 +224,9 @@ public class TestEmptyInputSql extends BaseTestQuery {
 
   @Test
   public void testEmptyDirectory() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder().build();
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(new SchemaBuilder())
+        .build();
 
     testBuilder()
         .sqlQuery("select * from dfs.tmp.`%s`", EMPTY_DIR_NAME)
@@ -222,8 +237,10 @@ public class TestEmptyInputSql extends BaseTestQuery {
 
   @Test
   public void testEmptyDirectoryAndFieldInQuery() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
-        .addNullable("key", TypeProtos.MinorType.INT)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+        .addNullable("key", TypeProtos.MinorType.INT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
@@ -235,9 +252,11 @@ public class TestEmptyInputSql extends BaseTestQuery {
 
   @Test
   public void testRenameProjectEmptyDirectory() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("WeekId", TypeProtos.MinorType.INT)
-        .addNullable("ProductName", TypeProtos.MinorType.INT)
+        .addNullable("ProductName", TypeProtos.MinorType.INT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
@@ -250,9 +269,11 @@ public class TestEmptyInputSql extends BaseTestQuery {
 
   @Test
   public void testRenameProjectEmptyJson() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("WeekId", TypeProtos.MinorType.INT)
-        .addNullable("ProductName", TypeProtos.MinorType.INT)
+        .addNullable("ProductName", TypeProtos.MinorType.INT);
+    final BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestBatchSerialization.java b/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestBatchSerialization.java
index 4eb9016..9f41d2d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestBatchSerialization.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/cache/TestBatchSerialization.java
@@ -31,9 +31,9 @@ import java.nio.file.StandardOpenOption;
 
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.cache.VectorSerializer.Reader;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.record.selection.SelectionVector2;
 import org.apache.drill.test.BaseDirTestWatcher;
 import org.apache.drill.test.DirTestWatcher;
@@ -56,7 +56,7 @@ public class TestBatchSerialization extends DrillTest {
   public static OperatorFixture fixture;
 
   @BeforeClass
-  public static void setUpBeforeClass() throws Exception {
+  public static void setUpBeforeClass() {
     fixture = OperatorFixture.builder(dirTestWatcher).build();
   }
 
@@ -65,7 +65,7 @@ public class TestBatchSerialization extends DrillTest {
     fixture.close();
   }
 
-  public SingleRowSet makeRowSet(BatchSchema schema, int rowCount) {
+  public SingleRowSet makeRowSet(TupleMetadata schema, int rowCount) {
     ExtendableRowSet rowSet = fixture.rowSet(schema);
     RowSetWriter writer = rowSet.writer(rowCount);
     for (int i = 0; i < rowCount; i++) {
@@ -76,7 +76,7 @@ public class TestBatchSerialization extends DrillTest {
     return rowSet;
   }
 
-  public SingleRowSet makeNullableRowSet(BatchSchema schema, int rowCount) {
+  public SingleRowSet makeNullableRowSet(TupleMetadata schema, int rowCount) {
     ExtendableRowSet rowSet = fixture.rowSet(schema);
     RowSetWriter writer = rowSet.writer(rowCount);
     for (int i = 0; i < rowCount; i++) {
@@ -97,18 +97,18 @@ public class TestBatchSerialization extends DrillTest {
   }
 
   public void testNonNullType(MinorType type) throws IOException {
-    BatchSchema schema = new SchemaBuilder( )
+    TupleMetadata schema = new SchemaBuilder()
         .add("col", type)
-        .build();
+        .buildSchema();
     int rowCount = 20;
     verifySerialize(makeRowSet(schema, rowCount),
                     makeRowSet(schema, rowCount));
   }
 
   public void testNullableType(MinorType type) throws IOException {
-    BatchSchema schema = new SchemaBuilder( )
+    TupleMetadata schema = new SchemaBuilder()
         .addNullable("col", type)
-        .build();
+        .buildSchema();
     int rowCount = 20;
     verifySerialize(makeNullableRowSet(schema, rowCount),
                     makeNullableRowSet(schema, rowCount));
@@ -179,7 +179,7 @@ public class TestBatchSerialization extends DrillTest {
     testType(MinorType.INTERVALDAY);
   }
 
-  private SingleRowSet buildMapSet(BatchSchema schema) {
+  private SingleRowSet buildMapSet(TupleMetadata schema) {
     return fixture.rowSetBuilder(schema)
         .addRow(1, objArray(100, "first"))
         .addRow(2, objArray(200, "second"))
@@ -187,7 +187,7 @@ public class TestBatchSerialization extends DrillTest {
         .build();
   }
 
-  private SingleRowSet buildArraySet(BatchSchema schema) {
+  private SingleRowSet buildArraySet(TupleMetadata schema) {
     return fixture.rowSetBuilder(schema)
         .addRow(1, strArray("first, second, third"))
         .addRow(2, null)
@@ -197,19 +197,16 @@ public class TestBatchSerialization extends DrillTest {
 
   /**
    * Tests a map type and an SV2.
-   *
-   * @throws IOException
    */
-
   @Test
   public void testMap() throws IOException {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("top", MinorType.INT)
         .addMap("map")
           .add("key", MinorType.INT)
           .add("value", MinorType.VARCHAR)
           .resumeSchema()
-        .build();
+        .buildSchema();
 
     verifySerialize(buildMapSet(schema).toIndirect(),
                     buildMapSet(schema));
@@ -217,10 +214,10 @@ public class TestBatchSerialization extends DrillTest {
 
   @Test
   public void testArray() throws IOException {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("top", MinorType.INT)
         .addArray("arr", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     verifySerialize(buildArraySet(schema).toIndirect(),
                     buildArraySet(schema));
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
index fc06cb1..6efbb19 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCastFunctions.java
@@ -35,6 +35,7 @@ import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.vector.IntervalDayVector;
 import org.apache.drill.exec.vector.IntervalYearVector;
@@ -797,8 +798,10 @@ public class TestCastFunctions extends ClusterTest {
       String q = String.format(query, entry.getKey());
 
       MaterializedField field = MaterializedField.create("coal", entry.getValue());
-      BatchSchema expectedSchema = new SchemaBuilder()
-          .add(field)
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
+          .add(field);
+      BatchSchema expectedSchema = new BatchSchemaBuilder()
+          .withSchemaBuilder(schemaBuilder)
           .build();
 
       // Validate schema
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestStreamingAggEmitOutcome.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestStreamingAggEmitOutcome.java
index 0e25fe1..19cb288 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestStreamingAggEmitOutcome.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestStreamingAggEmitOutcome.java
@@ -24,7 +24,6 @@ import org.apache.drill.exec.physical.config.StreamingAggregate;
 import org.apache.drill.exec.physical.impl.BaseTestOpBatchEmitOutcome;
 import org.apache.drill.exec.physical.impl.MockRecordBatch;
 import org.apache.drill.exec.physical.impl.aggregate.StreamingAggBatch;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
@@ -1483,7 +1482,6 @@ public class TestStreamingAggEmitOutcome extends BaseTestOpBatchEmitOutcome {
       .add("id_left", TypeProtos.MinorType.INT)
       .add("cost_left", TypeProtos.MinorType.INT)
       .add("name_left", TypeProtos.MinorType.VARCHAR)
-      .withSVMode(BatchSchema.SelectionVectorMode.TWO_BYTE)
       .buildSchema();
 
     final RowSet.SingleRowSet emptyRowSet_Sv2 = operatorFixture.rowSetBuilder(inputSchema_sv2)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinAdvanced.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinAdvanced.java
index da51c95..82f8e47 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinAdvanced.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinAdvanced.java
@@ -24,6 +24,7 @@ import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.proto.UserBitShared;
 import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.RecordBatchLoader;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.test.BaseTestQuery;
@@ -232,10 +233,12 @@ public class TestHashJoinAdvanced extends JoinTestBase {
       batchLoader.load(queryDataBatch.getHeader().getDef(), queryDataBatch.getData());
 
       final BatchSchema actualSchema = batchLoader.getSchema();
-      final BatchSchema expectedSchema = new SchemaBuilder()
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
         .add("l_quantity", TypeProtos.MinorType.FLOAT8, TypeProtos.DataMode.REQUIRED)
         .add("l_shipdate", TypeProtos.MinorType.DATE, TypeProtos.DataMode.REQUIRED)
-        .add("o_custkey", TypeProtos.MinorType.INT, TypeProtos.DataMode.REQUIRED)
+        .add("o_custkey", TypeProtos.MinorType.INT, TypeProtos.DataMode.REQUIRED);
+      final BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
       Assert.assertTrue(expectedSchema.isEquivalent(actualSchema));
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinOutcome.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinOutcome.java
index 429bb70..57dbfd8 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinOutcome.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestHashJoinOutcome.java
@@ -85,11 +85,11 @@ public class TestHashJoinOutcome extends PhysicalOpUnitTestBase {
     inputSchemaRight = (TupleSchema) new SchemaBuilder()
       .add("rightcol", TypeProtos.MinorType.INT)
       .buildSchema();
-    batchSchemaRight = inputSchemaRight.toBatchSchema(BatchSchema.SelectionVectorMode.NONE);
+    batchSchemaRight = new BatchSchema(BatchSchema.SelectionVectorMode.NONE, inputSchemaRight.toFieldList());
     inputSchemaLeft = (TupleSchema) new SchemaBuilder()
       .add("leftcol", TypeProtos.MinorType.INT)
       .buildSchema();
-    batchSchemaLeft = inputSchemaLeft.toBatchSchema(BatchSchema.SelectionVectorMode.NONE);
+    batchSchemaLeft = new BatchSchema(BatchSchema.SelectionVectorMode.NONE, inputSchemaLeft.toFieldList());
   }
 
   private void prepareUninitContainers(List<VectorContainer> emptyInputContainers,
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
index 89df598..d9d27c4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/protocol/TestOperatorRecordBatch.java
@@ -42,10 +42,12 @@ import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.RecordBatch.IterOutcome;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.TypedFieldId;
 import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.VectorWrapper;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.IntVector;
 import org.apache.drill.exec.vector.VarCharVector;
 import org.apache.drill.test.SubOperatorTest;
@@ -114,10 +116,10 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
         return false;
       }
       if (nextCount == schemaChangeAt) {
-        BatchSchema newSchema = new SchemaBuilder(batchAccessor.getSchema())
-            .add("b", MinorType.VARCHAR)
-            .build();
-        VectorContainer newContainer = new VectorContainer(fixture.allocator(), newSchema);
+        BatchSchemaBuilder newSchema = new BatchSchemaBuilder(batchAccessor.getSchema());
+        newSchema.schemaBuilder()
+            .add("b", MinorType.VARCHAR);
+        VectorContainer newContainer = new VectorContainer(fixture.allocator(), newSchema.build());
         batchAccessor.setContainer(newContainer);
       }
       return true;
@@ -134,8 +136,10 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
   }
 
   private static VectorContainer mockBatch() {
-    VectorContainer container = new VectorContainer(fixture.allocator(), new SchemaBuilder()
-        .add("a", MinorType.INT)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+      .add("a", MinorType.INT);
+    VectorContainer container = new VectorContainer(fixture.allocator(), new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build());
     container.buildSchema(SelectionVectorMode.NONE);
     return container;
@@ -349,9 +353,11 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
 
   @Test
   public void testBatchAccessor() {
-    BatchSchema schema = new SchemaBuilder()
-        .add("a", MinorType.INT)
-        .add("b", MinorType.VARCHAR)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+      .add("a", MinorType.INT)
+      .add("b", MinorType.VARCHAR);
+    BatchSchema schema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
     SingleRowSet rs = fixture.rowSetBuilder(schema)
         .addRow(10, "fred")
@@ -419,10 +425,10 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
 
   @Test
   public void testSchemaChange() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .add("b", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet rs = fixture.rowSetBuilder(schema)
         .addRow(10, "fred")
         .addRow(20, "wilma")
@@ -508,10 +514,10 @@ public class TestOperatorRecordBatch extends SubOperatorTest {
 
   @Test
   public void testSv2() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .add("b", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet rs = fixture.rowSetBuilder(schema)
         .addRow(10, "fred")
         .addRow(20, "wilma")
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/BaseScanOperatorExecTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/BaseScanOperatorExecTest.java
index fe4a8b8..0e21092 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/BaseScanOperatorExecTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/BaseScanOperatorExecTest.java
@@ -30,7 +30,6 @@ import org.apache.drill.exec.physical.impl.scan.framework.ManagedReader;
 import org.apache.drill.exec.physical.impl.scan.framework.SchemaNegotiator;
 import org.apache.drill.exec.physical.rowSet.ResultSetLoader;
 import org.apache.drill.exec.physical.rowSet.RowSetLoader;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
@@ -128,10 +127,10 @@ public class BaseScanOperatorExecTest extends SubOperatorTest {
   }
 
   protected SingleRowSet makeExpected(int offset) {
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .addNullable("b", MinorType.VARCHAR, 10)
-        .build();
+        .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(offset + 10, "fred")
         .addRow(offset + 20, "wilma")
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
index 0b3f9c8..4ade742 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestFileScanFramework.java
@@ -41,6 +41,7 @@ import org.apache.drill.exec.physical.rowSet.ResultSetLoader;
 import org.apache.drill.exec.physical.rowSet.RowSetLoader;
 import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.ColumnBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
@@ -357,18 +358,20 @@ public class TestFileScanFramework extends SubOperatorTest {
     // Select table and implicit columns.
 
     FileScanFixtureBuilder builder = new FileScanFixtureBuilder();
-    builder.setProjection(new String[] {"a", "b", "filename", "suffix"});
+    builder.setProjection("a", "b", "filename", "suffix");
     builder.addReader(reader);
     ScanFixture scanFixture = builder.build();
     ScanOperatorExec scan = scanFixture.scanOp;
 
     // Expect data and implicit columns
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .add("a", MinorType.INT)
         .addNullable("b", MinorType.VARCHAR, 10)
         .add("filename", MinorType.VARCHAR)
-        .add("suffix", MinorType.VARCHAR)
+        .add("suffix", MinorType.VARCHAR);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(10, "fred", MOCK_FILE_NAME, MOCK_SUFFIX)
@@ -411,19 +414,21 @@ public class TestFileScanFramework extends SubOperatorTest {
     // Select table and implicit columns.
 
     FileScanFixtureBuilder builder = new FileScanFixtureBuilder();
-    builder.setProjection(new String[] {"dir0", "b", "filename", "c", "suffix"});
+    builder.setProjection("dir0", "b", "filename", "c", "suffix");
     builder.addReader(reader);
     ScanFixture scanFixture = builder.build();
     ScanOperatorExec scan = scanFixture.scanOp;
 
     // Expect data and implicit columns
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("dir0", MinorType.VARCHAR)
         .addNullable("b", MinorType.VARCHAR, 10)
         .add("filename", MinorType.VARCHAR)
         .addNullable("c", MinorType.INT)
-        .add("suffix", MinorType.VARCHAR)
+        .add("suffix", MinorType.VARCHAR);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(MOCK_DIR0, "fred", MOCK_FILE_NAME, null, MOCK_SUFFIX)
@@ -458,14 +463,15 @@ public class TestFileScanFramework extends SubOperatorTest {
     // Select no columns
 
     FileScanFixtureBuilder builder = new FileScanFixtureBuilder();
-    builder.setProjection(new String[] {});
+    builder.setProjection();
     builder.addReader(reader);
     ScanFixture scanFixture = builder.build();
     ScanOperatorExec scan = scanFixture.scanOp;
 
     // Expect data and implicit columns
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(new SchemaBuilder())
         .build();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow()
@@ -528,17 +534,19 @@ public class TestFileScanFramework extends SubOperatorTest {
     // Select one of the two map columns
 
     FileScanFixtureBuilder builder = new FileScanFixtureBuilder();
-    builder.setProjection(new String[] {"m1.a"});
+    builder.setProjection("m1.a");
     builder.addReader(reader);
     ScanFixture scanFixture = builder.build();
     ScanOperatorExec scan = scanFixture.scanOp;
 
     // Expect data and implicit columns
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addMap("m1")
           .add("a", MinorType.INT)
-          .resumeSchema()
+          .resumeSchema();
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addSingleCol(new Object[] {10})
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java
index 03587cb..79cd0d5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanBatchWriters.java
@@ -24,10 +24,10 @@ import org.apache.drill.exec.physical.base.AbstractSubScan;
 import org.apache.drill.exec.physical.base.Scan;
 import org.apache.drill.exec.physical.impl.OutputMutator;
 import org.apache.drill.exec.physical.impl.ScanBatch;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.VectorContainer;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
 import org.apache.drill.exec.vector.complex.writer.BaseWriter;
 import org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter;
@@ -105,11 +105,11 @@ public class TestScanBatchWriters extends SubOperatorTest {
 
         // Expected
 
-        BatchSchema schema = new SchemaBuilder()
+        TupleMetadata schema = new SchemaBuilder()
             .addNullable("a", MinorType.INT)
             .addNullable("b", MinorType.VARCHAR)
             .addArray("c", MinorType.INT)
-            .build();
+            .buildSchema();
         RowSet expected = fixture.rowSetBuilder(schema)
             .addRow(10, "Fred", new int[] { 100, 110, 120 } )
             .addRow(20, "Wilma", null)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java
index 575e541..523d826 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOperExecSmoothing.java
@@ -26,7 +26,6 @@ import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.ScanFixture;
 import org.apache.drill.exec.physical.impl.scan.framework.SchemaNegotiator;
 import org.apache.drill.exec.physical.rowSet.RowSetLoader;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.test.rowSet.RowSetUtilities;
@@ -140,10 +139,10 @@ public class TestScanOperExecSmoothing extends BaseScanOperatorExecTest {
 
     // Second reader.
 
-    BatchSchema expectedSchema2 = new SchemaBuilder()
+    TupleMetadata expectedSchema2 = new SchemaBuilder()
         .add("a", MinorType.VARCHAR)
         .addNullable("b", MinorType.VARCHAR, 10)
-        .build();
+        .buildSchema();
 
     assertTrue(scan.next());
     assertEquals(2, scan.batchAccessor().schemaVersion());
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java
index d90efb3..3f2b5f1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorEarlySchema.java
@@ -35,6 +35,7 @@ import org.apache.drill.exec.physical.rowSet.impl.RowSetTestUtils;
 import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
 import org.apache.drill.exec.record.VectorContainer;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.test.SubOperatorTest;
@@ -226,10 +227,10 @@ public class TestScanOrchestratorEarlySchema extends SubOperatorTest {
 
     ResultSetLoader loader = reader.makeTableLoader(tableSchema);
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("b", MinorType.VARCHAR)
         .add("a", MinorType.INT)
-        .build();
+        .buildSchema();
 
     // Create a batch of data.
 
@@ -281,11 +282,11 @@ public class TestScanOrchestratorEarlySchema extends SubOperatorTest {
 
     ResultSetLoader loader = reader.makeTableLoader(tableSchema);
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .add("b", MinorType.VARCHAR)
         .addNullable("c", MinorType.INT)
-        .build();
+        .buildSchema();
 
    // Create a batch of data.
 
@@ -345,11 +346,11 @@ public class TestScanOrchestratorEarlySchema extends SubOperatorTest {
 
     ResultSetLoader loader = reader.makeTableLoader(tableSchema);
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .add("b", MinorType.VARCHAR)
         .addNullable("c", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     // Create a batch of data.
 
@@ -405,9 +406,9 @@ public class TestScanOrchestratorEarlySchema extends SubOperatorTest {
 
     assertFalse(loader.writer().column("b").isProjected());
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
-        .build();
+        .buildSchema();
 
     // Create a batch of data.
 
@@ -468,7 +469,8 @@ public class TestScanOrchestratorEarlySchema extends SubOperatorTest {
 
     // Verify empty batch.
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(new SchemaBuilder())
         .build();
 
     // Create a batch of data.
@@ -584,9 +586,9 @@ public class TestScanOrchestratorEarlySchema extends SubOperatorTest {
 
     reader.makeTableLoader(tableSchema);
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("a", MinorType.INT)
-        .build();
+        .buildSchema();
 
     // Create a batch of data. Because there are no columns, it does
     // not make sense to ready any rows.
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java
index 84fbc0c..c2262d2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorLateSchema.java
@@ -28,7 +28,6 @@ import org.apache.drill.exec.physical.impl.scan.project.ScanSchemaOrchestrator.S
 import org.apache.drill.exec.physical.rowSet.ResultSetLoader;
 import org.apache.drill.exec.physical.rowSet.RowSetLoader;
 import org.apache.drill.exec.physical.rowSet.impl.RowSetTestUtils;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.test.SubOperatorTest;
@@ -143,10 +142,10 @@ public class TestScanOrchestratorLateSchema extends SubOperatorTest {
 
     // Verify
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .addNullable("c", MinorType.INT)
-        .build();
+        .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(1, null)
         .addRow(2, null)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorMetadata.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorMetadata.java
index 1803641..cebe055 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorMetadata.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/TestScanOrchestratorMetadata.java
@@ -35,7 +35,6 @@ import org.apache.drill.exec.physical.impl.scan.project.ScanSchemaOrchestrator;
 import org.apache.drill.exec.physical.impl.scan.project.ScanSchemaOrchestrator.ScanOrchestratorBuilder;
 import org.apache.drill.exec.physical.rowSet.ResultSetLoader;
 import org.apache.drill.exec.physical.rowSet.impl.RowSetTestUtils;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.test.SubOperatorTest;
@@ -164,9 +163,9 @@ public class TestScanOrchestratorMetadata extends SubOperatorTest {
 
     ResultSetLoader loader = reader.makeTableLoader(tableSchema);
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("c", MinorType.INT)
-        .build();
+        .buildSchema();
 
     // Create a batch of data.
 
@@ -236,12 +235,12 @@ public class TestScanOrchestratorMetadata extends SubOperatorTest {
     // Verify empty batch.
 
     reader.defineSchema();
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .add("b", MinorType.VARCHAR)
         .addNullable("dir0", MinorType.VARCHAR)
         .add("suffix", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     {
       SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
          .build();
@@ -309,12 +308,12 @@ public class TestScanOrchestratorMetadata extends SubOperatorTest {
 
     ResultSetLoader loader = reader.makeTableLoader(tableSchema);
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("dir0", MinorType.VARCHAR)
         .add("b", MinorType.VARCHAR)
         .add("suffix", MinorType.VARCHAR)
         .addNullable("c", MinorType.INT)
-        .build();
+        .buildSchema();
 
     // Create a batch of data.
 
@@ -368,12 +367,12 @@ public class TestScanOrchestratorMetadata extends SubOperatorTest {
         .add("a", MinorType.INT)
         .addNullable("b", MinorType.VARCHAR, 10)
         .buildSchema();
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable(ScanTestUtils.partitionColName(0), MinorType.VARCHAR)
         .addNullable(ScanTestUtils.partitionColName(1), MinorType.VARCHAR)
         .add(ScanTestUtils.FILE_NAME_COL, MinorType.VARCHAR)
         .addNullable("b", MinorType.VARCHAR, 10)
-        .build();
+        .buildSchema();
 
     SchemaTracker tracker = new SchemaTracker();
     int schemaVersion;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java
index 086da96..ff82446 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestConstantColumnLoader.java
@@ -31,9 +31,9 @@ import org.apache.drill.exec.physical.impl.scan.file.FileMetadataColumnDefn;
 import org.apache.drill.exec.physical.impl.scan.file.PartitionColumn;
 import org.apache.drill.exec.physical.impl.scan.project.ConstantColumnLoader.ConstantColumnSpec;
 import org.apache.drill.exec.physical.rowSet.impl.ResultVectorCacheImpl;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.store.ColumnExplorer.ImplicitFileColumns;
 import org.apache.drill.test.SubOperatorTest;
 import org.apache.drill.test.rowSet.RowSet.SingleRowSet;
@@ -108,10 +108,10 @@ public class TestConstantColumnLoader extends SubOperatorTest {
 
     // Verify
 
-    final BatchSchema expectedSchema = new SchemaBuilder()
+    final TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", aType)
         .add("b", bType)
-        .build();
+        .buildSchema();
     final SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow("a-value", "b-value")
         .addRow("a-value", "b-value")
@@ -146,10 +146,10 @@ public class TestConstantColumnLoader extends SubOperatorTest {
 
     // Verify
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add(ScanTestUtils.SUFFIX_COL, MinorType.VARCHAR)
         .addNullable(partColName, MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow("csv", "y")
         .addRow("csv", "y")
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java
index 56b0ae2..38707e8 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/project/TestRowBatchMerger.java
@@ -23,10 +23,10 @@ import org.apache.drill.exec.physical.impl.scan.project.NullColumnBuilder.NullBu
 import org.apache.drill.exec.physical.impl.scan.project.ResolvedTuple.ResolvedRow;
 import org.apache.drill.exec.physical.rowSet.ResultVectorCache;
 import org.apache.drill.exec.physical.rowSet.impl.NullResultVectorCacheImpl;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.test.SubOperatorTest;
 import org.apache.drill.test.rowSet.RowSet;
@@ -96,10 +96,10 @@ public class TestRowBatchMerger extends SubOperatorTest {
   }
 
   private RowSetSource makeFirst() {
-    BatchSchema firstSchema = new SchemaBuilder()
+    TupleMetadata firstSchema = new SchemaBuilder()
         .add("d", MinorType.VARCHAR)
         .add("a", MinorType.INT)
-        .build();
+        .buildSchema();
     return new RowSetSource(
         fixture.rowSetBuilder(firstSchema)
           .addRow("barney", 10)
@@ -108,10 +108,10 @@ public class TestRowBatchMerger extends SubOperatorTest {
   }
 
   private RowSetSource makeSecond() {
-    BatchSchema secondSchema = new SchemaBuilder()
+    TupleMetadata secondSchema = new SchemaBuilder()
         .add("b", MinorType.INT)
         .add("c", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     return new RowSetSource(
         fixture.rowSetBuilder(secondSchema)
           .addRow(1, "foo.csv")
@@ -158,12 +158,12 @@ public class TestRowBatchMerger extends SubOperatorTest {
 
     // Verify
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .add("b", MinorType.INT)
         .add("c", MinorType.VARCHAR)
         .add("d", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(10, 1, "foo.csv", "barney")
         .addRow(20, 2, "foo.csv", "wilma")
@@ -199,12 +199,12 @@ public class TestRowBatchMerger extends SubOperatorTest {
 
     // Verify
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .add("b", MinorType.INT)
         .add("c", MinorType.VARCHAR)
         .add("d", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(10, 1, "foo.csv", "barney")
         .addRow(20, 2, "foo.csv", "wilma")
@@ -246,12 +246,12 @@ public class TestRowBatchMerger extends SubOperatorTest {
 
     // Verify
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .addNullable("null1", MinorType.INT)
         .addNullable("null2", MinorType.VARCHAR)
         .add("d", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(10, null, null, "barney")
         .addRow(20, null, null, "wilma")
@@ -312,7 +312,7 @@ public class TestRowBatchMerger extends SubOperatorTest {
 
     // Verify
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .addMap("map1")
           .addNullable("null1", MinorType.INT)
@@ -322,7 +322,7 @@ public class TestRowBatchMerger extends SubOperatorTest {
             .resumeMap()
           .resumeSchema()
         .add("d", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(10, mapValue(null, null, singleMap(null)), "barney")
         .addRow(20, mapValue(null, null, singleMap(null)), "wilma")
@@ -343,12 +343,12 @@ public class TestRowBatchMerger extends SubOperatorTest {
 
     // Create the first batch
 
-    BatchSchema inputSchema = new SchemaBuilder()
+    TupleMetadata inputSchema = new SchemaBuilder()
         .add("b", MinorType.VARCHAR)
         .addMap("a")
           .add("c", MinorType.INT)
           .resumeSchema()
-        .build();
+        .buildSchema();
     RowSetSource input = new RowSetSource(
         fixture.rowSetBuilder(inputSchema)
           .addRow("barney", singleMap(10))
@@ -362,7 +362,7 @@ public class TestRowBatchMerger extends SubOperatorTest {
 
     resolvedTuple.add(new TestProjection(resolvedTuple, 0));
     ResolvedMapColumn mapCol = new ResolvedMapColumn(resolvedTuple,
-        inputSchema.getColumn(1), 1);
+        inputSchema.column(1), 1);
     resolvedTuple.add(mapCol);
     ResolvedTuple map = mapCol.members();
     map.add(new TestProjection(map, 0));
@@ -386,13 +386,13 @@ public class TestRowBatchMerger extends SubOperatorTest {
 
     // Verify
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("b", MinorType.VARCHAR)
         .addMap("a")
           .add("c", MinorType.INT)
           .addNullable("null1", MinorType.INT)
           .resumeSchema()
-        .build();
+        .buildSchema();
     RowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow("barney", mapValue(10, null))
         .addRow("wilma", mapValue(20, null))
@@ -412,12 +412,12 @@ public class TestRowBatchMerger extends SubOperatorTest {
 
     // Create the first batch
 
-    BatchSchema inputSchema = new SchemaBuilder()
+    TupleMetadata inputSchema = new SchemaBuilder()
         .add("b", MinorType.VARCHAR)
         .addMapArray("a")
           .add("c", MinorType.INT)
           .resumeSchema()
-        .build();
+        .buildSchema();
     RowSetSource input = new RowSetSource(
         fixture.rowSetBuilder(inputSchema)
           .addRow("barney", mapArray(singleMap(10), singleMap(11), singleMap(12)))
@@ -431,7 +431,7 @@ public class TestRowBatchMerger extends SubOperatorTest {
 
     resolvedTuple.add(new TestProjection(resolvedTuple, 0));
     ResolvedMapColumn mapCol = new ResolvedMapColumn(resolvedTuple,
-        inputSchema.getColumn(1), 1);
+        inputSchema.column(1), 1);
     resolvedTuple.add(mapCol);
     ResolvedTuple map = mapCol.members();
     map.add(new TestProjection(map, 0));
@@ -455,13 +455,13 @@ public class TestRowBatchMerger extends SubOperatorTest {
 
     // Verify
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("b", MinorType.VARCHAR)
         .addMapArray("a")
           .add("c", MinorType.INT)
           .addNullable("null1", MinorType.INT)
           .resumeSchema()
-        .build();
+        .buildSchema();
     RowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow("barney", mapArray(
             mapValue(10, null), mapValue(11, null), mapValue(12, null)))
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/svremover/AbstractGenericCopierTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/svremover/AbstractGenericCopierTest.java
index cc745eb..578065a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/svremover/AbstractGenericCopierTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/svremover/AbstractGenericCopierTest.java
@@ -26,6 +26,7 @@ import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.record.VectorContainer;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.vector.SchemaChangeCallBack;
 import org.apache.drill.test.BaseDirTestWatcher;
@@ -153,10 +154,12 @@ public abstract class AbstractGenericCopierTest {
     MaterializedField colC = MaterializedField.create("colC", Types.repeated(TypeProtos.MinorType.FLOAT4));
     MaterializedField colD = MaterializedField.create("colD", Types.repeated(TypeProtos.MinorType.VARCHAR));
 
-    return new SchemaBuilder().add(colA)
+    SchemaBuilder schemaBuilder = new SchemaBuilder().add(colA)
       .add(colB)
       .add(colC)
-      .add(colD)
+      .add(colD);
+    return new BatchSchemaBuilder()
+      .withSchemaBuilder(schemaBuilder)
       .withSVMode(mode)
       .build();
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java
index 13a53b7..2511ca4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestBatchValidator.java
@@ -26,9 +26,9 @@ import java.util.List;
 
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.VectorAccessible;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.RepeatedVarCharVector;
 import org.apache.drill.exec.vector.UInt4Vector;
 import org.apache.drill.exec.vector.ValueVector;
@@ -69,10 +69,10 @@ public class TestBatchValidator /* TODO: extends SubOperatorTest */ {
 
   @Test
   public void testValidFixed() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .addNullable("b", MinorType.INT)
-        .build();
+        .buildSchema();
 
     SingleRowSet batch = fixture.rowSetBuilder(schema)
         .addRow(10, 100)
@@ -89,10 +89,10 @@ public class TestBatchValidator /* TODO: extends SubOperatorTest */ {
 
   @Test
   public void testValidVariable() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.VARCHAR)
         .addNullable("b", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     SingleRowSet batch = fixture.rowSetBuilder(schema)
         .addRow("col1.1", "col1.2")
@@ -109,10 +109,10 @@ public class TestBatchValidator /* TODO: extends SubOperatorTest */ {
 
   @Test
   public void testValidRepeated() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.INT, DataMode.REPEATED)
         .add("b", MinorType.VARCHAR, DataMode.REPEATED)
-        .build();
+        .buildSchema();
 
     SingleRowSet batch = fixture.rowSetBuilder(schema)
         .addRow(intArray(), strArray())
@@ -128,9 +128,9 @@ public class TestBatchValidator /* TODO: extends SubOperatorTest */ {
 
   @Test
   public void testVariableMissingLast() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     SingleRowSet batch = fixture.rowSetBuilder(schema)
         .addRow("x")
@@ -160,9 +160,9 @@ public class TestBatchValidator /* TODO: extends SubOperatorTest */ {
 
   @Test
   public void testVariableCorruptFirst() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     SingleRowSet batch = fixture.rowSetBuilder(schema)
         .addRow("x")
@@ -196,9 +196,9 @@ public class TestBatchValidator /* TODO: extends SubOperatorTest */ {
 
   @Test
   public void testVariableCorruptMiddleLow() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     SingleRowSet batch = fixture.rowSetBuilder(schema)
         .addRow("xx")
@@ -220,9 +220,9 @@ public class TestBatchValidator /* TODO: extends SubOperatorTest */ {
 
   @Test
   public void testVariableCorruptMiddleHigh() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     SingleRowSet batch = fixture.rowSetBuilder(schema)
         .addRow("xx")
@@ -244,9 +244,9 @@ public class TestBatchValidator /* TODO: extends SubOperatorTest */ {
 
   @Test
   public void testVariableCorruptLastOutOfRange() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     SingleRowSet batch = fixture.rowSetBuilder(schema)
         .addRow("xx")
@@ -268,9 +268,9 @@ public class TestBatchValidator /* TODO: extends SubOperatorTest */ {
 
   @Test
   public void testRepeatedBadArrayOffset() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.VARCHAR, DataMode.REPEATED)
-        .build();
+        .buildSchema();
 
     SingleRowSet batch = fixture.rowSetBuilder(schema)
         .addRow((Object) strArray())
@@ -294,9 +294,9 @@ public class TestBatchValidator /* TODO: extends SubOperatorTest */ {
 
   @Test
   public void testRepeatedBadValueOffset() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.VARCHAR, DataMode.REPEATED)
-        .build();
+        .buildSchema();
 
     SingleRowSet batch = fixture.rowSetBuilder(schema)
         .addRow((Object) strArray())
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java
index c074976..5222cb9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java
@@ -25,8 +25,8 @@ import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.Types;
 import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.test.TestBuilder;
 import org.apache.drill.test.rowSet.RowSet;
@@ -66,9 +66,9 @@ public class TestExternalSort extends BaseTestQuery {
     final String tableDirName = "numericTypes";
 
     {
-      final BatchSchema schema = new SchemaBuilder()
+      TupleMetadata schema = new SchemaBuilder()
         .add("a", Types.required(TypeProtos.MinorType.INT))
-        .build();
+        .buildSchema();
       final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
 
       for (int i = 0; i <= record_count; i += 2) {
@@ -82,9 +82,9 @@ public class TestExternalSort extends BaseTestQuery {
     }
 
     {
-      final BatchSchema schema = new SchemaBuilder()
+      final TupleMetadata schema = new SchemaBuilder()
         .add("a", Types.required(TypeProtos.MinorType.FLOAT4))
-        .build();
+        .buildSchema();
       final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
 
       for (int i = 1; i <= record_count; i += 2) {
@@ -138,9 +138,9 @@ public class TestExternalSort extends BaseTestQuery {
     final String tableDirName = "numericAndStringTypes";
 
     {
-      final BatchSchema schema = new SchemaBuilder()
+      final TupleMetadata schema = new SchemaBuilder()
         .add("a", Types.required(TypeProtos.MinorType.INT))
-        .build();
+        .buildSchema();
       final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
 
       for (int i = 0; i <= record_count; i += 2) {
@@ -154,9 +154,9 @@ public class TestExternalSort extends BaseTestQuery {
     }
 
     {
-      final BatchSchema schema = new SchemaBuilder()
+      final TupleMetadata schema = new SchemaBuilder()
         .add("a", Types.required(TypeProtos.MinorType.INT))
-        .build();
+        .buildSchema();
       final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
 
       for (int i = 1; i <= record_count; i += 2) {
@@ -205,10 +205,10 @@ public class TestExternalSort extends BaseTestQuery {
     final String tableDirName = "newColumns";
 
     {
-      final BatchSchema schema = new SchemaBuilder()
+      final TupleMetadata schema = new SchemaBuilder()
         .add("a", TypeProtos.MinorType.INT)
         .add("b", TypeProtos.MinorType.INT)
-        .build();
+        .buildSchema();
       final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
 
       for (int i = 0; i <= record_count; i += 2) {
@@ -222,10 +222,10 @@ public class TestExternalSort extends BaseTestQuery {
     }
 
     {
-      final BatchSchema schema = new SchemaBuilder()
+      final TupleMetadata schema = new SchemaBuilder()
         .add("a", TypeProtos.MinorType.INT)
         .add("c", TypeProtos.MinorType.INT)
-        .build();
+        .buildSchema();
       final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
 
       for (int i = 1; i <= record_count; i += 2) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestCopier.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestCopier.java
index 3ae54b4..22a5b35 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestCopier.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestCopier.java
@@ -33,7 +33,6 @@ import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
-import org.apache.drill.exec.record.metadata.TupleSchema;
 import org.apache.drill.test.OperatorFixture;
 import org.apache.drill.test.SubOperatorTest;
 import org.apache.drill.test.rowSet.RowSet.ExtendableRowSet;
@@ -69,7 +68,7 @@ public class TestCopier extends SubOperatorTest {
       // code. Only nuisance is that we don't have the required metadata
       // readily at hand here...
 
-      copier.startMerge(TupleSchema.toBatchSchema(schema),
+      copier.startMerge(new BatchSchema(BatchSchema.SelectionVectorMode.NONE, schema.toFieldList()),
           batches, dest, 10, null);
       fail();
     } catch (AssertionError e) {
@@ -343,7 +342,7 @@ public class TestCopier extends SubOperatorTest {
   }
 
   public void testMapType(OperatorFixture fixture) throws Exception {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("key", MinorType.INT)
         .addMap("m1")
           .add("b", MinorType.INT)
@@ -351,7 +350,7 @@ public class TestCopier extends SubOperatorTest {
             .add("c", MinorType.INT)
             .resumeMap()
           .resumeSchema()
-        .build();
+        .buildSchema();
 
     CopierTester tester = new CopierTester(fixture);
     tester.addInput(fixture.rowSetBuilder(schema)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestShortArrays.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestShortArrays.java
index f4a8761..99aee7d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestShortArrays.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestShortArrays.java
@@ -24,12 +24,12 @@ import static org.junit.Assert.assertTrue;
 
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.RecordBatchSizer;
 import org.apache.drill.exec.record.RecordBatchSizer.ColumnSize;
 import org.apache.drill.exec.record.VectorInitializer;
 import org.apache.drill.exec.record.VectorInitializer.AllocationHint;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.IntVector;
 import org.apache.drill.exec.vector.RepeatedIntVector;
 import org.apache.drill.exec.vector.ValueVector;
@@ -57,10 +57,10 @@ public class TestShortArrays extends SubOperatorTest {
     // Create a row set with less than one item, on
     // average, per array.
 
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .addArray("b", MinorType.INT)
-        .build();
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema)
         .addRow(1, intArray(10));
     for (int i = 2; i <= 10; i++) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSortImpl.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSortImpl.java
index 8fdf6f5..f1481ab 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSortImpl.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSortImpl.java
@@ -36,7 +36,6 @@ import org.apache.drill.exec.physical.impl.spill.SpillSet;
 import org.apache.drill.exec.physical.impl.xsort.managed.SortImpl.SortResults;
 import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
 import org.apache.drill.exec.proto.UserBitShared.QueryId;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
@@ -534,7 +533,7 @@ public class TestSortImpl extends DrillTest {
     for (int i = 0; i < colCount; i++) {
       builder.add("col" + (i+1), MinorType.INT);
     }
-    BatchSchema schema = builder.build();
+    TupleMetadata schema = builder.buildSchema();
     ExtendableRowSet rowSet = fixture.rowSet(schema);
     RowSetWriter writer = rowSet.writer(rowCount);
     for (int i = 0; i < rowCount; i++) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSorter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSorter.java
index 684e131..b03cce9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSorter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSorter.java
@@ -31,7 +31,6 @@ import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.ops.OperatorContext;
 import org.apache.drill.exec.physical.config.Sort;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.test.BaseDirTestWatcher;
@@ -357,10 +356,10 @@ public class TestSorter extends DrillTest {
     }
 
     public void test(MinorType type) throws SchemaChangeException {
-      BatchSchema schema = new SchemaBuilder()
+      TupleMetadata schema = new SchemaBuilder()
           .add("key", type)
-          .build();
-      SingleRowSet input = makeInputData(fixture.allocator(), schema);
+          .buildSchema();
+      SingleRowSet input = makeInputData(schema);
       input = input.toIndirect();
       sorter.sortBatch(input.container(), input.getSv2());
       sorter.close();
@@ -368,8 +367,7 @@ public class TestSorter extends DrillTest {
       input.clear();
     }
 
-    protected SingleRowSet makeInputData(BufferAllocator allocator,
-        BatchSchema schema) {
+    protected SingleRowSet makeInputData(TupleMetadata schema) {
       RowSetBuilder builder = fixture.rowSetBuilder(schema);
       int rowCount = 100;
       Random rand = new Random();
@@ -636,12 +634,12 @@ public class TestSorter extends DrillTest {
   @Test
   @Ignore("DRILL-5384")
   public void testMapKey() throws Exception {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
         .addMap("map")
           .add("key", MinorType.INT)
           .add("value", MinorType.VARCHAR)
           .resumeSchema()
-        .build();
+        .buildSchema();
 
     SingleRowSet input = fixture.rowSetBuilder(schema)
         .addRow(3, "third")
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderOmittedValues.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderOmittedValues.java
index 9ede6a5..94d8817 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderOmittedValues.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderOmittedValues.java
@@ -27,7 +27,6 @@ import org.apache.drill.categories.RowSetTests;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.physical.rowSet.ResultSetLoader;
 import org.apache.drill.exec.physical.rowSet.RowSetLoader;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.ValueVector;
@@ -140,14 +139,14 @@ public class TestResultSetLoaderOmittedValues extends SubOperatorTest {
     RowSet actual = fixture.wrap(rsLoader.harvest());
 //    actual.print();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .add("b", MinorType.VARCHAR)
         .addNullable("c", MinorType.VARCHAR)
         .add("d", MinorType.INT)
         .addNullable("e", MinorType.INT)
         .addArray("f", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(  1, "b_1", "c_1",  10,  100, strArray("f_1-1",  "f_1-2"))
         .addRow(  2, "b_2", "c_2",  20,  200, strArray("f_2-1",  "f_2-2"))
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderProjection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderProjection.java
index 9a9afff..cffd910 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderProjection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetLoaderProjection.java
@@ -38,7 +38,6 @@ import org.apache.drill.exec.physical.rowSet.ResultSetLoader;
 import org.apache.drill.exec.physical.rowSet.RowSetLoader;
 import org.apache.drill.exec.physical.rowSet.impl.ResultSetLoaderImpl.ResultSetOptions;
 import org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
@@ -135,10 +134,10 @@ public class TestResultSetLoaderProjection extends SubOperatorTest {
     // columns, only if defined by the loader, in the order
     // of definition.
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("b", MinorType.INT)
         .add("c", MinorType.INT)
-        .build();
+        .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(1, 10)
         .addRow(2, 20)
@@ -259,7 +258,7 @@ public class TestResultSetLoaderProjection extends SubOperatorTest {
 
     // Verify. Only the projected columns appear in the result set.
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
       .addMap("m1")
         .add("a", MinorType.INT)
         .add("b", MinorType.INT)
@@ -267,7 +266,7 @@ public class TestResultSetLoaderProjection extends SubOperatorTest {
       .addMap("m2")
         .add("d", MinorType.INT)
         .resumeSchema()
-      .build();
+      .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
       .addRow(mapValue( 1,  2), mapValue( 4))
       .addRow(mapValue(11, 12), mapValue(14))
@@ -373,7 +372,7 @@ public class TestResultSetLoaderProjection extends SubOperatorTest {
 
     // Verify. Only the projected columns appear in the result set.
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
       .addMapArray("m1")
         .add("a", MinorType.INT)
         .add("b", MinorType.INT)
@@ -381,7 +380,7 @@ public class TestResultSetLoaderProjection extends SubOperatorTest {
       .addMapArray("m2")
         .add("d", MinorType.INT)
         .resumeSchema()
-      .build();
+      .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
       .addRow(
           objArray(objArray(10, 20), objArray(11, 21)),
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetSchemaChange.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetSchemaChange.java
index 2dcd001..c46d30c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetSchemaChange.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/rowSet/impl/TestResultSetSchemaChange.java
@@ -30,7 +30,9 @@ import org.apache.drill.exec.physical.rowSet.ResultSetLoader;
 import org.apache.drill.exec.physical.rowSet.RowSetLoader;
 import org.apache.drill.exec.physical.rowSet.impl.ResultSetLoaderImpl.ResultSetOptions;
 import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.accessor.ScalarWriter;
 import org.apache.drill.test.SubOperatorTest;
@@ -131,14 +133,14 @@ public class TestResultSetSchemaChange extends SubOperatorTest {
 
     RowSet actual = fixture.wrap(rsLoader.harvest());
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.VARCHAR)
         .addNullable("b", MinorType.INT)
         .addNullable("c", MinorType.VARCHAR)
         .add("d", MinorType.VARCHAR)
         .add("e", MinorType.INT)
         .addArray("f", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow("a_1", null, null,   "",       0, strArray())
         .addRow("a_2", null, null,   "",       0, strArray())
@@ -206,8 +208,10 @@ public class TestResultSetSchemaChange extends SubOperatorTest {
 
     // Result should include only the first column.
 
-    BatchSchema expectedSchema = new SchemaBuilder()
-        .add("a", MinorType.VARCHAR)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+      .add("a", MinorType.VARCHAR);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
     RowSet result = fixture.wrap(rsLoader.harvest());
     assertTrue(result.batchSchema().isEquivalent(expectedSchema));
@@ -228,12 +232,14 @@ public class TestResultSetSchemaChange extends SubOperatorTest {
     result = fixture.wrap(rsLoader.harvest());
     assertEquals(5, rsLoader.schemaVersion());
     assertEquals(1, result.rowCount());
-    expectedSchema = new SchemaBuilder(expectedSchema)
+    BatchSchemaBuilder batchSchemaBuilder = new BatchSchemaBuilder(expectedSchema);
+    batchSchemaBuilder.schemaBuilder()
         .addNullable("b", MinorType.INT)
         .addNullable("c", MinorType.VARCHAR)
         .add("d", MinorType.INT)
-        .add("e", MinorType.INT)
-        .build();
+        .add("e", MinorType.INT);
+
+    expectedSchema = batchSchemaBuilder.build();
     assertTrue(result.batchSchema().isEquivalent(expectedSchema));
     RowSetReader reader = result.reader();
     reader.next();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java
index a1b7001..7f5632e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java
@@ -28,6 +28,7 @@ import org.apache.drill.exec.physical.config.Filter;
 import org.apache.drill.exec.physical.config.UnionAll;
 import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.hadoop.conf.Configuration;
@@ -67,8 +68,10 @@ public class TestMiniPlan extends MiniPlanUnitTestBase {
         .inputPaths(filePath)
         .build();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
-        .add("R_REGIONKEY", TypeProtos.MinorType.BIGINT)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+        .add("R_REGIONKEY", TypeProtos.MinorType.BIGINT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     new MiniPlanTestBuilder()
@@ -89,8 +92,10 @@ public class TestMiniPlan extends MiniPlanUnitTestBase {
         .jsonBatches(jsonBatches)
         .build();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
-        .addNullable("a", TypeProtos.MinorType.BIGINT)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+        .addNullable("a", TypeProtos.MinorType.BIGINT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     new MiniPlanTestBuilder()
@@ -129,9 +134,11 @@ public class TestMiniPlan extends MiniPlanUnitTestBase {
           .buildAddAsInput()
         .build();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("a", TypeProtos.MinorType.BIGINT)
-        .addNullable("b", TypeProtos.MinorType.BIGINT)
+        .addNullable("b", TypeProtos.MinorType.BIGINT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .withSVMode(BatchSchema.SelectionVectorMode.NONE)
         .build();
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
index 69a4214..c980322 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill.exec.physical.unit;
 
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import org.apache.calcite.rel.RelFieldCollation;
 import org.apache.calcite.rel.core.JoinRelType;
@@ -87,9 +88,11 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
   public void testJsonInputMixedWithEmptyFiles1() throws Exception {
     RecordBatch scanBatch = createScanBatchFromJson(SINGLE_EMPTY_JSON, SINGLE_JSON, SINGLE_EMPTY_JSON2, SINGLE_JSON2);
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("id", TypeProtos.MinorType.BIGINT)
-        .addNullable("name", TypeProtos.MinorType.VARCHAR)
+        .addNullable("name", TypeProtos.MinorType.VARCHAR);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     new MiniPlanTestBuilder()
@@ -110,9 +113,11 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
   public void testJsonInputMixedWithEmptyFiles2() throws Exception {
     RecordBatch scanBatch = createScanBatchFromJson(SINGLE_EMPTY_JSON, SINGLE_EMPTY_JSON2, SINGLE_JSON, SINGLE_JSON2);
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("id", TypeProtos.MinorType.BIGINT)
-        .addNullable("name", TypeProtos.MinorType.VARCHAR)
+        .addNullable("name", TypeProtos.MinorType.VARCHAR);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     new MiniPlanTestBuilder()
@@ -132,9 +137,11 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
   public void testJsonInputMixedWithEmptyFiles3() throws Exception {
     RecordBatch scanBatch = createScanBatchFromJson(SINGLE_EMPTY_JSON, SINGLE_JSON, SINGLE_JSON2, SINGLE_EMPTY_JSON2);
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("id", TypeProtos.MinorType.BIGINT)
-        .addNullable("name", TypeProtos.MinorType.VARCHAR)
+        .addNullable("name", TypeProtos.MinorType.VARCHAR);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     new MiniPlanTestBuilder()
@@ -154,9 +161,11 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
   public void testJsonInputMixedWithEmptyFiles4() throws Exception {
     RecordBatch scanBatch = createScanBatchFromJson(SINGLE_JSON, SINGLE_JSON2, SINGLE_EMPTY_JSON2, SINGLE_EMPTY_JSON2);
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("id", TypeProtos.MinorType.BIGINT)
-        .addNullable("name", TypeProtos.MinorType.VARCHAR)
+        .addNullable("name", TypeProtos.MinorType.VARCHAR);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     new MiniPlanTestBuilder()
@@ -292,8 +301,10 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
         .addInput(projectBatch)
         .build();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
-        .add("regionkey", TypeProtos.MinorType.BIGINT)
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
+        .add("regionkey", TypeProtos.MinorType.BIGINT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     new MiniPlanTestBuilder()
@@ -323,9 +334,11 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
         .addInput(rightScan)
         .build();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("a", TypeProtos.MinorType.BIGINT)
-        .addNullable("b", TypeProtos.MinorType.BIGINT)
+        .addNullable("b", TypeProtos.MinorType.BIGINT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .withSVMode(BatchSchema.SelectionVectorMode.NONE)
         .build();
 
@@ -354,9 +367,11 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
         .addInput(right)
         .build();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("a", TypeProtos.MinorType.BIGINT)
-        .addNullable("b", TypeProtos.MinorType.BIGINT)
+        .addNullable("b", TypeProtos.MinorType.BIGINT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .withSVMode(BatchSchema.SelectionVectorMode.NONE)
         .build();
 
@@ -386,9 +401,11 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
         .addInput(rightScan)
         .build();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("a", TypeProtos.MinorType.BIGINT)
-        .addNullable("b", TypeProtos.MinorType.BIGINT)
+        .addNullable("b", TypeProtos.MinorType.BIGINT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .withSVMode(BatchSchema.SelectionVectorMode.NONE)
         .build();
 
@@ -417,9 +434,11 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
         .addInput(right)
         .build();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("a", TypeProtos.MinorType.BIGINT)
-        .addNullable("b", TypeProtos.MinorType.BIGINT)
+        .addNullable("b", TypeProtos.MinorType.BIGINT);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .withSVMode(BatchSchema.SelectionVectorMode.NONE)
         .build();
 
@@ -459,14 +478,16 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
         .build();
 
     RecordBatch batch = new PopBuilder()
-        .physicalOperator(new UnionAll(Collections.EMPTY_LIST)) // Children list is provided through RecordBatch
+        .physicalOperator(new UnionAll(Collections.emptyList())) // Children list is provided through RecordBatch
         .addInput(leftFilter)
         .addInput(rightFilter)
         .build();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("a", TypeProtos.MinorType.BIGINT)
-        .addNullable("b", TypeProtos.MinorType.VARCHAR)
+        .addNullable("b", TypeProtos.MinorType.VARCHAR);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .withSVMode(BatchSchema.SelectionVectorMode.NONE)
         .build();
 
@@ -486,11 +507,13 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
         "100.0", "col3",
         "cast(nonExist as varchar(100))", "col4"), null, true);
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("col1", TypeProtos.MinorType.INT)
         .addNullable("col2", TypeProtos.MinorType.INT)
         .add("col3", TypeProtos.MinorType.FLOAT8)
-        .addNullable("col4", TypeProtos.MinorType.VARCHAR, 100)
+        .addNullable("col4", TypeProtos.MinorType.VARCHAR, 100);
+    BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .withSVMode(BatchSchema.SelectionVectorMode.NONE)
         .build();
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordBatchSizer.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordBatchSizer.java
index e27d634..125c1fa 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordBatchSizer.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordBatchSizer.java
@@ -20,6 +20,7 @@ package org.apache.drill.exec.record;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.record.RecordBatchSizer.ColumnSize;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.vector.NullableVector;
 import org.apache.drill.exec.vector.UInt4Vector;
 import org.apache.drill.exec.vector.ValueVector;
@@ -68,7 +69,10 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testSizerFixedWidth() {
-    BatchSchema schema = new SchemaBuilder().add("a", MinorType.BIGINT).add("b", MinorType.FLOAT8).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.BIGINT)
+        .add("b", MinorType.FLOAT8)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
 
     for (long i = 0; i < 10; i++) {
@@ -127,7 +131,10 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testSizerRepeatedFixedWidth() {
-    BatchSchema schema = new SchemaBuilder().addArray("a", MinorType.BIGINT).addArray("b", MinorType.FLOAT8).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .addArray("a", MinorType.BIGINT)
+        .addArray("b", MinorType.FLOAT8)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
 
     for (long i = 0; i < 10; i++) {
@@ -200,7 +207,10 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testSizerNullableFixedWidth() {
-    BatchSchema schema = new SchemaBuilder().addNullable("a", MinorType.BIGINT).addNullable("b", MinorType.FLOAT8).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .addNullable("a", MinorType.BIGINT)
+        .addNullable("b", MinorType.FLOAT8)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
 
     for (long i = 0; i < 10; i++) {
@@ -275,7 +285,9 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testSizerVariableWidth() {
-    BatchSchema schema = new SchemaBuilder().add("a", MinorType.VARCHAR).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.VARCHAR)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
 
     StringBuilder stringBuilder = new StringBuilder();
@@ -347,7 +359,9 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testSizerRepeatedVariableWidth() {
-    BatchSchema schema = new SchemaBuilder().addArray("b", MinorType.VARCHAR).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .addArray("b", MinorType.VARCHAR)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
 
     // size = (5*6)/2 = 15
@@ -426,7 +440,9 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testSizerNullableVariableWidth() {
-    BatchSchema schema = new SchemaBuilder().addNullable("b", MinorType.VARCHAR).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .addNullable("b", MinorType.VARCHAR)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
 
     StringBuilder stringBuilder = new StringBuilder();
@@ -506,12 +522,12 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testSizerMap() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
       .addMap("map")
         .add("key", MinorType.INT)
         .add("value", MinorType.VARCHAR)
       .resumeSchema()
-      .build();
+      .buildSchema();
 
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
 
@@ -590,10 +606,12 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testSizerRepeatedMap() {
-    BatchSchema schema = new SchemaBuilder().addMapArray("map").
-      add("key", MinorType.INT).
-      add("value", MinorType.VARCHAR).
-      resumeSchema().build();
+    TupleMetadata schema = new SchemaBuilder()
+      .addMapArray("map")
+        .add("key", MinorType.INT)
+        .add("value", MinorType.VARCHAR)
+        .resumeSchema()
+      .buildSchema();
 
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
 
@@ -691,7 +709,7 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testSizerNestedMap() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
       .addMap("map")
         .add("key", MinorType.INT)
         .add("value", MinorType.VARCHAR)
@@ -700,7 +718,7 @@ public class TestRecordBatchSizer extends SubOperatorTest {
           .add("childValue", MinorType.VARCHAR)
           .resumeMap()
        .resumeSchema()
-      .build();
+      .buildSchema();
 
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
 
@@ -809,7 +827,10 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testEmptyBatchFixedWidth() {
-    BatchSchema schema = new SchemaBuilder().add("a", MinorType.BIGINT).add("b", MinorType.FLOAT8).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.BIGINT)
+        .add("b", MinorType.FLOAT8)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
     RowSet rows = builder.build();
 
@@ -865,7 +886,10 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testEmptyBatchRepeatedFixedWidth() {
-    BatchSchema schema = new SchemaBuilder().addArray("a", MinorType.BIGINT).addArray("b", MinorType.FLOAT8).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .addArray("a", MinorType.BIGINT)
+        .addArray("b", MinorType.FLOAT8)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
     RowSet rows = builder.build();
 
@@ -933,7 +957,10 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testEmptyBatchNullableFixedWidth() {
-    BatchSchema schema = new SchemaBuilder().addNullable("a", MinorType.BIGINT).addNullable("b", MinorType.FLOAT8).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .addNullable("a", MinorType.BIGINT)
+        .addNullable("b", MinorType.FLOAT8)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
     RowSet rows = builder.build();
 
@@ -1003,7 +1030,9 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testEmptyBatchVariableWidth() {
-    BatchSchema schema = new SchemaBuilder().add("a", MinorType.VARCHAR).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .add("a", MinorType.VARCHAR)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
     RowSet rows = builder.build();
 
@@ -1066,7 +1095,9 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testEmptyBatchRepeatedVariableWidth() {
-    BatchSchema schema = new SchemaBuilder().addArray("b", MinorType.VARCHAR).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .addArray("b", MinorType.VARCHAR)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
     RowSet rows = builder.build();
 
@@ -1138,7 +1169,9 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testEmptyBatchNullableVariableWidth() {
-    BatchSchema schema = new SchemaBuilder().addNullable("b", MinorType.VARCHAR).build();
+    TupleMetadata schema = new SchemaBuilder()
+        .addNullable("b", MinorType.VARCHAR)
+        .buildSchema();
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
     RowSet rows = builder.build();
 
@@ -1210,12 +1243,12 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testEmptyBatchMap() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
       .addMap("map")
       .add("key", MinorType.INT)
       .add("value", MinorType.VARCHAR)
       .resumeSchema()
-      .build();
+      .buildSchema();
 
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
     RowSet rows = builder.build();
@@ -1291,10 +1324,12 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testEmptyBatchRepeatedMap() {
-    BatchSchema schema = new SchemaBuilder().addMapArray("map").
-      add("key", MinorType.INT).
-      add("value", MinorType.VARCHAR).
-      resumeSchema().build();
+    TupleMetadata schema = new SchemaBuilder()
+        .addMapArray("map")
+        .add("key", MinorType.INT)
+        .add("value", MinorType.VARCHAR)
+        .resumeSchema()
+        .buildSchema();
 
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
     RowSet rows = builder.build();
@@ -1387,7 +1422,7 @@ public class TestRecordBatchSizer extends SubOperatorTest {
 
   @Test
   public void testEmptyBatchNestedMap() {
-    BatchSchema schema = new SchemaBuilder()
+    TupleMetadata schema = new SchemaBuilder()
       .addMap("map")
       .add("key", MinorType.INT)
       .add("value", MinorType.VARCHAR)
@@ -1396,7 +1431,7 @@ public class TestRecordBatchSizer extends SubOperatorTest {
       .add("childValue", MinorType.VARCHAR)
       .resumeMap()
       .resumeSchema()
-      .build();
+      .buildSchema();
 
     RowSetBuilder builder = fixture.rowSetBuilder(schema);
     RowSet rows = builder.build();
@@ -1507,10 +1542,10 @@ public class TestRecordBatchSizer extends SubOperatorTest {
    */
   @Test
   public void testEmptyVariableWidthVector() {
-    final BatchSchema schema = new SchemaBuilder()
+    final TupleMetadata schema = new SchemaBuilder()
       .add("key", MinorType.INT)
       .add("value", MinorType.VARCHAR)
-      .build();
+      .buildSchema();
 
     final RowSetBuilder builder = fixture.rowSetBuilder(schema);
     final RowSet rows = builder.build();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestVectorContainer.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestVectorContainer.java
index 576c4c8..f3b3b87 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestVectorContainer.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestVectorContainer.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.record;
 
 import static org.junit.Assert.fail;
 
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import org.apache.drill.categories.VectorTest;
 import org.apache.drill.common.types.TypeProtos;
@@ -75,10 +76,10 @@ public class TestVectorContainer extends DrillTest {
 
     // Simulated data from a reader
 
-    BatchSchema leftSchema = new SchemaBuilder()
+    TupleMetadata leftSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .addNullable("b", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet left = fixture.rowSetBuilder(leftSchema)
         .addRow(10, "fred")
         .addRow(20, "barney")
@@ -87,10 +88,10 @@ public class TestVectorContainer extends DrillTest {
 
     // Simulated "implicit" columns: row number and file name
 
-    BatchSchema rightSchema = new SchemaBuilder()
+    TupleMetadata rightSchema = new SchemaBuilder()
         .add("x", MinorType.SMALLINT)
         .add("y", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet right = fixture.rowSetBuilder(rightSchema)
         .addRow(1, "foo.txt")
         .addRow(2, "bar.txt")
@@ -99,12 +100,12 @@ public class TestVectorContainer extends DrillTest {
 
     // The merge batch we expect to see
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("a", MinorType.INT)
         .addNullable("b", MinorType.VARCHAR)
         .add("x", MinorType.SMALLINT)
         .add("y", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
     SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
         .addRow(10, "fred", 1, "foo.txt")
         .addRow(20, "barney", 2, "bar.txt")
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
index 11f60cb..24adf67 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/TestTupleSchema.java
@@ -528,7 +528,7 @@ public class TestTupleSchema extends SubOperatorTest {
 
     // And it is equivalent to the round trip to a batch schema.
 
-    BatchSchema batchSchema = root.toBatchSchema(SelectionVectorMode.NONE);
+    BatchSchema batchSchema = new BatchSchema(SelectionVectorMode.NONE, root.toFieldList());
     assertTrue(root.isEquivalent(MetadataUtils.fromFields(batchSchema)));
   }
 
@@ -709,7 +709,7 @@ public class TestTupleSchema extends SubOperatorTest {
     assertTrue(types.contains(MinorType.BIGINT));
     assertTrue(types.contains(MinorType.VARCHAR));
 
-    BatchSchema batchSchema = ((TupleSchema) schema).toBatchSchema(SelectionVectorMode.NONE);
+    BatchSchema batchSchema = new BatchSchema(SelectionVectorMode.NONE, schema.toFieldList());
 
     MaterializedField field = batchSchema.getColumn(0);
     assertEquals("u", field.getName());
@@ -759,7 +759,7 @@ public class TestTupleSchema extends SubOperatorTest {
     assertTrue(types.contains(MinorType.BIGINT));
     assertTrue(types.contains(MinorType.VARCHAR));
 
-    BatchSchema batchSchema = ((TupleSchema) schema).toBatchSchema(SelectionVectorMode.NONE);
+    BatchSchema batchSchema = new BatchSchema(SelectionVectorMode.NONE, schema.toFieldList());
 
     MaterializedField field = batchSchema.getColumn(0);
     assertEquals("list", field.getName());
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestLoad.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestLoad.java
index 632e126..8d62a2b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestLoad.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestLoad.java
@@ -36,6 +36,7 @@ import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.RecordBatchLoader;
 import org.apache.drill.exec.record.WritableBatch;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.vector.AllocationHelper;
 import org.apache.drill.exec.vector.ValueVector;
@@ -52,10 +53,12 @@ public class TestLoad extends ExecTest {
   @Test
   public void testLoadValueVector() throws Exception {
     final BufferAllocator allocator = RootAllocatorFactory.newRoot(drillConfig);
-    BatchSchema schema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .add("ints", MinorType.INT)
         .add("chars", MinorType.VARCHAR)
-        .addNullable("chars2", MinorType.VARCHAR)
+        .addNullable("chars2", MinorType.VARCHAR);
+    BatchSchema schema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     // Create vectors
@@ -157,9 +160,11 @@ public class TestLoad extends ExecTest {
     // Initial schema: a: INT, b: VARCHAR
     // Schema change: N/A
 
-    BatchSchema schema1 = new SchemaBuilder()
+    SchemaBuilder schemaBuilder1 = new SchemaBuilder()
         .add("a", MinorType.INT)
-        .add("b", MinorType.VARCHAR)
+        .add("b", MinorType.VARCHAR);
+    BatchSchema schema1 = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder1)
         .build();
     {
       assertTrue(loadBatch(allocator, batchLoader, schema1));
@@ -180,9 +185,11 @@ public class TestLoad extends ExecTest {
     // Schema change: No
 
     {
-      BatchSchema schema = new SchemaBuilder()
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
           .add("b", MinorType.VARCHAR)
-          .add("a", MinorType.INT)
+          .add("a", MinorType.INT);
+      BatchSchema schema = new BatchSchemaBuilder()
+          .withSchemaBuilder(schemaBuilder)
           .build();
       assertFalse(loadBatch(allocator, batchLoader, schema));
 
@@ -196,8 +203,10 @@ public class TestLoad extends ExecTest {
     // Schema change: Yes
 
     {
-      BatchSchema schema = new SchemaBuilder()
-          .add("a", MinorType.INT)
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
+          .add("a", MinorType.INT);
+      BatchSchema schema = new BatchSchemaBuilder()
+          .withSchemaBuilder(schemaBuilder)
           .build();
       assertTrue(loadBatch(allocator, batchLoader, schema));
       assertTrue(schema.isEquivalent(batchLoader.getSchema()));
@@ -212,10 +221,12 @@ public class TestLoad extends ExecTest {
       assertTrue(schema1.isEquivalent(batchLoader.getSchema()));
       batchLoader.getContainer().zeroVectors();
 
-      BatchSchema schema = new SchemaBuilder()
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
           .add("a", MinorType.INT)
           .add("b", MinorType.VARCHAR)
-          .add("c", MinorType.INT)
+          .add("c", MinorType.INT);
+      BatchSchema schema = new BatchSchemaBuilder()
+          .withSchemaBuilder(schemaBuilder)
           .build();
       assertTrue(loadBatch(allocator, batchLoader, schema));
       assertTrue(schema.isEquivalent(batchLoader.getSchema()));
@@ -226,10 +237,12 @@ public class TestLoad extends ExecTest {
     // Schema change: Yes
 
     {
-      BatchSchema schema = new SchemaBuilder()
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
           .add("a", MinorType.INT)
           .add("b", MinorType.VARCHAR)
-          .add("c", MinorType.VARCHAR)
+          .add("c", MinorType.VARCHAR);
+      BatchSchema schema = new BatchSchemaBuilder()
+          .withSchemaBuilder(schemaBuilder)
           .build();
       assertTrue(loadBatch(allocator, batchLoader, schema));
       assertTrue(schema.isEquivalent(batchLoader.getSchema()));
@@ -240,7 +253,8 @@ public class TestLoad extends ExecTest {
     // Schema change: Yes
 
     {
-      BatchSchema schema = new SchemaBuilder()
+      BatchSchema schema = new BatchSchemaBuilder()
+          .withSchemaBuilder(new SchemaBuilder())
           .build();
       assertTrue(loadBatch(allocator, batchLoader, schema));
       assertTrue(schema.isEquivalent(batchLoader.getSchema()));
@@ -258,10 +272,12 @@ public class TestLoad extends ExecTest {
 
     // Initial schema: a: INT, m: MAP{}
 
-    BatchSchema schema1 = new SchemaBuilder()
+    SchemaBuilder schemaBuilder1 = new SchemaBuilder()
         .add("a", MinorType.INT)
         .addMap("m")
-          .resumeSchema()
+          .resumeSchema();
+    BatchSchema schema1 = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder1)
         .build();
     {
       assertTrue(loadBatch(allocator, batchLoader, schema1));
@@ -281,11 +297,13 @@ public class TestLoad extends ExecTest {
     // Add column to map: a: INT, m: MAP{b: VARCHAR}
     // Schema change: Yes
 
-    BatchSchema schema2 = new SchemaBuilder()
+    SchemaBuilder schemaBuilder2 = new SchemaBuilder()
         .add("a", MinorType.INT)
         .addMap("m")
           .add("b", MinorType.VARCHAR)
-          .resumeSchema()
+          .resumeSchema();
+    BatchSchema schema2 = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder2)
         .build();
     {
       assertTrue(loadBatch(allocator, batchLoader, schema2));
@@ -306,12 +324,14 @@ public class TestLoad extends ExecTest {
     // Schema change: Yes
 
     {
-      BatchSchema schema = new SchemaBuilder()
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
           .add("a", MinorType.INT)
           .addMap("m")
             .add("b", MinorType.VARCHAR)
             .add("c", MinorType.INT)
-            .resumeSchema()
+            .resumeSchema();
+      BatchSchema schema = new BatchSchemaBuilder()
+          .withSchemaBuilder(schemaBuilder)
           .build();
       assertTrue(loadBatch(allocator, batchLoader, schema));
       assertTrue(schema.isEquivalent(batchLoader.getSchema()));
@@ -322,11 +342,13 @@ public class TestLoad extends ExecTest {
     // Schema change: Yes
 
     {
-      BatchSchema schema = new SchemaBuilder()
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
           .add("a", MinorType.INT)
           .addMap("m")
             .add("b", MinorType.VARCHAR)
-            .resumeSchema()
+            .resumeSchema();
+      BatchSchema schema = new BatchSchemaBuilder()
+          .withSchemaBuilder(schemaBuilder)
           .build();
       assertTrue(loadBatch(allocator, batchLoader, schema));
       assertTrue(schema.isEquivalent(batchLoader.getSchema()));
@@ -337,11 +359,13 @@ public class TestLoad extends ExecTest {
     // Schema change: Yes
 
     {
-      BatchSchema schema = new SchemaBuilder()
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
           .add("a", MinorType.INT)
           .addMap("m")
             .add("b", MinorType.INT)
-            .resumeSchema()
+            .resumeSchema();
+      BatchSchema schema = new BatchSchemaBuilder()
+          .withSchemaBuilder(schemaBuilder)
           .build();
       assertTrue(loadBatch(allocator, batchLoader, schema));
       assertTrue(schema.isEquivalent(batchLoader.getSchema()));
@@ -359,8 +383,10 @@ public class TestLoad extends ExecTest {
     // Drop map: a: INT
 
     {
-      BatchSchema schema = new SchemaBuilder()
-          .add("a", MinorType.INT)
+      SchemaBuilder schemaBuilder = new SchemaBuilder()
+          .add("a", MinorType.INT);
+      BatchSchema schema = new BatchSchemaBuilder()
+          .withSchemaBuilder(schemaBuilder)
           .build();
       assertTrue(loadBatch(allocator, batchLoader, schema));
       assertTrue(schema.isEquivalent(batchLoader.getSchema()));
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
index 403aab1..50f9786 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
@@ -23,6 +23,7 @@ import java.nio.file.Paths;
 
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.util.JsonStringArrayList;
 import org.apache.drill.exec.util.Text;
@@ -160,10 +161,12 @@ public class TestImplicitFileColumns extends BaseTestQuery {
 
   @Test
   public void testStarColumnJson() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("dir0", TypeProtos.MinorType.VARCHAR)
         .addNullable("id", TypeProtos.MinorType.BIGINT)
-        .addNullable("name", TypeProtos.MinorType.VARCHAR)
+        .addNullable("name", TypeProtos.MinorType.VARCHAR);
+    final BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
@@ -175,7 +178,7 @@ public class TestImplicitFileColumns extends BaseTestQuery {
 
   @Test
   public void testStarColumnParquet() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addNullable("dir0", TypeProtos.MinorType.VARCHAR)
         .addNullable("dir1", TypeProtos.MinorType.VARCHAR)
         .add("o_orderkey", TypeProtos.MinorType.INT)
@@ -186,7 +189,9 @@ public class TestImplicitFileColumns extends BaseTestQuery {
         .add("o_orderpriority", TypeProtos.MinorType.VARCHAR)
         .add("o_clerk", TypeProtos.MinorType.VARCHAR)
         .add("o_shippriority", TypeProtos.MinorType.INT)
-        .add("o_comment", TypeProtos.MinorType.VARCHAR)
+        .add("o_comment", TypeProtos.MinorType.VARCHAR);
+    final BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
@@ -198,10 +203,12 @@ public class TestImplicitFileColumns extends BaseTestQuery {
 
   @Test
   public void testStarColumnCsv() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .addArray("columns", TypeProtos.MinorType.VARCHAR)
         .addNullable("dir0", TypeProtos.MinorType.VARCHAR)
-        .addNullable("dir1", TypeProtos.MinorType.VARCHAR)
+        .addNullable("dir1", TypeProtos.MinorType.VARCHAR);
+    final BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     testBuilder()
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
index ac85a92..1b90d00 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
@@ -19,8 +19,8 @@ package org.apache.drill.exec.store.httpd;
 
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.rpc.RpcException;
 import org.apache.drill.exec.server.Drillbit;
 import org.apache.drill.exec.store.StoragePluginRegistry;
@@ -74,9 +74,9 @@ public class TestHTTPDLogReader extends ClusterTest {
     String sql = "SELECT `request_receive_time` FROM cp.`httpd/hackers-access-small.httpd` LIMIT 5";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
             .addNullable("request_receive_time", MinorType.TIMESTAMP)
-            .build();
+            .buildSchema();
     RowSet expected = client.rowSetBuilder(expectedSchema)
             .addRow(1445742685000L)
             .addRow(1445742686000L)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
index 9b5d109..ffb7c12 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
@@ -24,8 +24,8 @@ import java.util.List;
 import org.apache.drill.categories.RowSetTests;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.rpc.RpcException;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.server.Drillbit;
@@ -131,11 +131,11 @@ public class TestLogReader extends ClusterTest {
     String sql = "SELECT * FROM cp.`regex/simple.log1`";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("year", MinorType.INT)
         .addNullable("month", MinorType.INT)
         .addNullable("day", MinorType.INT)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow(2017, 12, 17)
@@ -161,10 +161,10 @@ public class TestLogReader extends ClusterTest {
     String sql = "SELECT `day`, `month` FROM cp.`regex/simple.log1`";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("day", MinorType.INT)
         .addNullable("month", MinorType.INT)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow(17, 12)
@@ -172,8 +172,6 @@ public class TestLogReader extends ClusterTest {
         .addRow(19, 12)
         .build();
 
-//    results.print();
-//    expected.print();
     RowSetUtilities.verify(expected, results);
   }
 
@@ -182,11 +180,11 @@ public class TestLogReader extends ClusterTest {
     String sql = "SELECT `day`, `missing`, `month` FROM cp.`regex/simple.log1`";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("day", MinorType.INT)
         .addNullable("missing", MinorType.VARCHAR)
         .addNullable("month", MinorType.INT)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow(17, null, 12)
@@ -194,8 +192,6 @@ public class TestLogReader extends ClusterTest {
         .addRow(19, null, 12)
         .build();
 
-//    results.print();
-//    expected.print();
     RowSetUtilities.verify(expected, results);
   }
 
@@ -204,9 +200,9 @@ public class TestLogReader extends ClusterTest {
     String sql = "SELECT `_raw` FROM cp.`regex/simple.log1`";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("_raw", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow("2017-12-17 10:52:41,820 [main] INFO  o.a.d.e.e.f.FunctionImplementationRegistry - Function registry loaded.  459 functions loaded in 1396 ms.")
@@ -221,9 +217,9 @@ public class TestLogReader extends ClusterTest {
     String sql = "SELECT TYPEOF(`entry_date`) AS entry_date FROM cp.`regex/simple.log2` LIMIT 1";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .add("entry_date", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow("TIMESTAMP")
@@ -244,11 +240,11 @@ public class TestLogReader extends ClusterTest {
     String sql = "SELECT * FROM cp.`regex/simple.log1`";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("year", MinorType.INT)
         .addNullable("month", MinorType.INT)
         .addNullable("day", MinorType.INT)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow(2017, 12, 17)
@@ -265,13 +261,13 @@ public class TestLogReader extends ClusterTest {
     String sql = "SELECT * FROM cp.`regex/mysql.sqllog`";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("field_0", MinorType.VARCHAR)
         .addNullable("field_1", MinorType.VARCHAR)
         .addNullable("field_2", MinorType.VARCHAR)
         .addNullable("field_3", MinorType.VARCHAR)
         .addNullable("field_4", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow("070823", "21:00:32", "1", "Connect", "root@localhost on test1")
@@ -291,13 +287,13 @@ public class TestLogReader extends ClusterTest {
     String sql = "SELECT field_0, field_1, field_2, field_3, field_4 FROM cp.`regex/mysql.sqllog`";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("field_0", MinorType.VARCHAR)
         .addNullable("field_1", MinorType.VARCHAR)
         .addNullable("field_2", MinorType.VARCHAR)
         .addNullable("field_3", MinorType.VARCHAR)
         .addNullable("field_4", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow("070823", "21:00:32", "1", "Connect", "root@localhost on test1")
@@ -315,10 +311,10 @@ public class TestLogReader extends ClusterTest {
     String sql = "SELECT field_0, field_4 FROM cp.`regex/mysql.sqllog`";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("field_0", MinorType.VARCHAR)
         .addNullable("field_4", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow("070823", "root@localhost on test1")
@@ -336,9 +332,9 @@ public class TestLogReader extends ClusterTest {
     String sql = "SELECT _raw FROM cp.`regex/mysql.sqllog`";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("_raw", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow("070823 21:00:32       1 Connect     root@localhost on test1")
@@ -357,9 +353,9 @@ public class TestLogReader extends ClusterTest {
     RowSet results = client.queryBuilder().sql(sql).rowSet();
     results.print();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("_unmatched_rows", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow("dfadkfjaldkjafsdfjlksdjflksjdlkfjsldkfjslkjl")
@@ -373,10 +369,10 @@ public class TestLogReader extends ClusterTest {
     String sql = "SELECT _raw, _unmatched_rows FROM cp.`regex/mysql.sqllog`";
     RowSet results = client.queryBuilder().sql(sql).rowSet();
 
-    BatchSchema expectedSchema = new SchemaBuilder()
+    TupleMetadata expectedSchema = new SchemaBuilder()
         .addNullable("_raw", MinorType.VARCHAR)
         .addNullable("_unmatched_rows", MinorType.VARCHAR)
-        .build();
+        .buildSchema();
 
     RowSet expected = client.rowSetBuilder(expectedSchema)
         .addRow("070823 21:00:32       1 Connect     root@localhost on test1", null)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetMetadataCache.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetMetadataCache.java
index 5799299..d8241cf 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetMetadataCache.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetMetadataCache.java
@@ -23,6 +23,7 @@ import org.apache.drill.PlanTestBase;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.commons.io.FileUtils;
 import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.store.parquet.metadata.Metadata;
 import org.apache.drill.exec.store.parquet.metadata.MetadataVersion;
@@ -874,7 +875,9 @@ public class TestParquetMetadataCache extends PlanTestBase {
         Paths.get("parquet", "metadata_files_with_old_versions", "v3_1", "metadata_table.requires_replace.txt"),
         Paths.get(emptyDirNameWithMetadataFile, Metadata.OLD_METADATA_FILENAME));
 
-    final BatchSchema expectedSchema = new SchemaBuilder().build();
+    final BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(new SchemaBuilder())
+        .build();
 
     testBuilder()
         .sqlQuery("select * from dfs.tmp.`%s`", emptyDirNameWithMetadataFile)
@@ -893,7 +896,9 @@ public class TestParquetMetadataCache extends PlanTestBase {
             Paths.get("parquet", "metadata_files_with_old_versions", "v3_1", "metadata_directories.requires_replace.txt"),
             Paths.get(emptyDirNameWithMetadataFile, Metadata.METADATA_DIRECTORIES_FILENAME));
 
-    final BatchSchema expectedSchema = new SchemaBuilder().build();
+    final BatchSchema expectedSchema = new BatchSchemaBuilder()
+        .withSchemaBuilder(new SchemaBuilder())
+        .build();
 
     testBuilder()
             .sqlQuery("select * from dfs.tmp.`%s`", emptyDirNameWithMetadataFile)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java
index b43e9d7..2cf823b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java
@@ -35,6 +35,7 @@ import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.proto.UserBitShared.QueryType;
 import org.apache.drill.exec.proto.UserProtos.QueryPlanFragments;
 import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.exec.rpc.DrillRpcFuture;
 import org.apache.drill.exec.rpc.RpcException;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
@@ -278,7 +279,7 @@ public class ClientFixture implements AutoCloseable {
    * before the SELECT statement.
    *
    * @param controls the controls string created by
-   * {@link Controls#newBuilder()} builder.
+   * {@link org.apache.drill.exec.testing.Controls#newBuilder()} builder.
    */
 
   public void setControls(String controls) {
@@ -290,6 +291,10 @@ public class ClientFixture implements AutoCloseable {
     return new RowSetBuilder(allocator(), schema);
   }
 
+  public RowSetBuilder rowSetBuilder(TupleMetadata schema) {
+    return new RowSetBuilder(allocator(), schema);
+  }
+
   /**
    * Very simple parser for semi-colon separated lists of SQL statements which
    * handles quoted semicolons. Drill can execute only one statement at a time
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ExampleTest.java b/exec/java-exec/src/test/java/org/apache/drill/test/ExampleTest.java
index afa1a87..8de59cb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ExampleTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ExampleTest.java
@@ -31,8 +31,8 @@ import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.memory.RootAllocator;
 import org.apache.drill.exec.physical.impl.xsort.managed.ExternalSortBatch;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
+import org.apache.drill.exec.record.metadata.TupleMetadata;
 import org.apache.drill.test.LogFixture.LogFixtureBuilder;
 import org.apache.drill.test.QueryBuilder.QuerySummary;
 import org.apache.drill.test.rowSet.RowSet;
@@ -119,10 +119,10 @@ public class ExampleTest {
         .resolve("employee.json")
         .toFile();
 
-      final BatchSchema schema = new SchemaBuilder()
+      final TupleMetadata schema = new SchemaBuilder()
         .add("id", Types.required(TypeProtos.MinorType.VARCHAR))
         .add("name", Types.required(TypeProtos.MinorType.VARCHAR))
-        .build();
+        .buildSchema();
 
       final RowSet rowSet = new RowSetBuilder(allocator, schema)
         .addRow("1", "kiwi")
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRepeatedListAccessors.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRepeatedListAccessors.java
index 72c145e..321fae4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRepeatedListAccessors.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestRepeatedListAccessors.java
@@ -33,6 +33,7 @@ import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.VectorContainer;
+import org.apache.drill.exec.record.BatchSchemaBuilder;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
@@ -76,10 +77,12 @@ public class TestRepeatedListAccessors extends SubOperatorTest {
 
   @Test
   public void testSchemaIncompleteBatch() {
-    final BatchSchema schema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .add("id", MinorType.INT)
-        .addRepeatedList("list2")
-          .resumeSchema()
+          .addRepeatedList("list2")
+          .resumeSchema();
+    BatchSchema schema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     assertEquals(2, schema.getFieldCount());
@@ -113,11 +116,13 @@ public class TestRepeatedListAccessors extends SubOperatorTest {
 
   @Test
   public void testSchema2DBatch() {
-    final BatchSchema schema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .add("id", MinorType.INT)
         .addRepeatedList("list2")
           .addArray(MinorType.VARCHAR)
-          .resumeSchema()
+          .resumeSchema();
+    BatchSchema schema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     assertEquals(2, schema.getFieldCount());
@@ -172,13 +177,15 @@ public class TestRepeatedListAccessors extends SubOperatorTest {
 
   @Test
   public void testSchema3DBatch() {
-    final BatchSchema schema = new SchemaBuilder()
+    SchemaBuilder schemaBuilder = new SchemaBuilder()
         .add("id", MinorType.INT)
         .addRepeatedList("list2")
           .addDimension()
             .addArray(MinorType.VARCHAR)
             .resumeList()
-          .resumeSchema()
+            .resumeSchema();
+    BatchSchema schema = new BatchSchemaBuilder()
+        .withSchemaBuilder(schemaBuilder)
         .build();
 
     assertEquals(2, schema.getFieldCount());
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestVariantAccessors.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestVariantAccessors.java
index 9fbb374..8542c36 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestVariantAccessors.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/TestVariantAccessors.java
@@ -29,7 +29,6 @@ import java.util.List;
 import org.apache.drill.categories.RowSetTests;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.metadata.SchemaBuilder;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
@@ -991,9 +990,9 @@ public class TestVariantAccessors extends SubOperatorTest {
 
   @Test
   public void testAddTypes() {
-    final BatchSchema batchSchema = new SchemaBuilder()
+    final TupleMetadata batchSchema = new SchemaBuilder()
         .addNullable("v", MinorType.UNION)
-        .build();
+        .buildSchema();
 
     final ExtendableRowSet rs = fixture.rowSet(batchSchema);
     final RowSetWriter writer = rs.writer();
diff --git a/exec/vector/pom.xml b/exec/vector/pom.xml
index 830b545..8d31477 100644
--- a/exec/vector/pom.xml
+++ b/exec/vector/pom.xml
@@ -72,6 +72,10 @@
       <artifactId>hppc</artifactId>
       <version>0.7.1</version>
     </dependency>
+    <dependency>
+      <groupId>org.antlr</groupId>
+      <artifactId>antlr4-runtime</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
@@ -122,6 +126,23 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.antlr</groupId>
+        <artifactId>antlr4-maven-plugin</artifactId>
+        <version>${antlr.version}</version>
+        <configuration>
+          <listener>false</listener>
+          <visitor>true</visitor>
+          <outputDirectory>${project.build.directory}/generated-sources</outputDirectory>
+        </configuration>
+        <executions>
+          <execution>
+            <goals>
+              <goal>antlr4</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
     <pluginManagement>
       <plugins>
diff --git a/exec/java-exec/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaLexer.g4 b/exec/vector/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaLexer.g4
similarity index 100%
rename from exec/java-exec/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaLexer.g4
rename to exec/vector/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaLexer.g4
diff --git a/exec/java-exec/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaParser.g4 b/exec/vector/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaParser.g4
similarity index 100%
rename from exec/java-exec/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaParser.g4
rename to exec/vector/src/main/antlr4/org/apache/drill/exec/record/metadata/schema/parser/SchemaParser.g4
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/AbstractColumnMetadata.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/ColumnBuilder.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/ColumnBuilder.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/ColumnBuilder.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/ColumnBuilder.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapBuilder.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/MapBuilder.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapBuilder.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/MapBuilder.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapColumnMetadata.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/MapColumnMetadata.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MapColumnMetadata.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/MapColumnMetadata.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java
similarity index 78%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java
index f1cf45a..0ded113 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/MetadataUtils.java
@@ -19,15 +19,10 @@ package org.apache.drill.exec.record.metadata;
 
 import java.util.List;
 
-import org.apache.drill.common.exceptions.DrillRuntimeException;
-import org.apache.drill.common.expression.PathSegment;
-import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
 import org.apache.drill.common.types.Types;
-import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.MaterializedField;
 
 public class MetadataUtils {
@@ -114,14 +109,6 @@ public class MetadataUtils {
     return tuple;
   }
 
-  public static TupleMetadata fromBatchSchema(BatchSchema batchSchema) {
-    TupleSchema tuple = new TupleSchema();
-    for (MaterializedField field : batchSchema) {
-      tuple.add(fromView(field));
-    }
-    return tuple;
-  }
-
   /**
    * Create a column metadata object for a map column, given the
    * {@link MaterializedField} that describes the column, and a list
@@ -208,36 +195,4 @@ public class MetadataUtils {
     return new PrimitiveColumnMetadata(field);
   }
 
-  /**
-   * Adds column with specified schema path and type into specified {@code TupleMetadata schema}.
-   *
-   * @param schema     tuple schema where column should be added
-   * @param schemaPath schema path of the column which should be added
-   * @param type       type of the column which should be added
-   */
-  public static void addColumnMetadata(TupleMetadata schema, SchemaPath schemaPath, TypeProtos.MajorType type) {
-    PathSegment.NameSegment colPath = schemaPath.getUnIndexed().getRootSegment();
-    ColumnMetadata colMetadata;
-
-    while (!colPath.isLastPath()) {
-      colMetadata = schema.metadata(colPath.getPath());
-      if (colMetadata == null) {
-        colMetadata = MetadataUtils.newMap(colPath.getPath(), null);
-        schema.addColumn(colMetadata);
-      }
-      if (!colMetadata.isMap()) {
-        throw new DrillRuntimeException(String.format("Expected map, but was %s", colMetadata.majorType()));
-      }
-
-      schema = colMetadata.mapSchema();
-      colPath = (PathSegment.NameSegment) colPath.getChild();
-    }
-
-    colMetadata = schema.metadata(colPath.getPath());
-    if (colMetadata == null) {
-      schema.addColumn(new PrimitiveColumnMetadata(MaterializedField.create(colPath.getPath(), type)));
-    } else if (!colMetadata.majorType().equals(type)) {
-      throw new DrillRuntimeException(String.format("Types mismatch: existing type: %s, new type: %s", colMetadata.majorType(), type));
-    }
-  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
similarity index 98%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
index 1e0b5b7..de8b668 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/PrimitiveColumnMetadata.java
@@ -21,7 +21,7 @@ import org.apache.drill.common.types.BooleanType;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.exec.expr.TypeHelper;
+import org.apache.drill.exec.expr.BasicTypeHelper;
 import org.apache.drill.exec.record.MaterializedField;
 import org.joda.time.Instant;
 import org.joda.time.LocalDate;
@@ -79,10 +79,10 @@ public class PrimitiveColumnMetadata extends AbstractColumnMetadata {
       } else {
         // TypeHelper includes the offset vector width
 
-        return TypeHelper.getSize(majorType) - 4;
+        return BasicTypeHelper.getSize(majorType) - 4;
       }
     } else {
-      return TypeHelper.getSize(majorType);
+      return BasicTypeHelper.getSize(majorType);
     }
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/RepeatedListBuilder.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/RepeatedListBuilder.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/RepeatedListBuilder.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/RepeatedListBuilder.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/RepeatedListColumnMetadata.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/RepeatedListColumnMetadata.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/RepeatedListColumnMetadata.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/RepeatedListColumnMetadata.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/SchemaBuilder.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/SchemaBuilder.java
similarity index 91%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/SchemaBuilder.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/SchemaBuilder.java
index ff68dee..5d9fcba 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/SchemaBuilder.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/SchemaBuilder.java
@@ -20,8 +20,6 @@ package org.apache.drill.exec.record.metadata;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.exec.record.BatchSchema;
-import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
 import org.apache.drill.exec.record.MaterializedField;
 
 /**
@@ -74,22 +72,10 @@ public class SchemaBuilder implements SchemaContainer {
    */
 
   private TupleBuilder tupleBuilder = new TupleBuilder();
-  private SelectionVectorMode svMode = SelectionVectorMode.NONE;
 
   public SchemaBuilder() { }
 
   /**
-   * Create a new schema starting with the base schema. Allows appending
-   * additional columns to an additional schema.
-   */
-
-  public SchemaBuilder(BatchSchema baseSchema) {
-    for (MaterializedField field : baseSchema) {
-      add(field);
-    }
-  }
-
-  /**
    * Create a column schema using the "basic three" properties of name, type and
    * cardinality (AKA "data mode.") Use the {@link ColumnBuilder} for to set
    * other schema attributes. Name is relative to the enclosing map or tuple;
@@ -208,15 +194,6 @@ public class SchemaBuilder implements SchemaContainer {
     return tupleBuilder.addRepeatedList(this, name);
   }
 
-  public SchemaBuilder withSVMode(SelectionVectorMode svMode) {
-    this.svMode = svMode;
-    return this;
-  }
-
-  public BatchSchema build() {
-    return tupleBuilder.batchSchema(svMode);
-  }
-
   public TupleMetadata buildSchema() {
     return tupleBuilder.schema();
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/SchemaContainer.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/SchemaContainer.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/SchemaContainer.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/SchemaContainer.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/TupleBuilder.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/TupleBuilder.java
similarity index 95%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/TupleBuilder.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/TupleBuilder.java
index d5ffebb..7ce2607 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/TupleBuilder.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/TupleBuilder.java
@@ -20,8 +20,6 @@ package org.apache.drill.exec.record.metadata;
 import org.apache.drill.common.types.TypeProtos.DataMode;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.exec.record.BatchSchema;
-import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
 import org.apache.drill.exec.record.MaterializedField;
 
 /**
@@ -146,10 +144,6 @@ public class TupleBuilder implements SchemaContainer {
     return new RepeatedListBuilder(parent, name);
   }
 
-  public BatchSchema batchSchema(SelectionVectorMode svMode) {
-    return schema.toBatchSchema(svMode);
-  }
-
   public TupleSchema schema() {
     return schema;
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java
similarity index 94%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java
index 38baaee..080e8fc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/TupleSchema.java
@@ -22,8 +22,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonPropertyOrder;
-import org.apache.drill.exec.record.BatchSchema;
-import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
 import org.apache.drill.exec.record.MaterializedField;
 
 import java.util.ArrayList;
@@ -174,14 +172,6 @@ public class TupleSchema extends AbstractPropertied implements TupleMetadata {
     return new ArrayList<>(nameSpace.entries());
   }
 
-  public BatchSchema toBatchSchema(SelectionVectorMode svMode) {
-    return new BatchSchema(svMode, toFieldList());
-  }
-
-  public static BatchSchema toBatchSchema(TupleMetadata schema) {
-    return ((TupleSchema) schema).toBatchSchema(SelectionVectorMode.NONE);
-  }
-
   @Override
   public String fullName(int index) {
     return fullName(metadata(index));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/UnionBuilder.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/UnionBuilder.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/UnionBuilder.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/UnionBuilder.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/VariantColumnMetadata.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/VariantColumnMetadata.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/VariantColumnMetadata.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/VariantColumnMetadata.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/VariantSchema.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/VariantSchema.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/VariantSchema.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/VariantSchema.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/package-info.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/package-info.java
similarity index 90%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/package-info.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/package-info.java
index 5f1e0fa..855ad78 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/package-info.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/package-info.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 /**
- * Provides a fluent schema builder for use in tests. Handles all
+ * Provides a fluent schema builder. Handles all
  * forms of Drill schemas, with emphasis on ease of use for the typical
  * cases (flat schema or nested maps.) Enables construction of unions,
  * union lists (AKA "list vector") repeated lists and combinations of
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaExprParser.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaExprParser.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaExprParser.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaExprParser.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaParsingException.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaParsingException.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaParsingException.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaParsingException.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaVisitor.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaVisitor.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaVisitor.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/SchemaVisitor.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/UpperCaseCharStream.java b/exec/vector/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/UpperCaseCharStream.java
similarity index 100%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/UpperCaseCharStream.java
rename to exec/vector/src/main/java/org/apache/drill/exec/record/metadata/schema/parser/UpperCaseCharStream.java
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java
similarity index 100%
rename from exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java
rename to exec/vector/src/test/java/org/apache/drill/exec/record/metadata/TestMetadataProperties.java
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestParserErrorHandling.java b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestParserErrorHandling.java
similarity index 100%
rename from exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestParserErrorHandling.java
rename to exec/vector/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestParserErrorHandling.java
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java b/exec/vector/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java
similarity index 100%
rename from exec/java-exec/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java
rename to exec/vector/src/test/java/org/apache/drill/exec/record/metadata/schema/parser/TestSchemaParser.java
diff --git a/metastore/metastore-api/src/main/java/org/apache/drill/metastore/util/SchemaPathUtils.java b/metastore/metastore-api/src/main/java/org/apache/drill/metastore/util/SchemaPathUtils.java
index 5d66035..2bbf3c1 100644
--- a/metastore/metastore-api/src/main/java/org/apache/drill/metastore/util/SchemaPathUtils.java
+++ b/metastore/metastore-api/src/main/java/org/apache/drill/metastore/util/SchemaPathUtils.java
@@ -17,9 +17,14 @@
  */
 package org.apache.drill.metastore.util;
 
+import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.expression.PathSegment;
 import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.metadata.ColumnMetadata;
+import org.apache.drill.exec.record.metadata.MetadataUtils;
+import org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata;
 import org.apache.drill.exec.record.metadata.TupleMetadata;
 
 public class SchemaPathUtils {
@@ -50,4 +55,36 @@ public class SchemaPathUtils {
   }
 
 
+  /**
+   * Adds column with specified schema path and type into specified {@code TupleMetadata schema}.
+   *
+   * @param schema     tuple schema where column should be added
+   * @param schemaPath schema path of the column which should be added
+   * @param type       type of the column which should be added
+   */
+  public static void addColumnMetadata(TupleMetadata schema, SchemaPath schemaPath, TypeProtos.MajorType type) {
+    PathSegment.NameSegment colPath = schemaPath.getUnIndexed().getRootSegment();
+    ColumnMetadata colMetadata;
+
+    while (!colPath.isLastPath()) {
+      colMetadata = schema.metadata(colPath.getPath());
+      if (colMetadata == null) {
+        colMetadata = MetadataUtils.newMap(colPath.getPath(), null);
+        schema.addColumn(colMetadata);
+      }
+      if (!colMetadata.isMap()) {
+        throw new DrillRuntimeException(String.format("Expected map, but was %s", colMetadata.majorType()));
+      }
+
+      schema = colMetadata.mapSchema();
+      colPath = (PathSegment.NameSegment) colPath.getChild();
+    }
+
+    colMetadata = schema.metadata(colPath.getPath());
+    if (colMetadata == null) {
+      schema.addColumn(new PrimitiveColumnMetadata(MaterializedField.create(colPath.getPath(), type)));
+    } else if (!colMetadata.majorType().equals(type)) {
+      throw new DrillRuntimeException(String.format("Types mismatch: existing type: %s, new type: %s", colMetadata.majorType(), type));
+    }
+  }
 }


[drill] 01/02: DRILL-6711: Use jitpack repository for Drill Calcite project artifacts instead of repository.mapr.com

Posted by ar...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

arina pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit e8d9b0ae6253db10fa5c2e8b5aeb4eecb3a1feb9
Author: Volodymyr Vysotskyi <vv...@gmail.com>
AuthorDate: Mon Jun 24 14:52:59 2019 +0300

    DRILL-6711: Use jitpack repository for Drill Calcite project artifacts instead of repository.mapr.com
    
    closes #1815
---
 common/pom.xml                               |  2 +-
 contrib/storage-hive/core/pom.xml            | 14 ++++++++++-
 contrib/storage-hive/hive-exec-shade/pom.xml |  4 ++--
 docs/dev/Calcite.md                          | 26 +++++++++++++++++++++
 exec/java-exec/pom.xml                       |  4 ++--
 exec/jdbc-all/pom.xml                        |  8 +++----
 logical/pom.xml                              |  2 +-
 pom.xml                                      | 35 +++++++---------------------
 8 files changed, 57 insertions(+), 38 deletions(-)

diff --git a/common/pom.xml b/common/pom.xml
index 8c71404..8c9593c 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -46,7 +46,7 @@
     </dependency>
 
     <dependency>
-      <groupId>org.apache.calcite</groupId>
+      <groupId>com.github.vvysotskyi.drill-calcite</groupId>
       <artifactId>calcite-core</artifactId>
     </dependency>
 
diff --git a/contrib/storage-hive/core/pom.xml b/contrib/storage-hive/core/pom.xml
index 9912b83..88003ae 100644
--- a/contrib/storage-hive/core/pom.xml
+++ b/contrib/storage-hive/core/pom.xml
@@ -96,7 +96,7 @@
       </exclusions>
     </dependency>
     <dependency>
-      <groupId>org.apache.calcite</groupId>
+      <groupId>com.github.vvysotskyi.drill-calcite</groupId>
       <artifactId>calcite-core</artifactId>
     </dependency>
     <dependency>
@@ -150,6 +150,18 @@
       <scope>test</scope>
       <exclusions>
         <exclusion>
+          <groupId>org.apache.calcite</groupId>
+          <artifactId>calcite-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.calcite</groupId>
+          <artifactId>calcite-linq4j</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.calcite</groupId>
+          <artifactId>calcite-druid</artifactId>
+        </exclusion>
+        <exclusion>
           <groupId>commons-logging</groupId>
           <artifactId>commons-logging</artifactId>
         </exclusion>
diff --git a/contrib/storage-hive/hive-exec-shade/pom.xml b/contrib/storage-hive/hive-exec-shade/pom.xml
index a767000..4e79444 100644
--- a/contrib/storage-hive/hive-exec-shade/pom.xml
+++ b/contrib/storage-hive/hive-exec-shade/pom.xml
@@ -65,8 +65,8 @@
           <artifactId>calcite-core</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>org.apache.calcite</groupId>
-          <artifactId>calcite-avatica</artifactId>
+          <groupId>org.apache.calcite.avatica</groupId>
+          <artifactId>avatica</artifactId>
         </exclusion>
         <exclusion>
           <groupId>org.apache.calcite</groupId>
diff --git a/docs/dev/Calcite.md b/docs/dev/Calcite.md
new file mode 100644
index 0000000..39d5fe4
--- /dev/null
+++ b/docs/dev/Calcite.md
@@ -0,0 +1,26 @@
+# Drill-specific commits in Apache Calcite
+
+Currently, Drill uses Apache Calcite with additional changes required for Drill. All the commits were left after 
+the update from Calcite `1.4.0` to Calcite `1.15.0` and weren't merged into Calcite's master yet since there is no consensus on them in the Calcite community.
+
+List of Jiras with Drill-specific commits:
+
+|Jira|Summary|The reason why it wasn't merged|
+|----|-------|-------------------------------|
+|[CALCITE-2018](https://issues.apache.org/jira/browse/CALCITE-2018)|Queries failed with AssertionError: rel has lower cost than best cost of subset|Pull request with the fix was created ([PR-552](https://github.com/apache/calcite/pull/552)), but [CALCITE-2166](https://issues.apache.org/jira/browse/CALCITE-2166) which blocks it was found and is not resolved yet.|
+|[CALCITE-2087](https://issues.apache.org/jira/browse/CALCITE-2087)|Add new method to ViewExpander interface to allow passing SchemaPlus.|Pull request into Apache Calcite was created, but it was declined. See conversation in Jira.|
+|[CALCITE-1178](https://issues.apache.org/jira/browse/CALCITE-1178)|Allow SqlBetweenOperator to compare DATE and TIMESTAMP|The SQL spec does not allow comparing datetime types if they have different `<primary datetime field>`s. Therefore the Calcite community won’t accept these changes. Similar issues were reported in [CALCITE-2829](https://issues.apache.org/jira/browse/CALCITE-2829) and in [CALCITE-2745](https://issues.apache.org/jira/browse/CALCITE-2745).|
+
+# Drill-Calcite repository
+
+The repository with the source code is located [here](https://github.com/vvysotskyi/drill-calcite). For backward 
+compatibility, a couple of previous Drill Calcite branches were pushed into this repo.
+
+Drill committers who need write permissions to the repository should notify its owner.
+
+# Process of updating Calcite version
+
+- Push required changes to the existing branch, or create a new branch. *Though this repository contains Drill-specific commits, it is forbidden to add further specific commits that were not merged to the Apache Calcite master unless this was first discussed in the Drill community.*
+- The last commit must be a commit which updates the version number.
+- Create and push tag for commit with the version update. Tag name should match the version, for example, `1.18.0-drill-r0`, `1.18.0-drill-r1`, `1.18.0-drill-r2` and so on.
+- Bump-up Drill Calcite version in Drill pom.xml file.
\ No newline at end of file
diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index 6387960..4e42616 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -207,7 +207,7 @@
       </exclusions>
     </dependency>
     <dependency>
-      <groupId>org.apache.calcite</groupId>
+      <groupId>com.github.vvysotskyi.drill-calcite</groupId>
       <artifactId>calcite-core</artifactId>
     </dependency>
     <dependency>
@@ -677,7 +677,7 @@
             <configuration>
               <artifactItems>
                 <artifactItem>
-                  <groupId>org.apache.calcite</groupId>
+                  <groupId>com.github.vvysotskyi.drill-calcite</groupId>
                   <artifactId>calcite-core</artifactId>
                   <type>jar</type>
                   <overWrite>true</overWrite>
diff --git a/exec/jdbc-all/pom.xml b/exec/jdbc-all/pom.xml
index a7effe3..e782d46 100644
--- a/exec/jdbc-all/pom.xml
+++ b/exec/jdbc-all/pom.xml
@@ -300,8 +300,8 @@
             </includes>
             <excludes>
               <exclude>io.protostuff:*</exclude>
-              <exclude>org.apache.calcite:calcite-core</exclude>
-              <exclude>org.apache.calcite:calcite-linq4j</exclude>
+              <exclude>com.github.vvysotskyi.drill-calcite:calcite-core</exclude>
+              <exclude>com.github.vvysotskyi.drill-calcite:calcite-linq4j</exclude>
               <exclude>org.pentaho:*</exclude>
               <exclude>org.msgpack:*</exclude>
               <exclude>xerces:*</exclude>
@@ -615,8 +615,8 @@
                   <excludes>
                     <exclude>org.slf4j:jcl-over-slf4j</exclude>
                     <exclude>io.protostuff:*</exclude>
-                    <exclude>org.apache.calcite:calcite-core</exclude>
-                    <exclude>org.apache.calcite:calcite-linq4j</exclude>
+                    <exclude>com.github.vvysotskyi.drill-calcite:calcite-core</exclude>
+                    <exclude>com.github.vvysotskyi.drill-calcite:calcite-linq4j</exclude>
                     <exclude>org.pentaho:*</exclude>
                     <exclude>org.msgpack:*</exclude>
                     <exclude>xerces:*</exclude>
diff --git a/logical/pom.xml b/logical/pom.xml
index b1e8ab8..a685bb2 100644
--- a/logical/pom.xml
+++ b/logical/pom.xml
@@ -59,7 +59,7 @@
     </dependency>
 
     <dependency>
-      <groupId>org.apache.calcite</groupId>
+      <groupId>com.github.vvysotskyi.drill-calcite</groupId>
       <artifactId>calcite-core</artifactId>
     </dependency>
 
diff --git a/pom.xml b/pom.xml
index 550d8ff..5626bb1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -103,8 +103,8 @@
   </properties>
 
   <scm>
-    <connection>scm:git:https://git-wip-us.apache.org/repos/asf/drill.git</connection>
-    <developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/drill.git</developerConnection>
+    <connection>scm:git:https://gitbox.apache.org/repos/asf/drill.git</connection>
+    <developerConnection>scm:git:https://gitbox.apache.org/repos/asf/drill.git</developerConnection>
     <url>https://github.com/apache/drill</url>
     <tag>HEAD</tag>
   </scm>
@@ -167,10 +167,8 @@
     </repository>
 
     <repository>
-      <!-- Keep until we move back to released calcite versions. -->
-      <id>mapr-drill-optiq-snapshots</id>
-      <name>MapR Drill Optiq Snapshots</name>
-      <url>http://repository.mapr.com/nexus/content/repositories/drill-optiq/</url>
+      <id>sonatype-nexus-snapshots</id>
+      <url>https://oss.sonatype.org/content/repositories/snapshots</url>
       <releases>
         <enabled>false</enabled>
       </releases>
@@ -180,26 +178,8 @@
     </repository>
 
     <repository>
-      <id>mapr-drill-thirdparty</id>
-      <name>MapR Drill Third Party Artifacts</name>
-      <url>http://repository.mapr.com/nexus/content/repositories/drill/</url>
-      <releases>
-        <enabled>true</enabled>
-      </releases>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-
-    <repository>
-      <id>sonatype-nexus-snapshots</id>
-      <url>https://oss.sonatype.org/content/repositories/snapshots</url>
-      <releases>
-        <enabled>false</enabled>
-      </releases>
-      <snapshots>
-        <enabled>true</enabled>
-      </snapshots>
+      <id>jitpack.io</id>
+      <url>https://jitpack.io</url>
     </repository>
   </repositories>
 
@@ -527,6 +507,7 @@
                     <exclude>jdk.tools:jdk.tools</exclude>
                     <exclude>org.json:json</exclude>
                     <exclude>org.beanshell:bsh</exclude>
+                    <exclude>org.apache.calcite:*</exclude>
                   </excludes>
                 </bannedDependencies>
               </rules>
@@ -1058,7 +1039,7 @@
   <dependencyManagement>
     <dependencies>
       <dependency>
-        <groupId>org.apache.calcite</groupId>
+        <groupId>com.github.vvysotskyi.drill-calcite</groupId>
         <artifactId>calcite-core</artifactId>
         <version>${calcite.version}</version>
         <exclusions>