Posted to commits@drill.apache.org by hs...@apache.org on 2016/05/03 07:41:07 UTC

drill git commit: DRILL-4577: Construct a specific path for querying all the tables from a hive database

Repository: drill
Updated Branches:
  refs/heads/master 5705d4509 -> b8f6ebc65


DRILL-4577: Construct a specific path for querying all the tables from a hive database


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/b8f6ebc6
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/b8f6ebc6
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/b8f6ebc6

Branch: refs/heads/master
Commit: b8f6ebc651445ccecd3e393250f6cd2781fc07e3
Parents: 5705d45
Author: Hsuan-Yi Chu <hs...@usc.edu>
Authored: Mon Apr 4 16:05:04 2016 -0700
Committer: hsuanyi <hs...@apache.org>
Committed: Mon May 2 21:41:32 2016 -0700

----------------------------------------------------------------------
 .../store/hive/schema/HiveDatabaseSchema.java   |  71 ++++++++++-
 .../hive/BaseTestHiveImpersonation.java         |  21 ++++
 .../hive/TestStorageBasedHiveAuthorization.java | 125 +++++++++++++++++++
 .../org/apache/drill/exec/ExecConstants.java    |   3 +
 .../server/options/SystemOptionManager.java     |   3 +-
 .../apache/drill/exec/store/AbstractSchema.java |  39 +++++-
 .../store/ischema/InfoSchemaBatchCreator.java   |   2 +-
 .../exec/store/ischema/InfoSchemaTable.java     |  23 ++--
 .../exec/store/ischema/RecordGenerator.java     |  85 ++++++++++---
 .../drill/exec/store/ischema/SelectedTable.java |   5 +-
 10 files changed, 344 insertions(+), 33 deletions(-)
----------------------------------------------------------------------
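
A usage sketch (not part of the commit message; the session-level syntax and the hive.default schema name below are assumptions for illustration): the new exec.enable_bulk_load_table_list option, off by default, lets an INFORMATION_SCHEMA.`TABLES` query fetch a schema's table metadata in a single bulk call where the storage plugin supports it, as the Hive schema change and tests below exercise.

  ALTER SESSION SET `exec.enable_bulk_load_table_list` = true;

  SELECT TABLE_SCHEMA, TABLE_NAME, TABLE_TYPE
  FROM INFORMATION_SCHEMA.`TABLES`
  WHERE TABLE_SCHEMA = 'hive.default';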


http://git-wip-us.apache.org/repos/asf/drill/blob/b8f6ebc6/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveDatabaseSchema.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveDatabaseSchema.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveDatabaseSchema.java
index 6f43639..ff61f8d 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveDatabaseSchema.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveDatabaseSchema.java
@@ -17,17 +17,25 @@
  */
 package org.apache.drill.exec.store.hive.schema;
 
+import java.util.List;
 import java.util.Set;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+import org.apache.calcite.rel.type.RelDataType;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
+import org.apache.calcite.schema.Schema;
+import org.apache.calcite.schema.Statistic;
 import org.apache.calcite.schema.Table;
 
+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.drill.exec.store.AbstractSchema;
 import org.apache.drill.exec.store.SchemaConfig;
 import org.apache.drill.exec.store.hive.DrillHiveMetaStoreClient;
 import org.apache.drill.exec.store.hive.HiveStoragePluginConfig;
 import org.apache.drill.exec.store.hive.schema.HiveSchemaFactory.HiveSchema;
 
-import com.google.common.collect.Sets;
 import org.apache.thrift.TException;
 
 public class HiveDatabaseSchema extends AbstractSchema{
@@ -72,4 +80,65 @@ public class HiveDatabaseSchema extends AbstractSchema{
     return HiveStoragePluginConfig.NAME;
   }
 
+  @Override
+  public List<Pair<String, ? extends Table>> getTablesByNamesByBulkLoad(final List<String> tableNames) {
+    final String schemaName = getName();
+    final List<Pair<String, ? extends Table>> tableNameToTable = Lists.newArrayList();
+    List<org.apache.hadoop.hive.metastore.api.Table> tables;
+    // Retries once if the first call to fetch the metadata fails
+    synchronized(mClient) {
+      try {
+        tables = mClient.getTableObjectsByName(schemaName, tableNames);
+      } catch(TException tException) {
+        try {
+          mClient.reconnect();
+          tables = mClient.getTableObjectsByName(schemaName, tableNames);
+        } catch(Exception e) {
+          logger.warn("Exception occurred while trying to read tables from {}: {}", schemaName, e.getCause());
+          return tableNameToTable;
+        }
+      }
+    }
+
+    for(final org.apache.hadoop.hive.metastore.api.Table table : tables) {
+      if(table == null) {
+        continue;
+      }
+
+      final String tableName = table.getTableName();
+      final TableType tableType;
+      if(table.getTableType().equals(org.apache.hadoop.hive.metastore.TableType.VIRTUAL_VIEW.toString())) {
+        tableType = TableType.VIEW;
+      } else {
+        tableType = TableType.TABLE;
+      }
+      tableNameToTable.add(Pair.of(
+          tableName,
+          new HiveTableWithoutStatisticAndRowType(tableType)));
+    }
+    return tableNameToTable;
+  }
+
+  private static class HiveTableWithoutStatisticAndRowType implements Table {
+    private final TableType tableType;
+
+    public HiveTableWithoutStatisticAndRowType(final TableType tableType) {
+      this.tableType = tableType;
+    }
+
+    @Override
+    public RelDataType getRowType(RelDataTypeFactory typeFactory) {
+      throw new UnsupportedOperationException("RowType was not retrieved when this table had been being requested");
+    }
+
+    @Override
+    public Statistic getStatistic() {
+      throw new UnsupportedOperationException("Statistic was not retrieved when this table had been being requested");
+    }
+
+    @Override
+    public Schema.TableType getJdbcTableType() {
+      return tableType;
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/b8f6ebc6/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
index 35c9d64..66c595f 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill.exec.impersonation.hive;
 
+import org.apache.calcite.schema.Schema.TableType;
 import org.apache.drill.TestBuilder;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.dotdrill.DotDrillType;
@@ -124,6 +125,26 @@ public class BaseTestHiveImpersonation extends BaseTestImpersonation {
     testBuilder.go();
   }
 
+  protected void fromInfoSchemaHelper(final String pluginName, final String db, List<String> expectedTables, List<TableType> expectedTableTypes) throws Exception {
+    final String dbQualified = pluginName + "." + db;
+    final TestBuilder testBuilder = testBuilder()
+        .sqlQuery("SELECT TABLE_SCHEMA, TABLE_NAME, TABLE_TYPE \n" +
+            "FROM INFORMATION_SCHEMA.`TABLES` \n" +
+            "WHERE TABLE_SCHEMA = '" + dbQualified + "'")
+        .unOrdered()
+        .baselineColumns("TABLE_SCHEMA", "TABLE_NAME", "TABLE_TYPE");
+
+    if (expectedTables.size() == 0) {
+      testBuilder.expectsEmptyResultSet();
+    } else {
+      for (int i = 0; i < expectedTables.size(); ++i) {
+        testBuilder.baselineValues(dbQualified, expectedTables.get(i), expectedTableTypes.get(i).toString());
+      }
+    }
+
+    testBuilder.go();
+  }
+
   protected static void createView(final String viewOwner, final String viewGroup, final String viewName,
                                  final String viewDef) throws Exception {
     updateClient(viewOwner);

http://git-wip-us.apache.org/repos/asf/drill/blob/b8f6ebc6/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
index 21559c9..978c34e 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
@@ -19,6 +19,8 @@ package org.apache.drill.exec.impersonation.hive;
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Maps;
+
+import org.apache.calcite.schema.Schema.TableType;
 import org.apache.drill.exec.store.dfs.WorkspaceConfig;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -269,6 +271,47 @@ public class TestStorageBasedHiveAuthorization extends BaseTestHiveImpersonation
   }
 
   @Test
+  public void fromInfoSchemaUser0() throws Exception {
+    updateClient(org1Users[0]);
+
+    fromInfoSchemaHelper(
+        hivePluginName,
+        db_general,
+        ImmutableList.of(
+            g_student_u0_700,
+            g_student_u0g0_750,
+            g_student_all_755,
+            g_voter_all_755,
+            g_partitioned_student_u0_700
+        ),
+        ImmutableList.of(
+            TableType.TABLE,
+            TableType.TABLE,
+            TableType.TABLE,
+            TableType.TABLE,
+            TableType.TABLE
+        ));
+
+    fromInfoSchemaHelper(
+        hivePluginName,
+        db_u0_only,
+        ImmutableList.of(
+            u0_student_all_755,
+            u0_voter_all_755
+        ),
+        ImmutableList.of(
+            TableType.TABLE,
+            TableType.TABLE
+        ));
+
+    fromInfoSchemaHelper(
+        hivePluginName,
+        db_u1g1_only,
+        Collections.<String>emptyList(),
+        Collections.<TableType>emptyList());
+  }
+
+  @Test
   public void showTablesUser1() throws Exception {
     updateClient(org1Users[1]);
 
@@ -293,6 +336,51 @@ public class TestStorageBasedHiveAuthorization extends BaseTestHiveImpersonation
   }
 
   @Test
+  public void fromInfoSchemaUser1() throws Exception {
+    updateClient(org1Users[1]);
+
+    fromInfoSchemaHelper(
+        hivePluginName,
+        db_general,
+        ImmutableList.of(
+            g_student_u0g0_750,
+            g_student_all_755,
+            g_voter_u1_700,
+            g_voter_u2g1_750,
+            g_voter_all_755
+        ),
+        ImmutableList.of(
+            TableType.TABLE,
+            TableType.TABLE,
+            TableType.TABLE,
+            TableType.TABLE,
+            TableType.TABLE
+        ));
+
+    fromInfoSchemaHelper(
+        hivePluginName,
+        db_u1g1_only,
+        ImmutableList.of(
+            u1g1_student_all_755,
+            u1g1_student_u1_700,
+            u1g1_voter_all_755,
+            u1g1_voter_u1_700
+        ),
+        ImmutableList.of(
+            TableType.TABLE,
+            TableType.TABLE,
+            TableType.TABLE,
+            TableType.TABLE
+        ));
+
+    fromInfoSchemaHelper(
+        hivePluginName,
+        db_u0_only,
+        Collections.<String>emptyList(),
+        Collections.<TableType>emptyList());
+  }
+
+  @Test
   public void showTablesUser2() throws Exception {
     updateClient(org1Users[2]);
 
@@ -312,6 +400,43 @@ public class TestStorageBasedHiveAuthorization extends BaseTestHiveImpersonation
     showTablesHelper(db_u0_only, Collections.<String>emptyList());
   }
 
+  @Test
+  public void fromInfoSchemaUser2() throws Exception {
+    updateClient(org1Users[2]);
+
+    fromInfoSchemaHelper(
+        hivePluginName,
+        db_general,
+        ImmutableList.of(
+            g_student_all_755,
+            g_voter_u2g1_750,
+            g_voter_all_755
+        ),
+        ImmutableList.of(
+            TableType.TABLE,
+            TableType.TABLE,
+            TableType.TABLE
+        ));
+
+    fromInfoSchemaHelper(
+        hivePluginName,
+        db_u1g1_only,
+        ImmutableList.of(
+            u1g1_student_all_755,
+            u1g1_voter_all_755
+        ),
+        ImmutableList.of(
+            TableType.TABLE,
+            TableType.TABLE
+        ));
+
+    fromInfoSchemaHelper(
+        hivePluginName,
+        db_u0_only,
+        Collections.<String>emptyList(),
+        Collections.<TableType>emptyList());
+  }
+
   // Try to read the tables "user0" has access to read in db_general.
   @Test
   public void selectUser0_db_general() throws Exception {

http://git-wip-us.apache.org/repos/asf/drill/blob/b8f6ebc6/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index 6a0889d..7f216f0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -270,6 +270,9 @@ public interface ExecConstants {
   String CTAS_PARTITIONING_HASH_DISTRIBUTE = "store.partition.hash_distribute";
   BooleanValidator CTAS_PARTITIONING_HASH_DISTRIBUTE_VALIDATOR = new BooleanValidator(CTAS_PARTITIONING_HASH_DISTRIBUTE, false);
 
+  String ENABLE_BULK_LOAD_TABLE_LIST_KEY = "exec.enable_bulk_load_table_list";
+  BooleanValidator ENABLE_BULK_LOAD_TABLE_LIST = new BooleanValidator(ENABLE_BULK_LOAD_TABLE_LIST_KEY, false);
+
   /**
    * Option whose value is a comma separated list of admin usernames. Admin users are users who have special privileges
    * such as changing system options.

http://git-wip-us.apache.org/repos/asf/drill/blob/b8f6ebc6/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
index 0abdb76..db78108 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
@@ -134,7 +134,8 @@ public class SystemOptionManager extends BaseOptionManager implements AutoClosea
       ExecConstants.ENABLE_VERBOSE_ERRORS,
       ExecConstants.ENABLE_WINDOW_FUNCTIONS_VALIDATOR,
       ClassTransformer.SCALAR_REPLACEMENT_VALIDATOR,
-      ExecConstants.ENABLE_NEW_TEXT_READER
+      ExecConstants.ENABLE_NEW_TEXT_READER,
+      ExecConstants.ENABLE_BULK_LOAD_TABLE_LIST
     };
     final Map<String, OptionValidator> tmp = new HashMap<>();
     for (final OptionValidator validator : validators) {

http://git-wip-us.apache.org/repos/asf/drill/blob/b8f6ebc6/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
index 2403252..f7ec3fe 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
@@ -25,10 +25,12 @@ import java.util.Set;
 
 import org.apache.calcite.linq4j.tree.DefaultExpression;
 import org.apache.calcite.linq4j.tree.Expression;
+import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.schema.Function;
 import org.apache.calcite.schema.Schema;
 import org.apache.calcite.schema.SchemaPlus;
 import org.apache.calcite.schema.Table;
+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.exec.dotdrill.View;
 import org.apache.drill.exec.planner.logical.CreateTableEntry;
@@ -194,4 +196,39 @@ public abstract class AbstractSchema implements Schema, SchemaPartitionExplorer,
         .message("Dropping tables is not supported in schema [%s]", getSchemaPath())
         .build(logger);
   }
-}
+
+  /**
+   * Get the collection of {@link Table} tables specified in tableNames, using a bulk load if the underlying storage
+   * plugin supports it.
+   * The retrieved tables are not guaranteed to have their RowType and Statistic fully populated.
+   *
+   * Specifically, calling {@link Table#getRowType(RelDataTypeFactory)} or {@link Table#getStatistic()} may throw
+   * {@link UnsupportedOperationException}.
+   *
+   * @param  tableNames the requested tables, specified by the table names
+   * @return the collection of requested tables
+   */
+  public List<Pair<String, ? extends Table>> getTablesByNamesByBulkLoad(final List<String> tableNames) {
+    return getTablesByNames(tableNames);
+  }
+
+  /**
+   * Get the collection of {@link Table} tables specified in tableNames.
+   *
+   * @param  tableNames the requested tables, specified by the table names
+   * @return the collection of requested tables
+   */
+  public List<Pair<String, ? extends Table>> getTablesByNames(final List<String> tableNames) {
+    final List<Pair<String, ? extends Table>> tables = Lists.newArrayList();
+    for (String tableName : tableNames) {
+      final Table table = getTable(tableName);
+      if (table == null) {
+        // Schema may return NULL for table if the query user doesn't have permissions to load the table. Ignore such
+        // tables as INFO SCHEMA is about showing tables which the user has access to query.
+        continue;
+      }
+      tables.add(Pair.of(tableName, table));
+    }
+    return tables;
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/drill/blob/b8f6ebc6/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaBatchCreator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaBatchCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaBatchCreator.java
index 2ef2333..199119d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaBatchCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaBatchCreator.java
@@ -33,7 +33,7 @@ public class InfoSchemaBatchCreator implements BatchCreator<InfoSchemaSubScan>{
   @Override
   public ScanBatch getBatch(FragmentContext context, InfoSchemaSubScan config, List<RecordBatch> children)
       throws ExecutionSetupException {
-    RecordReader rr = config.getTable().getRecordReader(context.getRootSchema(), config.getFilter());
+    RecordReader rr = config.getTable().getRecordReader(context.getRootSchema(), config.getFilter(), context.getOptions());
     return new ScanBatch(config, context, Collections.singleton(rr).iterator());
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/b8f6ebc6/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTable.java
index 3f8d35f..a6d56b3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaTable.java
@@ -29,6 +29,7 @@ import org.apache.calcite.sql.type.SqlTypeName;
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
+import org.apache.drill.exec.server.options.OptionManager;
 
 /**
  * Base class for tables in INFORMATION_SCHEMA.  Defines the table (fields and
@@ -85,7 +86,7 @@ public abstract class InfoSchemaTable {
     return typeFactory.createStructType(relTypes, fieldNames);
   }
 
-  public abstract RecordGenerator getRecordGenerator();
+  public abstract RecordGenerator getRecordGenerator(OptionManager optionManager);
 
   /** Layout for the CATALOGS table. */
   static public class Catalogs extends InfoSchemaTable {
@@ -101,8 +102,8 @@ public abstract class InfoSchemaTable {
     }
 
     @Override
-    public RecordGenerator getRecordGenerator() {
-      return new RecordGenerator.Catalogs();
+    public RecordGenerator getRecordGenerator(OptionManager optionManager) {
+      return new RecordGenerator.Catalogs(optionManager);
     }
   }
 
@@ -122,8 +123,8 @@ public abstract class InfoSchemaTable {
     }
 
     @Override
-    public RecordGenerator getRecordGenerator() {
-      return new RecordGenerator.Schemata();
+    public RecordGenerator getRecordGenerator(OptionManager optionManager) {
+      return new RecordGenerator.Schemata(optionManager);
     }
   }
 
@@ -142,8 +143,8 @@ public abstract class InfoSchemaTable {
     }
 
     @Override
-    public RecordGenerator getRecordGenerator() {
-      return new RecordGenerator.Tables();
+    public RecordGenerator getRecordGenerator(OptionManager optionManager) {
+      return new RecordGenerator.Tables(optionManager);
     }
   }
 
@@ -162,8 +163,8 @@ public abstract class InfoSchemaTable {
     }
 
     @Override
-    public RecordGenerator getRecordGenerator() {
-      return new RecordGenerator.Views();
+    public RecordGenerator getRecordGenerator(OptionManager optionManager) {
+      return new RecordGenerator.Views(optionManager);
     }
   }
 
@@ -214,8 +215,8 @@ public abstract class InfoSchemaTable {
     }
 
     @Override
-    public RecordGenerator getRecordGenerator() {
-      return new RecordGenerator.Columns();
+    public RecordGenerator getRecordGenerator(OptionManager optionManager) {
+      return new RecordGenerator.Columns(optionManager);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/b8f6ebc6/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/RecordGenerator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/RecordGenerator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/RecordGenerator.java
index f464727..29ccbce 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/RecordGenerator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/RecordGenerator.java
@@ -31,7 +31,10 @@ import org.apache.calcite.rel.type.RelDataTypeField;
 import org.apache.calcite.schema.Schema.TableType;
 import org.apache.calcite.schema.SchemaPlus;
 import org.apache.calcite.schema.Table;
+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.planner.logical.DrillViewInfoProvider;
+import org.apache.drill.exec.server.options.OptionManager;
 import org.apache.drill.exec.store.AbstractSchema;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.store.ischema.InfoSchemaFilter.Result;
@@ -48,6 +51,11 @@ import com.google.common.collect.Lists;
 public abstract class RecordGenerator {
   protected InfoSchemaFilter filter;
 
+  protected OptionManager optionManager;
+  public RecordGenerator(OptionManager optionManager) {
+    this.optionManager = optionManager;
+  }
+
   public void setInfoSchemaFilter(InfoSchemaFilter filter) {
     this.filter = filter;
   }
@@ -124,29 +132,37 @@ public abstract class RecordGenerator {
 
     // Visit this schema and if requested ...
     if (shouldVisitSchema(schemaPath, schema) && visitSchema(schemaPath, schema)) {
-      // ... do for each of the schema's tables.
-      for (String tableName: schema.getTableNames()) {
-        Table table = schema.getTable(tableName);
-
-        if (table == null) {
-          // Schema may return NULL for table if the query user doesn't have permissions to load the table. Ignore such
-          // tables as INFO SCHEMA is about showing tables which the use has access to query.
-          continue;
-        }
+      visitTables(schemaPath, schema);
+    }
+  }
 
-        // Visit the table, and if requested ...
-        if (shouldVisitTable(schemaPath, tableName) && visitTable(schemaPath,  tableName, table)) {
-          // ... do for each of the table's fields.
-          RelDataType tableRow = table.getRowType(new JavaTypeFactoryImpl());
-          for (RelDataTypeField field: tableRow.getFieldList()) {
-            visitField(schemaPath,  tableName, field);
-          }
+  /**
+   * Visit the tables in the given schema.
+   * @param  schemaPath  the path to the given schema
+   * @param  schema  the given schema
+   */
+  public void visitTables(String schemaPath, SchemaPlus schema) {
+    final AbstractSchema drillSchema = schema.unwrap(AbstractSchema.class);
+    final List<String> tableNames = Lists.newArrayList(schema.getTableNames());
+    for(Pair<String, ? extends Table> tableNameToTable : drillSchema.getTablesByNames(tableNames)) {
+      final String tableName = tableNameToTable.getKey();
+      final Table table = tableNameToTable.getValue();
+      // Visit the table, and if requested ...
+      if(shouldVisitTable(schemaPath, tableName) && visitTable(schemaPath,  tableName, table)) {
+        // ... do for each of the table's fields.
+        final RelDataType tableRow = table.getRowType(new JavaTypeFactoryImpl());
+        for (RelDataTypeField field: tableRow.getFieldList()) {
+          visitField(schemaPath, tableName, field);
         }
       }
     }
   }
 
   public static class Catalogs extends RecordGenerator {
+    public Catalogs(OptionManager optionManager) {
+      super(optionManager);
+    }
+
     @Override
     public RecordReader getRecordReader() {
       Records.Catalog catalogRecord =
@@ -159,6 +175,10 @@ public abstract class RecordGenerator {
   public static class Schemata extends RecordGenerator {
     List<Records.Schema> records = Lists.newArrayList();
 
+    public Schemata(OptionManager optionManager) {
+      super(optionManager);
+    }
+
     @Override
     public RecordReader getRecordReader() {
       return new PojoRecordReader<>(Records.Schema.class, records.iterator());
@@ -176,12 +196,38 @@ public abstract class RecordGenerator {
   public static class Tables extends RecordGenerator {
     List<Records.Table> records = Lists.newArrayList();
 
+    public Tables(OptionManager optionManager) {
+      super(optionManager);
+    }
+
     @Override
     public RecordReader getRecordReader() {
       return new PojoRecordReader<>(Records.Table.class, records.iterator());
     }
 
     @Override
+    public void visitTables(String schemaPath, SchemaPlus schema) {
+      final AbstractSchema drillSchema = schema.unwrap(AbstractSchema.class);
+
+      final List<String> tableNames = Lists.newArrayList(schema.getTableNames());
+      final List<Pair<String, ? extends Table>> tableNameToTables;
+      if(optionManager.getOption(ExecConstants.ENABLE_BULK_LOAD_TABLE_LIST)) {
+        tableNameToTables = drillSchema.getTablesByNamesByBulkLoad(tableNames);
+      } else {
+        tableNameToTables = drillSchema.getTablesByNames(tableNames);
+      }
+
+      for(Pair<String, ? extends Table> tableNameToTable : tableNameToTables) {
+        final String tableName = tableNameToTable.getKey();
+        final Table table = tableNameToTable.getValue();
+        // Visit the table, and if requested ...
+        if(shouldVisitTable(schemaPath, tableName)) {
+          visitTable(schemaPath, tableName, table);
+        }
+      }
+    }
+
+    @Override
     public boolean visitTable(String schemaName, String tableName, Table table) {
       Preconditions.checkNotNull(table, "Error. Table %s.%s provided is null.", schemaName, tableName);
 
@@ -198,6 +244,10 @@ public abstract class RecordGenerator {
   public static class Views extends RecordGenerator {
     List<Records.View> records = Lists.newArrayList();
 
+    public Views(OptionManager optionManager) {
+      super(optionManager);
+    }
+
     @Override
     public RecordReader getRecordReader() {
       return new PojoRecordReader<>(Records.View.class, records.iterator());
@@ -215,6 +265,9 @@ public abstract class RecordGenerator {
 
   public static class Columns extends RecordGenerator {
     List<Records.Column> records = Lists.newArrayList();
+    public Columns(OptionManager optionManager) {
+      super(optionManager);
+    }
 
     @Override
     public RecordReader getRecordReader() {

http://git-wip-us.apache.org/repos/asf/drill/blob/b8f6ebc6/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/SelectedTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/SelectedTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/SelectedTable.java
index 79e7fd2..e2a2b2f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/SelectedTable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/SelectedTable.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.store.ischema;
 
 import org.apache.calcite.schema.SchemaPlus;
 
+import org.apache.drill.exec.server.options.OptionManager;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.store.ischema.InfoSchemaTable.Catalogs;
 import org.apache.drill.exec.store.ischema.InfoSchemaTable.Columns;
@@ -51,8 +52,8 @@ public enum SelectedTable{
     this.tableDef = tableDef;
   }
 
-  public RecordReader getRecordReader(SchemaPlus rootSchema, InfoSchemaFilter filter) {
-    RecordGenerator recordGenerator = tableDef.getRecordGenerator();
+  public RecordReader getRecordReader(SchemaPlus rootSchema, InfoSchemaFilter filter, OptionManager optionManager) {
+    RecordGenerator recordGenerator = tableDef.getRecordGenerator(optionManager);
     recordGenerator.setInfoSchemaFilter(filter);
     recordGenerator.scanSchema(rootSchema);
     return recordGenerator.getRecordReader();