Posted to commits@impala.apache.org by ta...@apache.org on 2018/02/23 17:22:16 UTC

[1/3] impala git commit: IMPALA-5152: Introduce metadata loading phase

Repository: impala
Updated Branches:
  refs/heads/2.x d89db5b0d -> e0c09181f
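
Note on the test changes below: the hunks apply one mechanical migration across the frontend tests. The old Analyzer-based helpers are replaced by AnalysisContext-based ones, matching the new metadata loading phase that runs before analysis. A minimal sketch of the pattern, assuming a test class extending FrontendTestBase (the helpers createAnalysisCtx(), AnalyzesOk(), and the query-options accessor all appear verbatim in the hunks below; the SQL string is illustrative only):

    // Before: analysis was driven through a raw Analyzer.
    Analyzer analyzer = createAnalyzer(Catalog.DEFAULT_DB);
    analyzer.getQueryOptions().setDecimal_v2(true);
    SelectStmt s1 = (SelectStmt) AnalyzesOk("select 1.1 + 2.2", analyzer, null);

    // After: analysis is driven through an AnalysisContext, which owns the
    // query options; referenced tables are loaded in a separate phase first.
    AnalysisContext ctx = createAnalysisCtx(Catalog.DEFAULT_DB);
    ctx.getQueryOptions().setDecimal_v2(true);
    SelectStmt s2 = (SelectStmt) AnalyzesOk("select 1.1 + 2.2", ctx);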


http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/AnalyzeExprsTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeExprsTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeExprsTest.java
index 93a4090..49aa91c 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeExprsTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeExprsTest.java
@@ -27,7 +27,6 @@ import java.util.Arrays;
 import java.util.List;
 
 import org.apache.impala.analysis.TimestampArithmeticExpr.TimeUnit;
-import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.Catalog;
 import org.apache.impala.catalog.CatalogException;
 import org.apache.impala.catalog.Column;
@@ -1369,8 +1368,8 @@ public class AnalyzeExprsTest extends AnalyzerTest {
   /**
    * Get the result type of a select statement with a single select list element.
    */
-  Type getReturnType(String stmt, Analyzer analyzer) {
-    SelectStmt select = (SelectStmt) AnalyzesOk(stmt, analyzer, null);
+  Type getReturnType(String stmt, AnalysisContext ctx) {
+    SelectStmt select = (SelectStmt) AnalyzesOk(stmt, ctx);
     List<Expr> selectListExprs = select.getResultExprs();
     assertNotNull(selectListExprs);
     assertEquals(selectListExprs.size(), 1);
@@ -1379,13 +1378,13 @@ public class AnalyzeExprsTest extends AnalyzerTest {
     return expr.getType();
   }
 
-  private void checkReturnType(String stmt, Type resultType, Analyzer analyzer) {
-    Type exprType = getReturnType(stmt, analyzer);
+  private void checkReturnType(String stmt, Type resultType, AnalysisContext ctx) {
+    Type exprType = getReturnType(stmt, ctx);
     assertEquals("Expected: " + resultType + " != " + exprType, resultType, exprType);
   }
 
   private void checkReturnType(String stmt, Type resultType) {
-    checkReturnType(stmt, resultType, createAnalyzer(Catalog.DEFAULT_DB));
+    checkReturnType(stmt, resultType, createAnalysisCtx(Catalog.DEFAULT_DB));
   }
 
   /**
@@ -1394,12 +1393,12 @@ public class AnalyzeExprsTest extends AnalyzerTest {
    */
   private void checkDecimalReturnType(String stmt, Type decimalV1ResultType,
       Type decimalV2ResultType) {
-    Analyzer analyzer = createAnalyzer(Catalog.DEFAULT_DB);
-    analyzer.getQueryOptions().setDecimal_v2(false);
-    checkReturnType(stmt, decimalV1ResultType, analyzer);
-    analyzer = createAnalyzer(Catalog.DEFAULT_DB);
-    analyzer.getQueryOptions().setDecimal_v2(true);
-    checkReturnType(stmt, decimalV2ResultType, analyzer);
+    AnalysisContext ctx = createAnalysisCtx(Catalog.DEFAULT_DB);
+    ctx.getQueryOptions().setDecimal_v2(false);
+    checkReturnType(stmt, decimalV1ResultType, ctx);
+    ctx = createAnalysisCtx(Catalog.DEFAULT_DB);
+    ctx.getQueryOptions().setDecimal_v2(true);
+    checkReturnType(stmt, decimalV2ResultType, ctx);
   }
 
   /**
@@ -2549,16 +2548,16 @@ public class AnalyzeExprsTest extends AnalyzerTest {
           "select count(distinct %s), sum(distinct smallint_col), " +
           "avg(float_col), min(%s) " +
           "from functional.alltypes",
-          colName, colName), createAnalyzer(queryOptions));
+          colName, colName), createAnalysisCtx(queryOptions));
       countDistinctFns.add(String.format("count(distinct %s)", colName));
     }
     // Test a single query with a count(distinct) on all columns of alltypesTbl.
     AnalyzesOk(String.format("select %s from functional.alltypes",
-        Joiner.on(",").join(countDistinctFns)), createAnalyzer(queryOptions));
+        Joiner.on(",").join(countDistinctFns)), createAnalysisCtx(queryOptions));
 
     allCountDistinctFns.addAll(countDistinctFns);
     countDistinctFns.clear();
-    Table decimalTbl = catalog_.getTable("functional", "decimal_tbl");
+    Table decimalTbl = catalog_.getOrLoadTable("functional", "decimal_tbl");
     for (Column col: decimalTbl.getColumns()) {
       String colName = col.getName();
       // Test a single count(distinct) with some other aggs.
@@ -2566,12 +2565,12 @@ public class AnalyzeExprsTest extends AnalyzerTest {
           "select count(distinct %s), sum(distinct d1), " +
           "avg(d2), min(%s) " +
           "from functional.decimal_tbl",
-          colName, colName), createAnalyzer(queryOptions));
+          colName, colName), createAnalysisCtx(queryOptions));
       countDistinctFns.add(String.format("count(distinct %s)", colName));
     }
     // Test a single query with a count(distinct) on all columns of decimalTbl.
     AnalyzesOk(String.format("select %s from functional.decimal_tbl",
-        Joiner.on(",").join(countDistinctFns)), createAnalyzer(queryOptions));
+        Joiner.on(",").join(countDistinctFns)), createAnalysisCtx(queryOptions));
 
     allCountDistinctFns.addAll(countDistinctFns);
 
@@ -2579,19 +2578,19 @@ public class AnalyzeExprsTest extends AnalyzerTest {
     // alltypes/decimalTbl.
     AnalyzesOk(String.format(
         "select %s from functional.alltypes cross join functional.decimal_tbl",
-        Joiner.on(",").join(countDistinctFns)), createAnalyzer(queryOptions));
+        Joiner.on(",").join(countDistinctFns)), createAnalysisCtx(queryOptions));
 
     // The rewrite does not work for multiple count() arguments.
     AnalysisError("select count(distinct int_col, bigint_col), " +
         "count(distinct string_col, float_col) from functional.alltypes",
-        createAnalyzer(queryOptions),
+        createAnalysisCtx(queryOptions),
         "all DISTINCT aggregate functions need to have the same set of parameters as " +
         "count(DISTINCT int_col, bigint_col); deviating function: " +
         "count(DISTINCT string_col, float_col)");
     // The rewrite only applies to the count() function.
     AnalysisError(
         "select avg(distinct int_col), sum(distinct float_col) from functional.alltypes",
-        createAnalyzer(queryOptions),
+        createAnalysisCtx(queryOptions),
         "all DISTINCT aggregate functions need to have the same set of parameters as " +
         "avg(DISTINCT int_col); deviating function: sum(DISTINCT");
   }
@@ -2609,8 +2608,7 @@ public class AnalyzeExprsTest extends AnalyzerTest {
     Assert.assertTrue(tinyIntFn.compare(decimalFn,
         CompareMode.IS_NONSTRICT_SUPERTYPE_OF));
     // Check that this resolves to the decimal version of the function.
-    Analyzer analyzer = createAnalyzer(Catalog.BUILTINS_DB);
-    Db db = analyzer.getDb(Catalog.BUILTINS_DB, Privilege.VIEW_METADATA, true);
+    Db db = catalog_.getDb(Catalog.BUILTINS_DB);
     Function foundFn = db.getFunction(decimalFn, CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
     assertNotNull(foundFn);
     Assert.assertTrue(foundFn.getArgs()[0].isDecimal());

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/AnalyzeModifyStmtsTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeModifyStmtsTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeModifyStmtsTest.java
index 152add6..3f6d72d 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeModifyStmtsTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeModifyStmtsTest.java
@@ -17,9 +17,8 @@
 
 package org.apache.impala.analysis;
 
-import org.junit.Test;
-
 import org.apache.impala.testutil.TestUtils;
+import org.junit.Test;
 
 /**
  * Tests analysis phase of the ModifyStmt and its sub-classes.
@@ -42,12 +41,12 @@ public class AnalyzeModifyStmtsTest extends AnalyzerTest {
     AnalyzesOk("update a set a.name = 'values' from functional_kudu.testtbl a " +
         "where a.zip in (select zip from functional.testtbl limit 10)");
     AnalyzesOk("update functional_kudu.dimtbl set name = 'Oskar' FROM dimtbl",
-        createAnalyzer("functional_kudu"));
+        createAnalysisCtx("functional_kudu"));
     AnalysisError("update a set b.name = 'Oskar' FROM dimtbl b",
-        createAnalyzer("functional_kudu"),
+        createAnalysisCtx("functional_kudu"),
         "'a' is not a valid table alias or reference.");
     AnalyzesOk("update a set a.name = 'Oskar' FROM dimtbl a",
-        createAnalyzer("functional_kudu"));
+        createAnalysisCtx("functional_kudu"));
     // Table name is an implicit alias
     AnalyzesOk(
         "update functional_kudu.dimtbl set name = 'Oskar' FROM functional_kudu.dimtbl");
@@ -65,7 +64,7 @@ public class AnalyzeModifyStmtsTest extends AnalyzerTest {
     // Location of the kudu table doesnt matter
     AnalyzesOk(
         "update a set a.name = 'Oskar' from functional.testtbl b, dimtbl a where b.id =" +
-        " a.id ", createAnalyzer("functional_kudu"));
+        " a.id ", createAnalysisCtx("functional_kudu"));
     AnalyzesOk("update a set name = 'Oskar' from functional_kudu.testtbl a");
     AnalysisError(
         "update functional_kudu.testtbl set name = 'Oskar' from functional_kudu.dimtbl",
@@ -87,7 +86,8 @@ public class AnalyzeModifyStmtsTest extends AnalyzerTest {
     TestUtils.assumeKuduIsSupported();
     AnalyzesOk("update functional_kudu.dimtbl set name = 'Oskar'");
     // Correct default database resolution
-    AnalyzesOk("update dimtbl set name = 'Oskar'", createAnalyzer("functional_kudu"));
+    AnalyzesOk("update dimtbl set name = 'Oskar'",
+        createAnalysisCtx("functional_kudu"));
     // Correct table alias resolution
     AnalyzesOk("update functional_kudu.dimtbl set name = '10'");
     // Check type compatibility, zip is int, 4711 is smallint
@@ -239,5 +239,4 @@ public class AnalyzeModifyStmtsTest extends AnalyzerTest {
         "'functional.allcomplextypes.int_array_col' is not a valid table alias or " +
         "reference.");
   }
-
 }

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/AnalyzeStmtsTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeStmtsTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeStmtsTest.java
index f9d5cf5..cdcb9bc 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeStmtsTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeStmtsTest.java
@@ -122,7 +122,7 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
     // Parent/collection join requires the child to use an alias of the parent.
     AnalysisError(String.format(
         "select %s from allcomplextypes, %s", collectionField, collectionTable),
-        createAnalyzer("functional"),
+        createAnalysisCtx("functional"),
         String.format("Could not resolve table reference: '%s'", collectionTable));
     AnalysisError(String.format(
         "select %s from functional.allcomplextypes, %s",
@@ -315,7 +315,7 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
     AnalysisError("select alltypes.smallint_col, functional.alltypes.int_col " +
         "from alltypes inner join functional.alltypes " +
         "on (alltypes.id = functional.alltypes.id)",
-        createAnalyzer("functional"),
+        createAnalysisCtx("functional"),
         "Duplicate table alias: 'functional.alltypes'");
   }
 
@@ -410,7 +410,6 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
     }
     List<List<Integer>> expectedPaths = Lists.newArrayList(expectedAbsPaths);
     Assert.assertEquals("Mismatched absolute paths.", expectedPaths, actualAbsPaths);
-
   }
 
   /**
@@ -805,13 +804,13 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
     AnalyzesOk("select 1 from a.a");
     AnalyzesOk("select 1 from a.a.a");
     AnalyzesOk("select 1 from a.a.a.a");
-    AnalyzesOk("select 1 from a", createAnalyzer("a"));
-    AnalyzesOk("select 1 from a.a.a.a", createAnalyzer("a"));
+    AnalyzesOk("select 1 from a", createAnalysisCtx("a"));
+    AnalyzesOk("select 1 from a.a.a.a", createAnalysisCtx("a"));
 
     // Table paths are ambiguous.
-    AnalysisError("select 1 from a.a", createAnalyzer("a"),
+    AnalysisError("select 1 from a.a", createAnalysisCtx("a"),
         "Table reference is ambiguous: 'a.a'");
-    AnalysisError("select 1 from a.a.a", createAnalyzer("a"),
+    AnalysisError("select 1 from a.a.a", createAnalysisCtx("a"),
         "Table reference is ambiguous: 'a.a.a'");
 
     // Ambiguous reference to registered table aliases.
@@ -1128,18 +1127,18 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
     // test auto-generated column labels by enforcing their use in inline views
     AnalyzesOk("select _c0, a, int_col, _c3 from " +
         "(select int_col * 1, int_col as a, int_col, !bool_col, concat(string_col) " +
-        "from functional.alltypes) t", createAnalyzerUsingHiveColLabels());
+        "from functional.alltypes) t", createAnalysisCtxUsingHiveColLabels());
     // test auto-generated column labels in group by and order by
     AnalyzesOk("select _c0, count(a), count(int_col), _c3 from " +
         "(select int_col * 1, int_col as a, int_col, !bool_col, concat(string_col) " +
         "from functional.alltypes) t group by _c0, _c3 order by _c0 limit 10",
-        createAnalyzerUsingHiveColLabels());
+        createAnalysisCtxUsingHiveColLabels());
     // test auto-generated column labels in multiple scopes
     AnalyzesOk("select x.front, x._c1, x._c2 from " +
         "(select y.back as front, y._c0 * 10, y._c2 + 2 from " +
         "(select int_col * 10, int_col as back, int_col + 2 from " +
         "functional.alltypestiny) y) x",
-        createAnalyzerUsingHiveColLabels());
+        createAnalysisCtxUsingHiveColLabels());
     // IMPALA-3537: Test that auto-generated column labels are only applied in
     // the appropriate child query blocks.
     SelectStmt colLabelsStmt =
@@ -1153,13 +1152,13 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
         "(select int_col * 2, id from functional.alltypes) a inner join " +
         "(select int_col + 6, id from functional.alltypes) b " +
         "on (a.id = b.id)",
-        createAnalyzerUsingHiveColLabels(),
+        createAnalysisCtxUsingHiveColLabels(),
         "Column/field reference is ambiguous: '_c0'");
     // auto-generated column doesn't exist
     AnalysisError("select _c0, a, _c2, _c3 from " +
         "(select int_col * 1, int_col as a, int_col, !bool_col, concat(string_col) " +
         "from functional.alltypes) t",
-        createAnalyzerUsingHiveColLabels(),
+        createAnalysisCtxUsingHiveColLabels(),
         "Could not resolve column/field reference: '_c2'");
 
     // Regression test for IMPALA-984.
@@ -1249,7 +1248,7 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
     AnalyzesOk("select cnt from functional.allcomplextypes, " +
         "(select count(1) cnt from functional.allcomplextypes) v");
     AnalyzesOk("select cnt from functional.allcomplextypes, " +
-        "(select count(1) cnt from allcomplextypes) v", createAnalyzer("functional"));
+        "(select count(1) cnt from allcomplextypes) v", createAnalysisCtx("functional"));
     // Illegal correlated reference.
     AnalysisError("select cnt from functional.allcomplextypes t, " +
         "(select count(1) cnt from t) v",
@@ -2758,19 +2757,19 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
     AnalyzesOk("with t1 as (select int_col x, bigint_col y from alltypes), " +
         "alltypes as (select x a, y b from t1)" +
         "select a, b from alltypes",
-        createAnalyzer("functional"));
+        createAnalysisCtx("functional"));
     // Recursion is prevented because of scoping rules. The inner 'complex_view'
     // refers to a view in the catalog.
     AnalyzesOk("with t1 as (select abc x, xyz y from complex_view), " +
         "complex_view as (select x a, y b from t1)" +
         "select a, b from complex_view",
-        createAnalyzer("functional"));
+        createAnalysisCtx("functional"));
     // Nested WITH clauses. Scoping prevents recursion.
     AnalyzesOk("with t1 as (with t1 as (select int_col x, bigint_col y from alltypes) " +
         "select x, y from t1), " +
         "alltypes as (select x a, y b from t1) " +
         "select a, b from alltypes",
-        createAnalyzer("functional"));
+        createAnalysisCtx("functional"));
     // Nested WITH clause inside a subquery.
     AnalyzesOk("with t1 as " +
         "(select * from (with t2 as (select * from functional.alltypes) " +
@@ -2843,7 +2842,7 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
     // The inner alltypes_view gets resolved to the catalog view.
     AnalyzesOk("with alltypes_view as (select int_col x from alltypes_view) " +
         "select x from alltypes_view",
-        createAnalyzer("functional"));
+        createAnalysisCtx("functional"));
     // The inner 't' is resolved to a non-existent base table.
     AnalysisError("with t as (select int_col x, bigint_col y from t1) " +
         "select x, y from t",
@@ -3296,7 +3295,8 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
 
     // Unknown target DB
     AnalysisError("INSERT " + qualifier + " table UNKNOWNDB.alltypesnopart SELECT * " +
-        "from functional.alltypesnopart", "Database does not exist: UNKNOWNDB");
+        "from functional.alltypesnopart",
+        "Database does not exist: UNKNOWNDB");
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/AnalyzeSubqueriesTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeSubqueriesTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeSubqueriesTest.java
index e49e289..de8632e 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeSubqueriesTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeSubqueriesTest.java
@@ -17,10 +17,9 @@
 
 package org.apache.impala.analysis;
 
-import org.junit.Test;
-
 import org.apache.impala.catalog.Type;
 import org.apache.impala.common.AnalysisException;
+import org.junit.Test;
 
 public class AnalyzeSubqueriesTest extends AnalyzerTest {
   private static String cmpOperators[] = {"=", "!=", "<=", ">=", ">", "<"};
@@ -1198,7 +1197,7 @@ public class AnalyzeSubqueriesTest extends AnalyzerTest {
         "where id in (select id from functional.allcomplextypes)");
     AnalyzesOk("select id from functional.allcomplextypes " +
         "where id < (select count(1) cnt from allcomplextypes)",
-        createAnalyzer("functional"));
+        createAnalysisCtx("functional"));
     // Illegal correlated table references.
     AnalysisError("select id from (select * from functional.alltypestiny) t " +
         "where t.int_col = (select count(*) from t)",

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/AnalyzeUpsertStmtTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeUpsertStmtTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeUpsertStmtTest.java
index d6cd804..fc7b51a 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeUpsertStmtTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeUpsertStmtTest.java
@@ -17,9 +17,8 @@
 
 package org.apache.impala.analysis;
 
-import org.junit.Test;
-
 import org.apache.impala.testutil.TestUtils;
+import org.junit.Test;
 
 public class AnalyzeUpsertStmtTest extends AnalyzerTest {
   @Test
@@ -101,7 +100,8 @@ public class AnalyzeUpsertStmtTest extends AnalyzerTest {
         "UPSERT is only supported for Kudu tables");
     // Unknown target DB
     AnalysisError("upsert into UNKNOWNDB.testtbl select * " +
-        "from functional.alltypesnopart", "Database does not exist: UNKNOWNDB");
+        "from functional.alltypesnopart",
+        "Database does not exist: UNKNOWNDB");
     // WITH-clause tables cannot be upserted into
     AnalysisError("with t1 as (select 'a' x) upsert into t1 values('b' x)",
         "Table does not exist: default.t1");

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/AnalyzerTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzerTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzerTest.java
index aab2dba..7fc8768 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzerTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzerTest.java
@@ -114,7 +114,7 @@ public class AnalyzerTest extends FrontendTestBase {
     Preconditions.checkState(tbl.isFullyQualified());
     Preconditions.checkState(query.contains("$TBL"));
     String uqQuery = query.replace("$TBL", tbl.getTbl());
-    AnalyzesOk(uqQuery, createAnalyzer(tbl.getDb()));
+    AnalyzesOk(uqQuery, createAnalysisCtx(tbl.getDb()));
     String fqQuery = query.replace("$TBL", tbl.toString());
     AnalyzesOk(fqQuery);
   }
@@ -128,7 +128,7 @@ public class AnalyzerTest extends FrontendTestBase {
     Preconditions.checkState(tbl.isFullyQualified());
     Preconditions.checkState(query.contains("$TBL"));
     String uqQuery = query.replace("$TBL", tbl.getTbl());
-    AnalysisError(uqQuery, createAnalyzer(tbl.getDb()), expectedError);
+    AnalysisError(uqQuery, createAnalysisCtx(tbl.getDb()), expectedError);
     String fqQuery = query.replace("$TBL", tbl.toString());
     AnalysisError(fqQuery, expectedError);
   }

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/AuditingTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AuditingTest.java b/fe/src/test/java/org/apache/impala/analysis/AuditingTest.java
index a14fbc9..610bf56 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AuditingTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AuditingTest.java
@@ -19,27 +19,28 @@ package org.apache.impala.analysis;
 
 import java.util.Set;
 
-import org.junit.Assert;
-import org.junit.Test;
-
 import org.apache.impala.authorization.AuthorizationConfig;
 import org.apache.impala.catalog.AuthorizationException;
 import org.apache.impala.catalog.Catalog;
 import org.apache.impala.catalog.ImpaladCatalog;
 import org.apache.impala.common.AnalysisException;
-import org.apache.impala.common.InternalException;
+import org.apache.impala.common.FrontendTestBase;
+import org.apache.impala.common.ImpalaException;
 import org.apache.impala.service.Frontend;
 import org.apache.impala.testutil.ImpaladTestCatalog;
 import org.apache.impala.testutil.TestUtils;
 import org.apache.impala.thrift.TAccessEvent;
 import org.apache.impala.thrift.TCatalogObjectType;
+import org.junit.Assert;
+import org.junit.Test;
+
 import com.google.common.collect.Sets;
 
 /**
  * Tests that auditing access events are properly captured during analysis for all
  * statement types.
  */
-public class AuditingTest extends AnalyzerTest {
+public class AuditingTest extends FrontendTestBase {
   @Test
   public void TestSelect() throws AuthorizationException, AnalysisException {
     // Simple select from a table.
@@ -354,22 +355,20 @@ public class AuditingTest extends AnalyzerTest {
 
   @Test
   public void TestAccessEventsOnAuthFailure() throws AuthorizationException,
-      AnalysisException, InternalException {
+      ImpalaException {
     // The policy file doesn't exist so all operations will result in
     // an AuthorizationError
     AuthorizationConfig config = AuthorizationConfig.createHadoopGroupAuthConfig(
         "server1", "/does/not/exist", "");
     ImpaladCatalog catalog = new ImpaladTestCatalog(config);
     Frontend fe = new Frontend(config, catalog);
-    AnalysisContext analysisContext =
-        new AnalysisContext(catalog, TestUtils.createQueryContext(), config);
+    AnalysisContext analysisCtx = createAnalysisCtx(config);
     // We should get an audit event even when an authorization failure occurs.
     try {
-      analysisContext.analyze("create table foo_does_not_exist(i int)");
-      analysisContext.authorize(fe.getAuthzChecker());
+      parseAndAnalyze("create table foo_does_not_exist(i int)", analysisCtx, fe);
       Assert.fail("Expected AuthorizationException");
     } catch (AuthorizationException e) {
-      Assert.assertEquals(1, analysisContext.getAnalyzer().getAccessEvents().size());
+      Assert.assertEquals(1, analysisCtx.getAnalyzer().getAccessEvents().size());
     }
   }
 
@@ -468,8 +467,8 @@ public class AuditingTest extends AnalyzerTest {
 
   private Set<TAccessEvent> AnalyzeAccessEvents(String stmt, String db)
       throws AuthorizationException, AnalysisException {
-    Analyzer analyzer = createAnalyzer(db);
-    AnalyzesOk(stmt, analyzer);
-    return analyzer.getAccessEvents();
+    AnalysisContext ctx = createAnalysisCtx(db);
+    AnalyzesOk(stmt, ctx);
+    return ctx.getAnalyzer().getAccessEvents();
   }
 }
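
The authorization-related tests above and below also collapse the two-step AnalysisContext.analyze()/authorize() sequence into the parseAndAnalyze() helper, which loads metadata, analyzes, and (when given a Frontend) authorizes in one call. A minimal sketch of the new flow, assuming only the signatures visible in these hunks; the statement is illustrative:

    // Loads referenced tables, then analyzes; throws ImpalaException on
    // analysis failure (or AuthorizationException via the Frontend variant:
    // parseAndAnalyze(stmt, ctx, fe)).
    AnalysisContext ctx = createAnalysisCtx();
    AnalysisContext.AnalysisResult result = parseAndAnalyze("select 1", ctx);
    StatementBase analyzedStmt = result.getStmt();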

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java b/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java
index 9140e52..bcea229 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
@@ -29,39 +30,24 @@ import java.util.Map;
 import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
-import org.apache.sentry.provider.common.ResourceAuthorizationProvider;
 import org.apache.hive.service.rpc.thrift.TGetColumnsReq;
 import org.apache.hive.service.rpc.thrift.TGetSchemasReq;
 import org.apache.hive.service.rpc.thrift.TGetTablesReq;
-import org.apache.sentry.provider.file.LocalGroupResourceAuthorizationProvider;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.impala.authorization.AuthorizationConfig;
 import org.apache.impala.authorization.AuthorizeableTable;
 import org.apache.impala.authorization.User;
 import org.apache.impala.catalog.AuthorizationException;
-import org.apache.impala.catalog.Catalog;
 import org.apache.impala.catalog.Db;
 import org.apache.impala.catalog.ImpaladCatalog;
 import org.apache.impala.catalog.ScalarFunction;
 import org.apache.impala.catalog.Type;
 import org.apache.impala.common.AnalysisException;
+import org.apache.impala.common.FrontendTestBase;
 import org.apache.impala.common.ImpalaException;
 import org.apache.impala.common.InternalException;
 import org.apache.impala.common.RuntimeEnv;
 import org.apache.impala.service.Frontend;
 import org.apache.impala.testutil.ImpaladTestCatalog;
-import org.apache.impala.testutil.TestUtils;
 import org.apache.impala.thrift.TFunctionBinaryType;
 import org.apache.impala.thrift.TMetadataOpRequest;
 import org.apache.impala.thrift.TMetadataOpcode;
@@ -69,17 +55,27 @@ import org.apache.impala.thrift.TNetworkAddress;
 import org.apache.impala.thrift.TPrivilege;
 import org.apache.impala.thrift.TPrivilegeLevel;
 import org.apache.impala.thrift.TPrivilegeScope;
-import org.apache.impala.thrift.TQueryCtx;
 import org.apache.impala.thrift.TResultSet;
 import org.apache.impala.thrift.TSessionState;
 import org.apache.impala.util.PatternMatcher;
 import org.apache.impala.util.SentryPolicyService;
+import org.apache.sentry.provider.common.ResourceAuthorizationProvider;
+import org.apache.sentry.provider.file.LocalGroupResourceAuthorizationProvider;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
 @RunWith(Parameterized.class)
-public class AuthorizationTest {
+public class AuthorizationTest extends FrontendTestBase {
   // Policy file has defined current user and 'test_user' have:
   //   ALL permission on 'tpch' database and 'newdb' database
   //   ALL permission on 'functional_seq_snap' database
@@ -123,7 +119,6 @@ public class AuthorizationTest {
   }
 
   private final TestContext ctx_;
-  private final TQueryCtx queryCtx_;
   private final AnalysisContext analysisContext_;
   private final Frontend fe_;
 
@@ -177,8 +172,7 @@ public class AuthorizationTest {
 
   public AuthorizationTest(TestContext ctx) throws Exception {
     ctx_ = ctx;
-    queryCtx_ = TestUtils.createQueryContext(Catalog.DEFAULT_DB, USER.getName());
-    analysisContext_ = new AnalysisContext(ctx_.catalog, queryCtx_, ctx_.authzConfig);
+    analysisContext_ = createAnalysisCtx(ctx_.authzConfig, USER.getName());
     fe_ = new Frontend(ctx_.authzConfig, ctx_.catalog);
   }
 
@@ -1860,17 +1854,14 @@ public class AuthorizationTest {
         new User(USER.getName() + "/abc.host.com@REAL.COM"),
         new User(USER.getName() + "@REAL.COM"));
     for (User user: users) {
-      AnalysisContext context = new AnalysisContext(ctx_.catalog,
-          TestUtils.createQueryContext(Catalog.DEFAULT_DB, user.getName()),
-          ctx_.authzConfig);
+      AnalysisContext ctx = createAnalysisCtx(ctx_.authzConfig, user.getName());
 
       // Can select from table that user has privileges on.
-      AuthzOk(context, "select * from functional.alltypesagg");
+      AuthzOk(ctx, "select * from functional.alltypesagg");
 
       // Unqualified table name.
-      AuthzError(context, "select * from alltypes",
-          "User '%s' does not have privileges to execute 'SELECT' on: default.alltypes",
-          user);
+      AuthzError(ctx, "select * from alltypes",
+          "User '%s' does not have privileges to execute 'SELECT' on: default.alltypes");
     }
     // If the first character is '/', the short username should be the same as
     // the full username.
@@ -1904,16 +1895,14 @@ public class AuthorizationTest {
     User.setRulesForTesting(
         new Configuration().get(HADOOP_SECURITY_AUTH_TO_LOCAL, "DEFAULT"));
     User user = new User("authtest/hostname@REALM.COM");
-    AnalysisContext context = new AnalysisContext(catalog,
-        TestUtils.createQueryContext(Catalog.DEFAULT_DB, user.getName()), authzConfig);
+    AnalysisContext ctx = createAnalysisCtx(authzConfig, user.getName());
     Frontend fe = new Frontend(authzConfig, catalog);
 
     // Can select from table that user has privileges on.
-    AuthzOk(fe, context, "select * from tpcds.customer");
+    AuthzOk(fe, ctx, "select * from tpcds.customer");
     // Does not have privileges to execute a query
-    AuthzError(fe, context, "select * from functional.alltypes",
-        "User '%s' does not have privileges to execute 'SELECT' on: functional.alltypes",
-        user);
+    AuthzError(fe, ctx, "select * from functional.alltypes",
+        "User '%s' does not have privileges to execute 'SELECT' on: functional.alltypes");
 
     // Unit tests for User#getShortName()
     // Different auth_to_local rules to apply on the username.
@@ -1951,31 +1940,28 @@ public class AuthorizationTest {
   @Test
   public void TestFunction() throws Exception {
     // First try with the less privileged user.
-    User currentUser = USER;
-    AnalysisContext context = new AnalysisContext(ctx_.catalog,
-        TestUtils.createQueryContext(Catalog.DEFAULT_DB, currentUser.getName()),
-        ctx_.authzConfig);
-    AuthzError(context, "show functions",
-        "User '%s' does not have privileges to access: default", currentUser);
-    AuthzOk(context, "show functions in tpch");
-
-    AuthzError(context, "create function f() returns int location " +
+    AnalysisContext ctx = createAnalysisCtx(ctx_.authzConfig, USER.getName());
+    AuthzError(ctx, "show functions",
+        "User '%s' does not have privileges to access: default");
+    AuthzOk(ctx, "show functions in tpch");
+
+    AuthzError(ctx, "create function f() returns int location " +
         "'/test-warehouse/libTestUdfs.so' symbol='NoArgs'",
-        "User '%s' does not have privileges to CREATE/DROP functions.", currentUser);
+        "User '%s' does not have privileges to CREATE/DROP functions.");
 
-    AuthzError(context, "create function tpch.f() returns int location " +
+    AuthzError(ctx, "create function tpch.f() returns int location " +
         "'/test-warehouse/libTestUdfs.so' symbol='NoArgs'",
-        "User '%s' does not have privileges to CREATE/DROP functions.", currentUser);
+        "User '%s' does not have privileges to CREATE/DROP functions.");
 
-    AuthzError(context, "create function notdb.f() returns int location " +
+    AuthzError(ctx, "create function notdb.f() returns int location " +
         "'/test-warehouse/libTestUdfs.so' symbol='NoArgs'",
-        "User '%s' does not have privileges to CREATE/DROP functions.", currentUser);
+        "User '%s' does not have privileges to CREATE/DROP functions.");
 
-    AuthzError(context, "drop function if exists f()",
-        "User '%s' does not have privileges to CREATE/DROP functions.", currentUser);
+    AuthzError(ctx, "drop function if exists f()",
+        "User '%s' does not have privileges to CREATE/DROP functions.");
 
-    AuthzError(context, "drop function notdb.f()",
-        "User '%s' does not have privileges to CREATE/DROP functions.", currentUser);
+    AuthzError(ctx, "drop function notdb.f()",
+        "User '%s' does not have privileges to CREATE/DROP functions.");
 
     // TODO: Add test support for dynamically changing privileges for
     // file-based policy.
@@ -2017,9 +2003,9 @@ public class AuthorizationTest {
       sentryService.revokeRoleFromGroup(USER, "admin", USER.getName());
       ctx_.catalog.reset();
 
-      AuthzError(context, "create function tpch.f() returns int location " +
+      AuthzError(ctx, "create function tpch.f() returns int location " +
           "'/test-warehouse/libTestUdfs.so' symbol='NoArgs'",
-          "User '%s' does not have privileges to CREATE/DROP functions.", currentUser);
+          "User '%s' does not have privileges to CREATE/DROP functions.");
 
       // Couldn't create tpch.f() but can run it.
       AuthzOk("select tpch.f()");
@@ -2036,8 +2022,7 @@ public class AuthorizationTest {
   }
 
   @Test
-  public void TestServerNameAuthorized()
-      throws AnalysisException, InternalException {
+  public void TestServerNameAuthorized() throws ImpalaException {
     if (ctx_.authzConfig.isFileBasedPolicy()) {
       // Authorization config that has a different server name from policy file.
       TestWithIncorrectConfig(AuthorizationConfig.createHadoopGroupAuthConfig(
@@ -2047,8 +2032,7 @@ public class AuthorizationTest {
   }
 
   @Test
-  public void TestNoPermissionsWhenPolicyFileDoesNotExist()
-      throws AnalysisException, InternalException {
+  public void TestNoPermissionsWhenPolicyFileDoesNotExist() throws ImpalaException {
     // Test doesn't make sense except for file based policies.
     if (!ctx_.authzConfig.isFileBasedPolicy()) return;
 
@@ -2172,47 +2156,42 @@ public class AuthorizationTest {
 
     // Create an analysis context + FE with the test user (as defined in the policy file)
     User user = new User("test_user");
-    AnalysisContext context = new AnalysisContext(catalog,
-        TestUtils.createQueryContext(Catalog.DEFAULT_DB, user.getName()), authzConfig);
+    AnalysisContext ctx = createAnalysisCtx(authzConfig, user.getName());
     Frontend fe = new Frontend(authzConfig, catalog);
 
     // Can select from table that user has privileges on.
-    AuthzOk(fe, context, "select * from functional.alltypesagg");
+    AuthzOk(fe, ctx, "select * from functional.alltypesagg");
     // Does not have privileges to execute a query
-    AuthzError(fe, context, "select * from functional.alltypes",
-        "User '%s' does not have privileges to execute 'SELECT' on: functional.alltypes",
-        user);
+    AuthzError(fe, ctx, "select * from functional.alltypes",
+        "User '%s' does not have privileges to execute 'SELECT' on: functional.alltypes");
 
     // Verify with the admin user
     user = new User("admin_user");
-    context = new AnalysisContext(catalog,
-        TestUtils.createQueryContext(Catalog.DEFAULT_DB, user.getName()), authzConfig);
+    ctx = createAnalysisCtx(authzConfig, user.getName());
     fe = new Frontend(authzConfig, catalog);
 
     // Admin user should have privileges to do anything
-    AuthzOk(fe, context, "select * from functional.alltypesagg");
-    AuthzOk(fe, context, "select * from functional.alltypes");
-    AuthzOk(fe, context, "invalidate metadata");
-    AuthzOk(fe, context, "create external table tpch.kudu_tbl stored as kudu " +
+    AuthzOk(fe, ctx, "select * from functional.alltypesagg");
+    AuthzOk(fe, ctx, "select * from functional.alltypes");
+    AuthzOk(fe, ctx, "invalidate metadata");
+    AuthzOk(fe, ctx, "create external table tpch.kudu_tbl stored as kudu " +
         "TBLPROPERTIES ('kudu.master_addresses'='127.0.0.1', 'kudu.table_name'='tbl')");
   }
 
   private void TestWithIncorrectConfig(AuthorizationConfig authzConfig, User user)
-      throws AnalysisException, InternalException {
+      throws ImpalaException {
     Frontend fe = new Frontend(authzConfig, ctx_.catalog);
-    AnalysisContext ac = new AnalysisContext(new ImpaladTestCatalog(),
-        TestUtils.createQueryContext(Catalog.DEFAULT_DB, user.getName()), authzConfig);
-    AuthzError(fe, ac, "select * from functional.alltypesagg",
+    AnalysisContext ctx = createAnalysisCtx(authzConfig, user.getName());
+    AuthzError(fe, ctx, "select * from functional.alltypesagg",
         "User '%s' does not have privileges to execute 'SELECT' on: " +
-        "functional.alltypesagg", user);
-    AuthzError(fe, ac, "ALTER TABLE functional_seq_snap.alltypes ADD COLUMNS (c1 int)",
+        "functional.alltypesagg");
+    AuthzError(fe, ctx, "ALTER TABLE functional_seq_snap.alltypes ADD COLUMNS (c1 int)",
         "User '%s' does not have privileges to execute 'ALTER' on: " +
-        "functional_seq_snap.alltypes", user);
-    AuthzError(fe, ac, "drop table tpch.lineitem",
-        "User '%s' does not have privileges to execute 'DROP' on: tpch.lineitem",
-        user);
-    AuthzError(fe, ac, "show tables in functional",
-        "User '%s' does not have privileges to access: functional.*", user);
+        "functional_seq_snap.alltypes");
+    AuthzError(fe, ctx, "drop table tpch.lineitem",
+        "User '%s' does not have privileges to execute 'DROP' on: tpch.lineitem");
+    AuthzError(fe, ctx, "show tables in functional",
+        "User '%s' does not have privileges to access: functional.*");
   }
 
   private void AuthzOk(String stmt) throws ImpalaException {
@@ -2223,10 +2202,9 @@ public class AuthorizationTest {
     AuthzOk(fe_, context, stmt);
   }
 
-  private static void AuthzOk(Frontend fe, AnalysisContext context, String stmt)
+  private void AuthzOk(Frontend fe, AnalysisContext context, String stmt)
       throws ImpalaException {
-    context.analyze(stmt);
-    context.authorize(fe.getAuthzChecker());
+    parseAndAnalyze(stmt, context, fe);
   }
 
   /**
@@ -2234,29 +2212,24 @@ public class AuthorizationTest {
    * string matches.
    */
   private void AuthzError(String stmt, String expectedErrorString)
-      throws AnalysisException, InternalException {
-    AuthzError(analysisContext_, stmt, expectedErrorString, USER);
+      throws ImpalaException {
+    AuthzError(analysisContext_, stmt, expectedErrorString);
   }
 
-  private void AuthzError(AnalysisContext analysisContext,
-      String stmt, String expectedErrorString, User user)
-      throws AnalysisException, InternalException {
-    AuthzError(fe_, analysisContext, stmt, expectedErrorString, user);
+  private void AuthzError(AnalysisContext ctx, String stmt, String expectedErrorString)
+      throws ImpalaException {
+    AuthzError(fe_, ctx, stmt, expectedErrorString);
   }
 
-  private static void AuthzError(Frontend fe, AnalysisContext analysisContext,
-      String stmt, String expectedErrorString, User user)
-      throws AnalysisException, InternalException {
+  private void AuthzError(Frontend fe, AnalysisContext ctx,
+      String stmt, String expectedErrorString)
+      throws ImpalaException {
     Preconditions.checkNotNull(expectedErrorString);
     try {
-      try {
-        analysisContext.analyze(stmt);
-      } finally {
-        analysisContext.authorize(fe.getAuthzChecker());
-      }
+      parseAndAnalyze(stmt, ctx, fe);
     } catch (AuthorizationException e) {
       // Insert the username into the error.
-      expectedErrorString = String.format(expectedErrorString, user.getName());
+      expectedErrorString = String.format(expectedErrorString, ctx.getUser());
       String errorString = e.getMessage();
       Assert.assertTrue(
           "got error:\n" + errorString + "\nexpected:\n" + expectedErrorString,

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/ExprNdvTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/ExprNdvTest.java b/fe/src/test/java/org/apache/impala/analysis/ExprNdvTest.java
index e49347e..88e1160 100644
--- a/fe/src/test/java/org/apache/impala/analysis/ExprNdvTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/ExprNdvTest.java
@@ -17,9 +17,9 @@
 
 package org.apache.impala.analysis;
 
-import org.apache.impala.catalog.Catalog;
-import org.apache.impala.common.AnalysisException;
+import org.apache.impala.analysis.AnalysisContext.AnalysisResult;
 import org.apache.impala.common.FrontendTestBase;
+import org.apache.impala.common.ImpalaException;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -29,7 +29,7 @@ import org.junit.Test;
 public class ExprNdvTest extends FrontendTestBase {
 
   public void verifyNdv(String expr, long expectedNdv)
-      throws AnalysisException {
+      throws ImpalaException {
     String stmtStr = "select " + expr + " from functional.alltypes";
     verifyNdvStmt(stmtStr, expectedNdv);
   }
@@ -40,18 +40,17 @@ public class ExprNdvTest extends FrontendTestBase {
    * functional.tinytable (tiny) does not
    */
   public void verifyNdvTwoTable(String expr, long expectedNdv)
-      throws AnalysisException {
+      throws ImpalaException {
     String stmtStr = "select " + expr + " from functional.alltypes a, " +
                      "functional.tinytable tiny";
     verifyNdvStmt(stmtStr, expectedNdv);
   }
 
-  public void verifyNdvStmt(String stmtStr, long expectedNdv)
-      throws AnalysisException {
-    SelectStmt stmt = (SelectStmt) ParsesOk(stmtStr);
-    Analyzer analyzer = createAnalyzer(Catalog.DEFAULT_DB);
-    stmt.analyze(analyzer);
-    Expr analyzedExpr = stmt.getSelectList().getItems().get(0).getExpr();
+  public void verifyNdvStmt(String stmt, long expectedNdv) throws ImpalaException {
+    AnalysisContext ctx = createAnalysisCtx();
+    AnalysisResult result = parseAndAnalyze(stmt, ctx);
+    SelectStmt parsedStmt = (SelectStmt) result.getStmt();
+    Expr analyzedExpr = parsedStmt.getSelectList().getItems().get(0).getExpr();
     long calculatedNdv = analyzedExpr.getNumDistinctValues();
     assertEquals(expectedNdv, calculatedNdv);
   }
@@ -66,7 +65,7 @@ public class ExprNdvTest extends FrontendTestBase {
   }
 
   @Test
-  public void TestCaseExprBasic() throws AnalysisException {
+  public void TestCaseExprBasic() throws ImpalaException {
     // All constants tests
     verifyNdv("case when id = 1 then 'yes' else 'no' end", 2);
     verifyNdv("case when id = 1 then 'yes' " +
@@ -92,7 +91,7 @@ public class ExprNdvTest extends FrontendTestBase {
   }
 
   @Test
-  public void TestCaseExprMissingStats() throws AnalysisException {
+  public void TestCaseExprMissingStats() throws ImpalaException {
 
     // Consts still work
     verifyNdvTwoTable("case when a.id = 1 then 'yes' " +

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/ExprRewriteRulesTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/ExprRewriteRulesTest.java b/fe/src/test/java/org/apache/impala/analysis/ExprRewriteRulesTest.java
index fa5bc05..209e69d 100644
--- a/fe/src/test/java/org/apache/impala/analysis/ExprRewriteRulesTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/ExprRewriteRulesTest.java
@@ -19,12 +19,10 @@ package org.apache.impala.analysis;
 
 import java.util.List;
 
-import org.apache.impala.catalog.Catalog;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.common.FrontendTestBase;
+import org.apache.impala.common.ImpalaException;
 import org.apache.impala.rewrite.BetweenToCompoundRule;
-import org.apache.impala.rewrite.RemoveRedundantStringCast;
-import org.apache.impala.rewrite.SimplifyDistinctFromRule;
 import org.apache.impala.rewrite.EqualityDisjunctsToInRule;
 import org.apache.impala.rewrite.ExprRewriteRule;
 import org.apache.impala.rewrite.ExprRewriter;
@@ -33,7 +31,9 @@ import org.apache.impala.rewrite.FoldConstantsRule;
 import org.apache.impala.rewrite.NormalizeBinaryPredicatesRule;
 import org.apache.impala.rewrite.NormalizeCountStarRule;
 import org.apache.impala.rewrite.NormalizeExprsRule;
+import org.apache.impala.rewrite.RemoveRedundantStringCast;
 import org.apache.impala.rewrite.SimplifyConditionalsRule;
+import org.apache.impala.rewrite.SimplifyDistinctFromRule;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -62,50 +62,51 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   }
 
   public Expr RewritesOk(String exprStr, ExprRewriteRule rule, String expectedExprStr)
-      throws AnalysisException {
+      throws ImpalaException {
     return RewritesOk("functional.alltypessmall", exprStr, rule, expectedExprStr);
   }
 
   public Expr RewritesOk(String tableName, String exprStr, ExprRewriteRule rule, String expectedExprStr)
-      throws AnalysisException {
+      throws ImpalaException {
     return RewritesOk(tableName, exprStr, Lists.newArrayList(rule), expectedExprStr);
   }
 
   public Expr RewritesOk(String exprStr, List<ExprRewriteRule> rules, String expectedExprStr)
-      throws AnalysisException {
+      throws ImpalaException {
     return RewritesOk("functional.alltypessmall", exprStr, rules, expectedExprStr);
   }
 
   public Expr RewritesOk(String tableName, String exprStr, List<ExprRewriteRule> rules,
-      String expectedExprStr) throws AnalysisException {
+      String expectedExprStr) throws ImpalaException {
     String stmtStr = "select " + exprStr + " from " + tableName;
+    // Analyze without rewrites since that's what we want to test here.
     SelectStmt stmt = (SelectStmt) ParsesOk(stmtStr);
-    Analyzer analyzer = createAnalyzer(Catalog.DEFAULT_DB);
-    stmt.analyze(analyzer);
+    AnalyzesOkNoRewrite(stmt);
     Expr origExpr = stmt.getSelectList().getItems().get(0).getExpr();
-    Expr rewrittenExpr = verifyExprEquivalence(origExpr, expectedExprStr, rules, analyzer);
+    Expr rewrittenExpr =
+        verifyExprEquivalence(origExpr, expectedExprStr, rules, stmt.getAnalyzer());
     return rewrittenExpr;
   }
 
   public Expr RewritesOkWhereExpr(String exprStr, ExprRewriteRule rule, String expectedExprStr)
-      throws AnalysisException {
+      throws ImpalaException {
     return RewritesOkWhereExpr("functional.alltypessmall", exprStr, rule, expectedExprStr);
   }
 
   public Expr RewritesOkWhereExpr(String tableName, String exprStr, ExprRewriteRule rule, String expectedExprStr)
-      throws AnalysisException {
+      throws ImpalaException {
     return RewritesOkWhereExpr(tableName, exprStr, Lists.newArrayList(rule), expectedExprStr);
   }
 
   public Expr RewritesOkWhereExpr(String tableName, String exprStr, List<ExprRewriteRule> rules,
-      String expectedExprStr) throws AnalysisException {
+      String expectedExprStr) throws ImpalaException {
     String stmtStr = "select count(1)  from " + tableName + " where " + exprStr;
-    System.out.println(stmtStr);
+    // Analyze without rewrites since that's what we want to test here.
     SelectStmt stmt = (SelectStmt) ParsesOk(stmtStr);
-    Analyzer analyzer = createAnalyzer(Catalog.DEFAULT_DB);
-    stmt.analyze(analyzer);
+    AnalyzesOkNoRewrite(stmt);
     Expr origExpr = stmt.getWhereClause();
-    Expr rewrittenExpr = verifyExprEquivalence(origExpr, expectedExprStr, rules, analyzer);
+    Expr rewrittenExpr =
+        verifyExprEquivalence(origExpr, expectedExprStr, rules, stmt.getAnalyzer());
     return rewrittenExpr;
   }
 
@@ -151,7 +152,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   }
 
   @Test
-  public void TestBetweenToCompoundRule() throws AnalysisException {
+  public void TestBetweenToCompoundRule() throws ImpalaException {
     ExprRewriteRule rule = BetweenToCompoundRule.INSTANCE;
 
     // Basic BETWEEN predicates.
@@ -190,7 +191,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   }
 
   @Test
-  public void TestExtractCommonConjunctsRule() throws AnalysisException {
+  public void TestExtractCommonConjunctsRule() throws ImpalaException {
     ExprRewriteRule rule = ExtractCommonConjunctRule.INSTANCE;
 
     // One common conjunct: int_col < 10
@@ -285,7 +286,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
    * testing is done in expr-test.cc.
    */
   @Test
-  public void TestFoldConstantsRule() throws AnalysisException {
+  public void TestFoldConstantsRule() throws ImpalaException {
     ExprRewriteRule rule = FoldConstantsRule.INSTANCE;
 
     RewritesOk("1 + 1", rule, "2");
@@ -312,7 +313,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   }
 
   @Test
-  public void TestSimplifyConditionalsRule() throws AnalysisException {
+  public void TestSimplifyConditionalsRule() throws ImpalaException {
     ExprRewriteRule rule = SimplifyConditionalsRule.INSTANCE;
 
     // IF
@@ -448,7 +449,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   }
 
   @Test
-  public void TestNormalizeExprsRule() throws AnalysisException {
+  public void TestNormalizeExprsRule() throws ImpalaException {
     ExprRewriteRule rule = NormalizeExprsRule.INSTANCE;
 
     // CompoundPredicate
@@ -461,7 +462,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   }
 
   @Test
-  public void TestNormalizeBinaryPredicatesRule() throws AnalysisException {
+  public void TestNormalizeBinaryPredicatesRule() throws ImpalaException {
     ExprRewriteRule rule = NormalizeBinaryPredicatesRule.INSTANCE;
 
     RewritesOk("0 = id", rule, "id = 0");
@@ -483,7 +484,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   }
 
   @Test
-  public void TestEqualityDisjunctsToInRule() throws AnalysisException {
+  public void TestEqualityDisjunctsToInRule() throws ImpalaException {
     ExprRewriteRule edToInrule = EqualityDisjunctsToInRule.INSTANCE;
     ExprRewriteRule normalizeRule = NormalizeBinaryPredicatesRule.INSTANCE;
     List<ExprRewriteRule> comboRules = Lists.newArrayList(normalizeRule,
@@ -546,7 +547,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   }
 
   @Test
-  public void TestNormalizeCountStarRule() throws AnalysisException {
+  public void TestNormalizeCountStarRule() throws ImpalaException {
     ExprRewriteRule rule = NormalizeCountStarRule.INSTANCE;
 
     RewritesOk("count(1)", rule, "count(*)");
@@ -560,7 +561,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   }
 
   @Test
-  public void TestSimplifyDistinctFromRule() throws AnalysisException {
+  public void TestSimplifyDistinctFromRule() throws ImpalaException {
     ExprRewriteRule rule = SimplifyDistinctFromRule.INSTANCE;
 
     // Can be simplified
@@ -583,7 +584,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   }
 
   @Test
-  public void TestRemoveRedundantStringCastRule() throws AnalysisException {
+  public void TestRemoveRedundantStringCastRule() throws ImpalaException {
     ExprRewriteRule removeRule = RemoveRedundantStringCast.INSTANCE;
     ExprRewriteRule foldConstantRule = FoldConstantsRule.INSTANCE;
     List<ExprRewriteRule> comboRules = Lists.newArrayList(removeRule, foldConstantRule);
@@ -677,7 +678,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
    * it can be further simplified via SimplifyDistinctFromRule.
    */
   @Test
-  public void TestNullif() throws AnalysisException {
+  public void TestNullif() throws ImpalaException {
     List<ExprRewriteRule> rules = Lists.newArrayList(
         SimplifyConditionalsRule.INSTANCE,
         SimplifyDistinctFromRule.INSTANCE);

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/ExprRewriterTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/ExprRewriterTest.java b/fe/src/test/java/org/apache/impala/analysis/ExprRewriterTest.java
index 310b7e4..bebc6ba 100644
--- a/fe/src/test/java/org/apache/impala/analysis/ExprRewriterTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/ExprRewriterTest.java
@@ -17,13 +17,12 @@
 
 package org.apache.impala.analysis;
 
-import org.apache.impala.authorization.AuthorizationConfig;
-import org.apache.impala.catalog.Catalog;
+import org.apache.impala.analysis.AnalysisContext.AnalysisResult;
 import org.apache.impala.common.AnalysisException;
+import org.apache.impala.common.ImpalaException;
 import org.apache.impala.common.RuntimeEnv;
 import org.apache.impala.rewrite.ExprRewriteRule;
 import org.apache.impala.rewrite.ExprRewriter;
-import org.apache.impala.testutil.TestUtils;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -78,10 +77,10 @@ public class ExprRewriterTest extends AnalyzerTest {
    * number of expressions and complicate validation.
    */
   public void RewritesOk(String stmt, int expectedNumChanges,
-      int expectedNumExprTrees) throws AnalysisException {
+      int expectedNumExprTrees) throws ImpalaException {
+    // Analyze without rewrites since that's what we want to test here.
     StatementBase parsedStmt = (StatementBase) ParsesOk(stmt);
-    parsedStmt.analyze(createAnalyzer(Catalog.DEFAULT_DB));
-
+    AnalyzesOkNoRewrite(parsedStmt);
     exprToTrue_.reset();
     parsedStmt.rewriteExprs(exprToTrue_);
     Assert.assertEquals(expectedNumChanges, exprToTrue_.getNumChanges());
@@ -93,21 +92,17 @@ public class ExprRewriterTest extends AnalyzerTest {
 
     // Make sure the stmt can be successfully re-analyzed.
     parsedStmt.reset();
-    parsedStmt.analyze(createAnalyzer(Catalog.DEFAULT_DB));
+    AnalyzesOkNoRewrite(parsedStmt);
   }
 
   /**
    * Asserts that no rewrites are performed on the given stmt.
    */
-  public void CheckNoRewrite(String stmt) throws AnalysisException {
+  public void CheckNoRewrite(String stmt) throws ImpalaException {
     exprToTrue_.reset();
-    Analyzer analyzer = createAnalyzer(Catalog.DEFAULT_DB);
-    AnalysisContext analysisCtx = new AnalysisContext(catalog_,
-        TestUtils.createQueryContext(Catalog.DEFAULT_DB,
-            System.getProperty("user.name")),
-            AuthorizationConfig.createAuthDisabledConfig(), exprToTrue_);
-    analysisCtx.analyze(stmt, analyzer);
-    Preconditions.checkNotNull(analysisCtx.getAnalysisResult().getStmt());
+    AnalysisContext analysisCtx = createAnalysisCtx();
+    AnalysisResult result = parseAndAnalyze(stmt, analysisCtx);
+    Preconditions.checkNotNull(result.getStmt());
     Assert.assertEquals(0, exprToTrue_.getNumChanges());
   }
 
@@ -122,7 +117,7 @@ public class ExprRewriterTest extends AnalyzerTest {
       "order by a.int_col, 4 limit 10";
 
   @Test
-  public void TestQueryStmts() throws AnalysisException {
+  public void TestQueryStmts() throws ImpalaException {
     RewritesOk(stmt_, 23, 11);
     // Test rewriting in inline views. The view stmt is the same as the query above
     // but with an order by + limit. Expanded star exprs are not rewritten.
@@ -146,7 +141,7 @@ public class ExprRewriterTest extends AnalyzerTest {
   }
 
   @Test
-  public void TestDdlStmts() throws AnalysisException {
+  public void TestDdlStmts() throws ImpalaException {
     RewritesOk("create table ctas_test as " + stmt_, 23, 11);
     // Create/alter view stmts are not rewritten to preserve the original SQL.
     CheckNoRewrite("create view view_test as " + stmt_);
@@ -154,7 +149,7 @@ public class ExprRewriterTest extends AnalyzerTest {
   }
 
   @Test
-  public void TestDmlStmts() throws AnalysisException {
+  public void TestDmlStmts() throws ImpalaException {
     // Insert.
     RewritesOk("insert into functional.alltypes (id, int_col, float_col, bigint_col) " +
       "partition(year=2009,month=10) " + stmt_, 23, 11);

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/StmtMetadataLoaderTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/StmtMetadataLoaderTest.java b/fe/src/test/java/org/apache/impala/analysis/StmtMetadataLoaderTest.java
new file mode 100644
index 0000000..f2c8faa
--- /dev/null
+++ b/fe/src/test/java/org/apache/impala/analysis/StmtMetadataLoaderTest.java
@@ -0,0 +1,180 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.impala.analysis;
+
+import java.util.Arrays;
+
+import org.apache.impala.analysis.StmtMetadataLoader.StmtTableCache;
+import org.apache.impala.authorization.AuthorizationConfig;
+import org.apache.impala.catalog.Catalog;
+import org.apache.impala.catalog.Table;
+import org.apache.impala.common.ImpalaException;
+import org.apache.impala.common.InternalException;
+import org.apache.impala.service.Frontend;
+import org.apache.impala.testutil.ImpaladTestCatalog;
+import org.apache.impala.util.EventSequence;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class StmtMetadataLoaderTest {
+
+  private void testLoadTables(String stmtStr,
+      int expectedNumLoadRequests, int expectedNumCatalogUpdates,
+      String[] expectedDbs, String[] expectedTables)
+      throws ImpalaException {
+    ImpaladTestCatalog catalog = new ImpaladTestCatalog();
+    Frontend fe = new Frontend(AuthorizationConfig.createAuthDisabledConfig(), catalog);
+    StatementBase stmt = fe.parse(stmtStr);
+    // Catalog is fresh and no tables are cached.
+    validateUncached(stmt, fe, expectedNumLoadRequests, expectedNumCatalogUpdates,
+        expectedDbs, expectedTables);
+    // All relevant tables should be cached now.
+    validateCached(stmt, fe, expectedDbs, expectedTables);
+  }
+
+  private void validateDbs(StmtTableCache stmtTableCache, String[] expectedDbs) {
+    String[] actualDbs = new String[stmtTableCache.dbs.size()];
+    actualDbs = stmtTableCache.dbs.toArray(actualDbs);
+    Arrays.sort(expectedDbs);
+    Arrays.sort(actualDbs);
+    Assert.assertArrayEquals(expectedDbs, actualDbs);
+  }
+
+  private void validateTables(StmtTableCache stmtTableCache, String[] expectedTables) {
+    String[] actualTables = new String[stmtTableCache.tables.size()];
+    int idx = 0;
+    for (Table t: stmtTableCache.tables.values()) {
+      Assert.assertTrue(t.isLoaded());
+      actualTables[idx++] = t.getFullName();
+    }
+    Arrays.sort(expectedTables);
+    Arrays.sort(actualTables);
+    Assert.assertArrayEquals(expectedTables, actualTables);
+  }
+
+  private void validateUncached(StatementBase stmt, Frontend fe,
+      int expectedNumLoadRequests, int expectedNumCatalogUpdates,
+      String[] expectedDbs, String[] expectedTables) throws InternalException {
+    EventSequence timeline = new EventSequence("Test Timeline");
+    StmtMetadataLoader mdLoader =
+        new StmtMetadataLoader(fe, Catalog.DEFAULT_DB, timeline);
+    StmtTableCache stmtTableCache = mdLoader.loadTables(stmt);
+    // Validate metrics.
+    Assert.assertEquals(expectedNumLoadRequests,
+        mdLoader.getNumLoadRequestsSent());
+    Assert.assertEquals(expectedNumCatalogUpdates,
+        mdLoader.getNumCatalogUpdatesReceived());
+    // Validate timeline.
+    Assert.assertEquals(2, mdLoader.getTimeline().getNumEvents());
+    // Validate dbs and tables.
+    validateDbs(stmtTableCache, expectedDbs);
+    validateTables(stmtTableCache, expectedTables);
+  }
+
+  private void validateCached(StatementBase stmt, Frontend fe,
+      String[] expectedDbs, String[] expectedTables) throws InternalException {
+    EventSequence timeline = new EventSequence("Test Timeline");
+    StmtMetadataLoader mdLoader =
+        new StmtMetadataLoader(fe, Catalog.DEFAULT_DB, timeline);
+    StmtTableCache stmtTableCache = mdLoader.loadTables(stmt);
+    // Validate metrics. Expect all tables to already be in the cache.
+    Assert.assertEquals(0, mdLoader.getNumLoadRequestsSent());
+    Assert.assertEquals(0, mdLoader.getNumCatalogUpdatesReceived());
+    // Validate timeline. Expect a single "everything is cached" event.
+    Assert.assertEquals(1, mdLoader.getTimeline().getNumEvents());
+    // Validate dbs and tables.
+    validateDbs(stmtTableCache, expectedDbs);
+    validateTables(stmtTableCache, expectedTables);
+  }
+
+  @Test
+  public void testSingleLoadRequest() throws ImpalaException {
+    // Single query block.
+    testLoadTables("select * from functional.alltypes", 1, 1,
+        new String[] {"default", "functional"},
+        new String[] {"functional.alltypes"});
+    // Single query block, multiple dbs and tables.
+    testLoadTables("select * from functional.alltypes, functional_parquet.alltypes, " +
+        "functional_avro.alltypes", 1, 1,
+        new String[] {"default", "functional", "functional_parquet", "functional_avro"},
+        new String[] {"functional.alltypes", "functional_parquet.alltypes",
+            "functional_avro.alltypes"});
+    // Single query block, test deduplication.
+    testLoadTables("select * from functional.alltypes, functional.alltypes, " +
+        "functional.alltypes", 1, 1,
+        new String[] {"default", "functional"},
+        new String[] {"functional.alltypes"});
+    // Multiple query blocks, multiple dbs and tables.
+    testLoadTables("with w as (select id from functional.alltypes) " +
+        "select * from w, (select id from functional.alltypessmall) v " +
+        "where v.id in (select id from functional.alltypestiny)", 1, 1,
+        new String[] {"default", "functional"},
+        new String[] {"functional.alltypes", "functional.alltypessmall",
+            "functional.alltypestiny"});
+    testLoadTables("select * from functional.alltypes union distinct " +
+        "select * from functional.alltypessmall union all " +
+        "select * from functional.alltypestiny", 1, 1,
+        new String[] {"default", "functional"},
+        new String[] {"functional.alltypes", "functional.alltypessmall",
+            "functional.alltypestiny"});
+    // Multiple query blocks, test deduplication.
+    testLoadTables("with w as (select id from functional.alltypes) " +
+        "select * from w, (select id from functional.alltypes) v " +
+        "where v.id in (select id from functional.alltypes)", 1, 1,
+        new String[] {"default", "functional"},
+        new String[] {"functional.alltypes"});
+    testLoadTables("select * from functional.alltypes union distinct " +
+        "select * from functional.alltypes union all " +
+        "select * from functional.alltypes", 1, 1,
+        new String[] {"default", "functional"},
+        new String[] {"functional.alltypes"});
+  }
+
+  @Test
+  public void testViewExpansion() throws ImpalaException {
+    // Test views:
+    // functional.alltypes_view references functional.alltypes
+    // functional.view_view references functional.alltypes_view
+    testLoadTables("select * from functional.alltypes_view", 2, 2,
+        new String[] {"default", "functional"},
+        new String[] {"functional.alltypes_view", "functional.alltypes"});
+    testLoadTables("select * from functional.view_view", 3, 3,
+        new String[] {"default", "functional"},
+        new String[] {"functional.view_view", "functional.alltypes_view",
+            "functional.alltypes"});
+    // Test deduplication.
+    testLoadTables("select * from functional.view_view, functional.view_view", 3, 3,
+        new String[] {"default", "functional"},
+        new String[] {"functional.view_view", "functional.alltypes_view",
+            "functional.alltypes"});
+    testLoadTables("select * from functional.alltypes, functional.view_view", 2, 2,
+        new String[] {"default", "functional"},
+        new String[] {"functional.view_view", "functional.alltypes_view",
+            "functional.alltypes"});
+    testLoadTables("select * from functional.alltypes_view, functional.view_view", 2, 2,
+        new String[] {"default", "functional"},
+        new String[] {"functional.view_view", "functional.alltypes_view",
+            "functional.alltypes"});
+    // All tables nested in views are also referenced at top level.
+    testLoadTables("select * from functional.alltypes, functional.alltypes_view, " +
+            "functional.view_view", 1, 1,
+        new String[] {"default", "functional"},
+        new String[] {"functional.view_view", "functional.alltypes_view",
+            "functional.alltypes"});
+  }
+}

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/ToSqlTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/ToSqlTest.java b/fe/src/test/java/org/apache/impala/analysis/ToSqlTest.java
index 41124f5..a813cce 100644
--- a/fe/src/test/java/org/apache/impala/analysis/ToSqlTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/ToSqlTest.java
@@ -19,7 +19,6 @@ package org.apache.impala.analysis;
 
 import static org.junit.Assert.fail;
 
-import org.apache.impala.authorization.AuthorizationConfig;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.common.FrontendTestBase;
 import org.apache.impala.testutil.TestUtils;
@@ -67,22 +66,6 @@ public class ToSqlTest extends FrontendTestBase {
     }
   }
 
-  private static AnalysisContext.AnalysisResult analyze(String query, String defaultDb) {
-    try {
-      AnalysisContext analysisCtx = new AnalysisContext(catalog_,
-          TestUtils.createQueryContext(defaultDb, System.getProperty("user.name")),
-          AuthorizationConfig.createAuthDisabledConfig());
-      analysisCtx.analyze(query);
-      AnalysisContext.AnalysisResult analysisResult = analysisCtx.getAnalysisResult();
-      Preconditions.checkNotNull(analysisResult.getStmt());
-      return analysisResult;
-    } catch (Exception e) {
-      e.printStackTrace();
-      fail("Failed to analyze query: " + query + "\n" + e.getMessage());
-    }
-    return null;
-  }
-
   private void testToSql(String query, String expected) {
     testToSql(query, System.getProperty("user.name"), expected);
   }
@@ -95,7 +78,7 @@ public class ToSqlTest extends FrontendTestBase {
       boolean ignoreWhitespace) {
     String actual = null;
     try {
-      ParseNode node = AnalyzesOk(query, createAnalyzer(defaultDb));
+      ParseNode node = AnalyzesOk(query, createAnalysisCtx(defaultDb));
       actual = node.toSql();
       if (ignoreWhitespace) {
         // Transform whitespace to single space.
@@ -112,7 +95,7 @@ public class ToSqlTest extends FrontendTestBase {
       fail("Failed to analyze query: " + query + "\n" + e.getMessage());
     }
     // Parse and analyze the resulting SQL to ensure its validity.
-    AnalyzesOk(actual, createAnalyzer(defaultDb));
+    AnalyzesOk(actual, createAnalysisCtx(defaultDb));
   }
 
   private void runTestTemplate(String sql, String expectedSql, String[]... testDims) {
@@ -151,7 +134,7 @@ public class ToSqlTest extends FrontendTestBase {
     Preconditions.checkState(query.contains("$TBL"));
     String uqQuery = query.replace("$TBL", tbl.getTbl());
     testToSql(uqQuery, tbl.getDb(), expectedSql);
-    AnalyzesOk(uqQuery, createAnalyzer(tbl.getDb()));
+    AnalyzesOk(uqQuery, createAnalysisCtx(tbl.getDb()));
     String fqQuery = query.replace("$TBL", tbl.toString());
     testToSql(fqQuery, expectedSql);
   }

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/common/FrontendTestBase.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/common/FrontendTestBase.java b/fe/src/test/java/org/apache/impala/common/FrontendTestBase.java
index c087f5a..9be3343 100644
--- a/fe/src/test/java/org/apache/impala/common/FrontendTestBase.java
+++ b/fe/src/test/java/org/apache/impala/common/FrontendTestBase.java
@@ -26,6 +26,7 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.impala.analysis.AnalysisContext;
+import org.apache.impala.analysis.AnalysisContext.AnalysisResult;
 import org.apache.impala.analysis.Analyzer;
 import org.apache.impala.analysis.ColumnDef;
 import org.apache.impala.analysis.CreateTableStmt;
@@ -36,6 +37,9 @@ import org.apache.impala.analysis.ParseNode;
 import org.apache.impala.analysis.QueryStmt;
 import org.apache.impala.analysis.SqlParser;
 import org.apache.impala.analysis.SqlScanner;
+import org.apache.impala.analysis.StatementBase;
+import org.apache.impala.analysis.StmtMetadataLoader;
+import org.apache.impala.analysis.StmtMetadataLoader.StmtTableCache;
 import org.apache.impala.authorization.AuthorizationConfig;
 import org.apache.impala.catalog.AggregateFunction;
 import org.apache.impala.catalog.Catalog;
@@ -44,7 +48,6 @@ import org.apache.impala.catalog.Column;
 import org.apache.impala.catalog.Db;
 import org.apache.impala.catalog.Function;
 import org.apache.impala.catalog.HdfsTable;
-import org.apache.impala.catalog.ImpaladCatalog;
 import org.apache.impala.catalog.KuduTable;
 import org.apache.impala.catalog.ScalarFunction;
 import org.apache.impala.catalog.ScalarType;
@@ -58,6 +61,7 @@ import org.apache.impala.testutil.TestUtils;
 import org.apache.impala.thrift.TFunctionBinaryType;
 import org.apache.impala.thrift.TQueryCtx;
 import org.apache.impala.thrift.TQueryOptions;
+import org.apache.impala.util.EventSequence;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -73,7 +77,7 @@ import com.google.common.collect.Lists;
  * as well as helper functions for creating test-local tables/views and UDF/UDAs.
  */
 public class FrontendTestBase {
-  protected static ImpaladCatalog catalog_ = new ImpaladTestCatalog();
+  protected static ImpaladTestCatalog catalog_ = new ImpaladTestCatalog();
   protected static Frontend frontend_ = new Frontend(
       AuthorizationConfig.createAuthDisabledConfig(), catalog_);
 
@@ -91,26 +95,6 @@ public class FrontendTestBase {
     RuntimeEnv.INSTANCE.setTestEnv(false);
   }
 
-  protected Analyzer createAnalyzer(String defaultDb) {
-    TQueryCtx queryCtx =
-        TestUtils.createQueryContext(defaultDb, System.getProperty("user.name"));
-    return new Analyzer(catalog_, queryCtx,
-        AuthorizationConfig.createAuthDisabledConfig());
-  }
-
-  protected Analyzer createAnalyzer(TQueryOptions queryOptions) {
-    TQueryCtx queryCtx = TestUtils.createQueryContext();
-    queryCtx.client_request.query_options = queryOptions;
-    return new Analyzer(catalog_, queryCtx,
-        AuthorizationConfig.createAuthDisabledConfig());
-  }
-
-  protected Analyzer createAnalyzerUsingHiveColLabels() {
-    Analyzer analyzer = createAnalyzer(Catalog.DEFAULT_DB);
-    analyzer.setUseHiveColLabels(true);
-    return analyzer;
-  }
-
   // Adds a Udf: default.name(args) to the catalog.
   // TODO: we could consider having this be the sql to run instead but that requires
   // connecting to the BE.
@@ -292,7 +276,11 @@ public class FrontendTestBase {
    * Analyze 'stmt', expecting it to pass. Asserts in case of analysis error.
    */
   public ParseNode AnalyzesOk(String stmt) {
-    return AnalyzesOk(stmt, createAnalyzer(Catalog.DEFAULT_DB), null);
+    return AnalyzesOk(stmt, createAnalysisCtx(), null);
+  }
+
+  public ParseNode AnalyzesOk(String stmt, AnalysisContext analysisCtx) {
+    return AnalyzesOk(stmt, analysisCtx, null);
   }
 
   /**
@@ -300,21 +288,56 @@ public class FrontendTestBase {
    * If 'expectedWarning' is not null, asserts that a warning is produced.
    */
   public ParseNode AnalyzesOk(String stmt, String expectedWarning) {
-    return AnalyzesOk(stmt, createAnalyzer(Catalog.DEFAULT_DB), expectedWarning);
+    return AnalyzesOk(stmt, createAnalysisCtx(), expectedWarning);
+  }
+
+  protected AnalysisContext createAnalysisCtx() {
+    return createAnalysisCtx(Catalog.DEFAULT_DB);
+  }
+
+  protected AnalysisContext createAnalysisCtx(String defaultDb) {
+    TQueryCtx queryCtx = TestUtils.createQueryContext(
+        defaultDb, System.getProperty("user.name"));
+    EventSequence timeline = new EventSequence("Frontend Test Timeline");
+    AnalysisContext analysisCtx = new AnalysisContext(queryCtx,
+        AuthorizationConfig.createAuthDisabledConfig(), timeline);
+    return analysisCtx;
+  }
+
+  protected AnalysisContext createAnalysisCtx(TQueryOptions queryOptions) {
+    TQueryCtx queryCtx = TestUtils.createQueryContext();
+    queryCtx.client_request.query_options = queryOptions;
+    EventSequence timeline = new EventSequence("Frontend Test Timeline");
+    AnalysisContext analysisCtx = new AnalysisContext(queryCtx,
+        AuthorizationConfig.createAuthDisabledConfig(), timeline);
+    return analysisCtx;
+  }
+
+  protected AnalysisContext createAnalysisCtx(AuthorizationConfig authzConfig) {
+    return createAnalysisCtx(authzConfig, System.getProperty("user.name"));
+  }
+
+  protected AnalysisContext createAnalysisCtx(AuthorizationConfig authzConfig,
+      String user) {
+    TQueryCtx queryCtx = TestUtils.createQueryContext(Catalog.DEFAULT_DB, user);
+    EventSequence timeline = new EventSequence("Frontend Test Timeline");
+    AnalysisContext analysisCtx = new AnalysisContext(queryCtx, authzConfig, timeline);
+    return analysisCtx;
+  }
+
+  protected AnalysisContext createAnalysisCtxUsingHiveColLabels() {
+    AnalysisContext analysisCtx = createAnalysisCtx();
+    analysisCtx.setUseHiveColLabels(true);
+    return analysisCtx;
   }
 
   /**
    * Analyze 'stmt', expecting it to pass. Asserts in case of analysis error.
    * If 'expectedWarning' is not null, asserts that a warning is produced.
    */
-  public ParseNode AnalyzesOk(String stmt, Analyzer analyzer, String expectedWarning) {
+  public ParseNode AnalyzesOk(String stmt, AnalysisContext ctx, String expectedWarning) {
     try {
-      AnalysisContext analysisCtx = new AnalysisContext(catalog_,
-          TestUtils.createQueryContext(Catalog.DEFAULT_DB,
-            System.getProperty("user.name")),
-          AuthorizationConfig.createAuthDisabledConfig());
-      analysisCtx.analyze(stmt, analyzer);
-      AnalysisContext.AnalysisResult analysisResult = analysisCtx.getAnalysisResult();
+      AnalysisResult analysisResult = parseAndAnalyze(stmt, ctx);
       if (expectedWarning != null) {
         List<String> actualWarnings = analysisResult.getAnalyzer().getWarnings();
         boolean matchedWarning = false;
@@ -340,17 +363,23 @@ public class FrontendTestBase {
   }
 
   /**
-   * Asserts if stmt passes analysis.
+   * Analyzes the given statement without performing rewrites or authorization.
    */
-  public void AnalysisError(String stmt) {
-    AnalysisError(stmt, null);
+  public StatementBase AnalyzesOkNoRewrite(StatementBase stmt) throws ImpalaException {
+    AnalysisContext ctx = createAnalysisCtx();
+    StmtMetadataLoader mdLoader =
+        new StmtMetadataLoader(frontend_, ctx.getQueryCtx().session.database, null);
+    StmtTableCache loadedTables = mdLoader.loadTables(stmt);
+    Analyzer analyzer = ctx.createAnalyzer(loadedTables);
+    stmt.analyze(analyzer);
+    return stmt;
   }
 
   /**
-   * Analyze 'stmt', expecting it to pass. Asserts in case of analysis error.
+   * Asserts if stmt passes analysis.
    */
-  public ParseNode AnalyzesOk(String stmt, Analyzer analyzer) {
-    return AnalyzesOk(stmt, analyzer, null);
+  public void AnalysisError(String stmt) {
+    AnalysisError(stmt, null);
   }
 
   /**
@@ -358,22 +387,17 @@ public class FrontendTestBase {
    * is non-null.
    */
   public void AnalysisError(String stmt, String expectedErrorString) {
-    AnalysisError(stmt, createAnalyzer(Catalog.DEFAULT_DB), expectedErrorString);
+    AnalysisError(stmt, createAnalysisCtx(), expectedErrorString);
   }
 
   /**
    * Asserts if stmt passes analysis or the error string doesn't match and it
    * is non-null.
    */
-  public void AnalysisError(String stmt, Analyzer analyzer, String expectedErrorString) {
+  public void AnalysisError(String stmt, AnalysisContext ctx, String expectedErrorString) {
     Preconditions.checkNotNull(expectedErrorString, "No expected error message given.");
     try {
-      AnalysisContext analysisCtx = new AnalysisContext(catalog_,
-          TestUtils.createQueryContext(Catalog.DEFAULT_DB,
-              System.getProperty("user.name")),
-              AuthorizationConfig.createAuthDisabledConfig());
-      analysisCtx.analyze(stmt, analyzer);
-      AnalysisContext.AnalysisResult analysisResult = analysisCtx.getAnalysisResult();
+      AnalysisResult analysisResult = parseAndAnalyze(stmt, ctx);
       Preconditions.checkNotNull(analysisResult.getStmt());
     } catch (Exception e) {
       String errorString = e.getMessage();
@@ -385,4 +409,18 @@ public class FrontendTestBase {
     }
     fail("Stmt didn't result in analysis error: " + stmt);
   }
+
+  protected AnalysisResult parseAndAnalyze(String stmt, AnalysisContext ctx)
+      throws ImpalaException {
+    return parseAndAnalyze(stmt, ctx, frontend_);
+  }
+
+  protected AnalysisResult parseAndAnalyze(String stmt, AnalysisContext ctx, Frontend fe)
+      throws ImpalaException {
+    StatementBase parsedStmt = fe.parse(stmt);
+    StmtMetadataLoader mdLoader =
+        new StmtMetadataLoader(fe, ctx.getQueryCtx().session.database, null);
+    StmtTableCache stmtTableCache = mdLoader.loadTables(parsedStmt);
+    return ctx.analyzeAndAuthorize(parsedStmt, stmtTableCache, fe.getAuthzChecker());
+  }
 }

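With these helpers, tests configure analysis through an AnalysisContext instead of
constructing an Analyzer directly. A minimal sketch of the new pattern (the queries
and the expected error substring are illustrative only):

    // Analyze against a non-default database.
    AnalyzesOk("select id from alltypes", createAnalysisCtx("functional"));
    // Analyze with Hive column labels enabled.
    AnalyzesOk("select int_col + int_col from functional.alltypes",
        createAnalysisCtxUsingHiveColLabels());
    // Expect an analysis error whose message matches the given string.
    AnalysisError("select * from functional.bad_tbl", createAnalysisCtx(),
        "Could not resolve table reference");
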
http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/service/FrontendTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/service/FrontendTest.java b/fe/src/test/java/org/apache/impala/service/FrontendTest.java
index c87882d..de8b7ac 100644
--- a/fe/src/test/java/org/apache/impala/service/FrontendTest.java
+++ b/fe/src/test/java/org/apache/impala/service/FrontendTest.java
@@ -20,38 +20,28 @@ package org.apache.impala.service;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 
 import java.util.Arrays;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.hive.service.rpc.thrift.TGetCatalogsReq;
 import org.apache.hive.service.rpc.thrift.TGetColumnsReq;
 import org.apache.hive.service.rpc.thrift.TGetFunctionsReq;
 import org.apache.hive.service.rpc.thrift.TGetInfoReq;
 import org.apache.hive.service.rpc.thrift.TGetSchemasReq;
 import org.apache.hive.service.rpc.thrift.TGetTablesReq;
-import org.junit.Test;
-import org.apache.impala.analysis.AuthorizationTest;
-import org.apache.impala.authorization.AuthorizationConfig;
 import org.apache.impala.catalog.Db;
-import org.apache.impala.catalog.Catalog;
 import org.apache.impala.catalog.PrimitiveType;
 import org.apache.impala.catalog.Table;
-import org.apache.impala.common.AnalysisException;
 import org.apache.impala.common.FrontendTestBase;
 import org.apache.impala.common.ImpalaException;
-import org.apache.impala.testutil.ImpaladTestCatalog;
-import org.apache.impala.testutil.TestUtils;
 import org.apache.impala.thrift.TMetadataOpRequest;
 import org.apache.impala.thrift.TMetadataOpcode;
-import org.apache.impala.thrift.TQueryCtx;
 import org.apache.impala.thrift.TResultRow;
 import org.apache.impala.thrift.TResultSet;
+import org.junit.Test;
 
-import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
@@ -205,15 +195,15 @@ public class FrontendTest extends FrontendTestBase {
       }
     }
 
-    // Make sure tables that can't be loaded don't result in errors in the GetTables
-    // request (see IMPALA-5579)
+    // IMPALA-5579: GetTables() should succeed and display the available information for
+    // tables that cannot be loaded.
     req = new TMetadataOpRequest();
     req.opcode = TMetadataOpcode.GET_TABLES;
     req.get_tables_req = new TGetTablesReq();
     req.get_tables_req.setSchemaName("functional");
     req.get_tables_req.setTableName("hive_index_tbl");
     resp = execMetadataOp(req);
-    assertEquals(0, resp.rows.size());
+    assertEquals(1, resp.rows.size());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/testutil/ImpaladTestCatalog.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/testutil/ImpaladTestCatalog.java b/fe/src/test/java/org/apache/impala/testutil/ImpaladTestCatalog.java
index 7e8ff46..32b5ee7 100644
--- a/fe/src/test/java/org/apache/impala/testutil/ImpaladTestCatalog.java
+++ b/fe/src/test/java/org/apache/impala/testutil/ImpaladTestCatalog.java
@@ -17,15 +17,19 @@
 
 package org.apache.impala.testutil;
 
+import java.util.Set;
+
+import org.apache.impala.analysis.TableName;
 import org.apache.impala.authorization.AuthorizationConfig;
 import org.apache.impala.catalog.CatalogException;
 import org.apache.impala.catalog.CatalogServiceCatalog;
 import org.apache.impala.catalog.Db;
 import org.apache.impala.catalog.HdfsCachePool;
+import org.apache.impala.catalog.HdfsTable;
 import org.apache.impala.catalog.ImpaladCatalog;
 import org.apache.impala.catalog.Table;
-import org.apache.impala.catalog.HdfsTable;
 import org.apache.impala.util.PatternMatcher;
+
 import com.google.common.base.Preconditions;
 
 /**
@@ -68,27 +72,46 @@ public class ImpaladTestCatalog extends ImpaladCatalog {
   public void reset() throws CatalogException { srcCatalog_.reset(); }
 
   /**
-   * Overrides ImpaladCatalog.getTable to load the table metadata if it is missing.
+   * Returns the Table for the given name, loading the table's metadata if necessary.
+   * Returns null if the database or table does not exist.
    */
-  @Override
-  public Table getTable(String dbName, String tableName)
-      throws CatalogException {
-    Table existingTbl = super.getTable(dbName, tableName);
-    // Table doesn't exist or is already loaded. Just return it.
+  public Table getOrLoadTable(String dbName, String tblName) {
+    Db db = getDb(dbName);
+    if (db == null) return null;
+    Table existingTbl = db.getTable(tblName);
+    // Table doesn't exist or is already loaded.
     if (existingTbl == null || existingTbl.isLoaded()) return existingTbl;
 
-    // The table was not yet loaded. Load it in to the catalog and try getTable()
-    // again.
-    Table newTbl = srcCatalog_.getOrLoadTable(dbName,  tableName);
+    // The table was not yet loaded. Load it in to the catalog now.
+    Table newTbl = null;
+    try {
+      newTbl = srcCatalog_.getOrLoadTable(dbName, tblName);
+    } catch (CatalogException e) {
+      throw new IllegalStateException("Unexpected table loading failure.", e);
+    }
     Preconditions.checkNotNull(newTbl);
     Preconditions.checkState(newTbl.isLoaded());
-    Db db = getDb(dbName);
-    Preconditions.checkNotNull(db);
-    db.addTable(newTbl);
-    Table resultTable = super.getTable(dbName, tableName);
-    if (resultTable instanceof HdfsTable) {
-      ((HdfsTable) resultTable).computeHdfsStatsForTesting();
+    if (newTbl instanceof HdfsTable) {
+      ((HdfsTable) newTbl).computeHdfsStatsForTesting();
     }
-    return resultTable;
+    db.addTable(newTbl);
+    return newTbl;
+  }
+
+  /**
+   * Fast loading path for FE unit testing. Immediately loads the given tables into
+   * this catalog on the calling thread without involving the catalogd/statestored.
+   */
+  @Override
+  public void prioritizeLoad(Set<TableName> tableNames) {
+    for (TableName tbl: tableNames) getOrLoadTable(tbl.getDb(), tbl.getTbl());
+  }
+
+  /**
+   * No-op. Metadata loading does not go through the catalogd/statestored in a
+   * FE test environment.
+   */
+  @Override
+  public void waitForCatalogUpdate(long timeoutMs) {
   }
 }

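Because prioritizeLoad() above loads tables synchronously and waitForCatalogUpdate()
is a no-op, a StmtMetadataLoader running against this catalog completes without any
catalogd/statestored round trip. A condensed sketch of the resulting FE-test flow
(a null timeline is passed since no events need to be marked):

    ImpaladTestCatalog catalog = new ImpaladTestCatalog();
    Frontend fe = new Frontend(AuthorizationConfig.createAuthDisabledConfig(), catalog);
    StatementBase stmt = fe.parse("select * from functional.alltypes");
    StmtTableCache tables =
        new StmtMetadataLoader(fe, Catalog.DEFAULT_DB, null).loadTables(stmt);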

[2/3] impala git commit: IMPALA-5152: Introduce metadata loading phase

Posted by ta...@apache.org.
http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/StmtMetadataLoader.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/StmtMetadataLoader.java b/fe/src/main/java/org/apache/impala/analysis/StmtMetadataLoader.java
new file mode 100644
index 0000000..be71161
--- /dev/null
+++ b/fe/src/main/java/org/apache/impala/analysis/StmtMetadataLoader.java
@@ -0,0 +1,280 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package org.apache.impala.analysis;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.impala.catalog.Db;
+import org.apache.impala.catalog.ImpaladCatalog;
+import org.apache.impala.catalog.Table;
+import org.apache.impala.catalog.View;
+import org.apache.impala.common.InternalException;
+import org.apache.impala.service.Frontend;
+import org.apache.impala.util.EventSequence;
+import org.apache.impala.util.TUniqueIdUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
+/**
+ * Loads all table and view metadata relevant for a single SQL statement and returns the
+ * loaded tables in a StmtTableCache. Optionally marks important loading events in an
+ * EventSequence.
+ */
+public class StmtMetadataLoader {
+  private final static Logger LOG = LoggerFactory.getLogger(StmtMetadataLoader.class);
+
+  // Events are triggered once at least this many catalog updates have been received.
+  private final long DEBUG_LOGGING_NUM_CATALOG_UPDATES = 10;
+  private final long RETRY_LOAD_NUM_CATALOG_UPDATES = 20;
+
+  private final Frontend fe_;
+  private final String sessionDb_;
+  private final EventSequence timeline_;
+
+  // Results of the loading process. See StmtTableCache.
+  private final Set<String> dbs_ = Sets.newHashSet();
+  private final Map<TableName, Table> loadedTbls_ = Maps.newHashMap();
+
+  // Metrics for the metadata load.
+  // Number of prioritizedLoad() RPCs issued to the catalogd.
+  private int numLoadRequestsSent_ = 0;
+  // Number of catalog topic updates received from the statestore.
+  private int numCatalogUpdatesReceived_ = 0;
+
+  /**
+   * Contains all statement-relevant tables and database names as well as the latest
+   * ImpaladCatalog. An entry in the tables map is guaranteed to point to a loaded
+   * table. This could mean the table was loaded successfully or a load was attempted
+   * but failed. The absence of a table or database name indicates that object was not
+   * in the Catalog at the time this StmtTableCache was generated.
+   */
+  public static final class StmtTableCache {
+    public final ImpaladCatalog catalog;
+    public final Set<String> dbs;
+    public final Map<TableName, Table> tables;
+
+    public StmtTableCache(ImpaladCatalog catalog, Set<String> dbs,
+        Map<TableName, Table> tables) {
+      this.catalog = Preconditions.checkNotNull(catalog);
+      this.dbs = Preconditions.checkNotNull(dbs);
+      this.tables = Preconditions.checkNotNull(tables);
+      validate();
+    }
+
+    private void validate() {
+      // Checks that all entries in 'tables' have a matching entry in 'dbs'.
+      for (TableName tbl: tables.keySet()) {
+        Preconditions.checkState(dbs.contains(tbl.getDb()));
+      }
+    }
+  }
+
+  /**
+   * The 'fe' and 'sessionDb' arguments must be non-null. A null 'timeline' may be passed
+   * if no events should be marked.
+   */
+  public StmtMetadataLoader(Frontend fe, String sessionDb, EventSequence timeline) {
+    fe_ = Preconditions.checkNotNull(fe);
+    sessionDb_ = Preconditions.checkNotNull(sessionDb);
+    timeline_ = timeline;
+  }
+
+  // Getters for testing
+  public EventSequence getTimeline() { return timeline_; }
+  public int getNumLoadRequestsSent() { return numLoadRequestsSent_; }
+  public int getNumCatalogUpdatesReceived() { return numCatalogUpdatesReceived_; }
+
+  /**
+   * Collects and loads all tables and views required to analyze the given statement.
+   * Marks the start and end of metadata loading in 'timeline_' if it is non-NULL.
+   * Must only be called once for a single statement.
+   */
+  public StmtTableCache loadTables(StatementBase stmt) throws InternalException {
+    Set<TableName> requiredTables = collectTableCandidates(stmt);
+    return loadTables(requiredTables);
+  }
+
+  /**
+   * Loads the tables/views with the given names and returns them. As views become
+   * loaded, the set of table/views still to be loaded is expanded based on the view
+   * definitions. For tables/views missing metadata this function issues a loading
+   * request to the catalog server and then waits for the metadata to arrive through
+   * a statestore topic update.
+   * This function succeeds even across catalog restarts for the following reasons:
+   * - The loading process is strictly additive, i.e., a new loaded table may be added
+   *   to the 'loadedTbls_' map, but an existing entry is never removed, even if the
+   *   equivalent table in the impalad catalog is different.
+   * - Tables on the impalad side are not modified in place. This means that an entry in
+   *   the 'loadedTbls_' map will always remain in the loaded state.
+   * Tables/views that are already loaded are simply included in the result.
+   * Marks the start and end of metadata loading in 'timeline_' if it is non-NULL.
+   * Must only be called once for a single statement.
+   */
+  public StmtTableCache loadTables(Set<TableName> tbls) throws InternalException {
+    Preconditions.checkState(dbs_.isEmpty() && loadedTbls_.isEmpty());
+    Preconditions.checkState(numLoadRequestsSent_ == 0);
+    Preconditions.checkState(numCatalogUpdatesReceived_ == 0);
+    ImpaladCatalog catalog = fe_.getCatalog();
+    Set<TableName> missingTbls = getMissingTables(catalog, tbls);
+    // If there are no missing tables, return early to avoid making an RPC to the
+    // CatalogServer and adding events to the timeline.
+    if (missingTbls.isEmpty()) {
+      if (timeline_ != null) {
+        timeline_.markEvent(
+            String.format("Metadata of all %d tables cached", loadedTbls_.size()));
+      }
+      return new StmtTableCache(catalog, dbs_, loadedTbls_);
+    }
+
+    if (timeline_ != null) timeline_.markEvent("Metadata load started");
+    long startTimeMs = System.currentTimeMillis();
+
+    // All tables for which we have requested a prioritized load.
+    Set<TableName> requestedTbls = Sets.newHashSet();
+
+    // Loading a fixed set of tables happens in two steps:
+    // 1) Issue a loading request RPC to the catalogd.
+    // 2) Wait for the loaded tables to arrive via the statestore.
+    // The second step could take a while and we should avoid repeatedly issuing
+    // redundant RPCs to the catalogd. This flag indicates whether a loading RPC
+    // should be issued. See below for details on the circumstances in which this
+    // flag is set to true.
+    boolean issueLoadRequest = true;
+    // Loop until all the missing tables are loaded in the Impalad's catalog cache.
+    // In every iteration of this loop we wait for one catalog update to arrive.
+    while (!missingTbls.isEmpty()) {
+      if (issueLoadRequest) {
+        catalog.prioritizeLoad(missingTbls);
+        ++numLoadRequestsSent_;
+        requestedTbls.addAll(missingTbls);
+      }
+
+      // Catalog may have been restarted, always use the latest reference.
+      ImpaladCatalog currCatalog = fe_.getCatalog();
+      boolean hasCatalogRestarted = currCatalog != catalog;
+      if (hasCatalogRestarted && LOG.isWarnEnabled()) {
+        LOG.warn(String.format(
+            "Catalog restart detected while waiting for table metadata. " +
+            "Current catalog service id: %s. Previous catalog service id: %s",
+            TUniqueIdUtil.PrintId(currCatalog.getCatalogServiceId()),
+            TUniqueIdUtil.PrintId(catalog.getCatalogServiceId())));
+      }
+      catalog = currCatalog;
+
+      // Log progress and wait time for debugging.
+      if (hasCatalogRestarted
+          || (numCatalogUpdatesReceived_ > 0
+              && numCatalogUpdatesReceived_ % DEBUG_LOGGING_NUM_CATALOG_UPDATES == 0)) {
+        if (LOG.isInfoEnabled()) {
+          long endTimeMs = System.currentTimeMillis();
+          LOG.info(String.format("Waiting for table metadata. " +
+              "Waited for %d catalog updates and %dms. Tables remaining: %s",
+              numCatalogUpdatesReceived_, endTimeMs - startTimeMs, missingTbls));
+        }
+      }
+
+      // Wait for the next catalog update and then revise the loaded/missing tables.
+      catalog.waitForCatalogUpdate(Frontend.MAX_CATALOG_UPDATE_WAIT_TIME_MS);
+      Set<TableName> newMissingTbls = getMissingTables(catalog, missingTbls);
+      // Issue a load request for the new missing tables in these cases:
+      // 1) Catalog has restarted so all in-flight loads have been lost
+      // 2) There are new missing tables due to view expansion
+      issueLoadRequest = hasCatalogRestarted || !missingTbls.containsAll(newMissingTbls);
+      // 3) Periodically retry to avoid a hang due to anomalies/bugs, e.g.,
+      //    a previous load request was somehow lost on the catalog side, or the table
+      //    was invalidated after being loaded but before being sent to this impalad
+      if (!issueLoadRequest && numCatalogUpdatesReceived_ > 0
+          && numCatalogUpdatesReceived_ % RETRY_LOAD_NUM_CATALOG_UPDATES == 0) {
+        issueLoadRequest = true;
+        if (LOG.isInfoEnabled()) {
+          long endTimeMs = System.currentTimeMillis();
+          LOG.info(String.format("Re-sending prioritized load request. " +
+              "Waited for %d catalog updates and %dms.",
+              numCatalogUpdatesReceived_, endTimeMs - startTimeMs));
+        }
+      }
+      missingTbls = newMissingTbls;
+      ++numCatalogUpdatesReceived_;
+    }
+    if (timeline_ != null) {
+      timeline_.markEvent(String.format("Metadata load finished. " +
+          "loaded-tables=%d/%d load-requests=%d catalog-updates=%d",
+          requestedTbls.size(), loadedTbls_.size(), numLoadRequestsSent_,
+          numCatalogUpdatesReceived_));
+    }
+
+    return new StmtTableCache(catalog, dbs_, loadedTbls_);
+  }
+
+  /**
+   * Determines which of the given 'tbls' are loaded in the given catalog. Adds the names
+   * of referenced databases that exist to 'dbs_', and loaded tables to 'loadedTbls_'.
+   * Returns the set of tables that are not loaded. Recursively collects loaded/missing
+   * tables from views. Uses 'sessionDb_' to construct table candidates from views with
+   * Path.getCandidateTables(). Non-existent tables are ignored and not returned or
+   * added to 'loadedTbls_'.
+   */
+  private Set<TableName> getMissingTables(ImpaladCatalog catalog, Set<TableName> tbls) {
+    Set<TableName> missingTbls = Sets.newHashSet();
+    Set<TableName> viewTbls = Sets.newHashSet();
+    for (TableName tblName: tbls) {
+      if (loadedTbls_.containsKey(tblName)) continue;
+      Db db = catalog.getDb(tblName.getDb());
+      if (db == null) continue;
+      dbs_.add(tblName.getDb());
+      Table tbl = db.getTable(tblName.getTbl());
+      if (tbl == null) continue;
+      if (!tbl.isLoaded()) {
+        missingTbls.add(tblName);
+        continue;
+      }
+      loadedTbls_.put(tblName, tbl);
+      if (tbl instanceof View) {
+        viewTbls.addAll(collectTableCandidates(((View) tbl).getQueryStmt()));
+      }
+    }
+    // Recursively collect loaded/missing tables from loaded views.
+    if (!viewTbls.isEmpty()) missingTbls.addAll(getMissingTables(catalog, viewTbls));
+    return missingTbls;
+  }
+
+  /**
+   * Returns the set of tables whose metadata needs to be loaded for the analysis of the
+   * given 'stmt' to succeed. This is done by collecting all table references from 'stmt'
+   * and generating all possible table-path resolutions considered during analysis.
+   * Uses 'sessionDb_' to construct the candidate tables with Path.getCandidateTables().
+   */
+  private Set<TableName> collectTableCandidates(StatementBase stmt) {
+    Preconditions.checkNotNull(stmt);
+    List<TableRef> tblRefs = Lists.newArrayList();
+    stmt.collectTableRefs(tblRefs);
+    Set<TableName> tableNames = Sets.newHashSet();
+    for (TableRef ref: tblRefs) {
+      tableNames.addAll(Path.getCandidateTables(ref.getPath(), sessionDb_));
+    }
+    return tableNames;
+  }
+}

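Putting it together, compilation now proceeds in two explicit phases: collect and load
all referenced tables, then analyze against the resulting StmtTableCache. A minimal
sketch of the caller side, mirroring FrontendTestBase.parseAndAnalyze() (variable
names are illustrative):

    StatementBase parsedStmt = fe.parse(stmtStr);
    StmtMetadataLoader mdLoader = new StmtMetadataLoader(fe, sessionDb, timeline);
    // Phase 1: load all table/view metadata referenced by the statement.
    StmtTableCache stmtTableCache = mdLoader.loadTables(parsedStmt);
    // Phase 2: analyze and authorize against the loaded tables.
    AnalysisResult result = analysisCtx.analyzeAndAuthorize(
        parsedStmt, stmtTableCache, fe.getAuthzChecker());
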
http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/TruncateStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/TruncateStmt.java b/fe/src/main/java/org/apache/impala/analysis/TruncateStmt.java
index 714e9b1..4f5dbfb 100644
--- a/fe/src/main/java/org/apache/impala/analysis/TruncateStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/TruncateStmt.java
@@ -17,6 +17,8 @@
 
 package org.apache.impala.analysis;
 
+import java.util.List;
+
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.HdfsTable;
 import org.apache.impala.catalog.Table;
@@ -47,12 +49,17 @@ public class TruncateStmt extends StatementBase {
   }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableName_.toPath(), null));
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     tableName_ = analyzer.getFqTableName(tableName_);
     try {
       table_ = analyzer.getTable(tableName_, Privilege.INSERT);
     } catch (AnalysisException e) {
-      if (ifExists_ && analyzer.getMissingTbls().isEmpty()) return;
+      if (ifExists_) return;
       throw e;
     }
     // We only support truncating hdfs tables now.

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/UnionStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/UnionStmt.java b/fe/src/main/java/org/apache/impala/analysis/UnionStmt.java
index f811e49..bb472a2 100644
--- a/fe/src/main/java/org/apache/impala/analysis/UnionStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/UnionStmt.java
@@ -162,6 +162,8 @@ public class UnionStmt extends QueryStmt {
   public UnionStmt(List<UnionOperand> operands,
       ArrayList<OrderByElement> orderByElements, LimitElement limitElement) {
     super(orderByElements, limitElement);
+    Preconditions.checkNotNull(operands);
+    Preconditions.checkState(operands.size() > 0);
     operands_ = operands;
   }
 
@@ -207,12 +209,7 @@ public class UnionStmt extends QueryStmt {
   @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     if (isAnalyzed()) return;
-    try {
-      super.analyze(analyzer);
-    } catch (AnalysisException e) {
-      if (analyzer.getMissingTbls().isEmpty()) throw e;
-    }
-    Preconditions.checkState(operands_.size() > 0);
+    super.analyze(analyzer);
 
     // Propagates DISTINCT from right to left.
     propagateDistinct();
@@ -279,25 +276,18 @@ public class UnionStmt extends QueryStmt {
    */
   private void analyzeOperands(Analyzer analyzer) throws AnalysisException {
     for (int i = 0; i < operands_.size(); ++i) {
-      try {
-        operands_.get(i).analyze(analyzer);
-        QueryStmt firstQuery = operands_.get(0).getQueryStmt();
-        List<Expr> firstExprs = operands_.get(0).getQueryStmt().getResultExprs();
-        QueryStmt query = operands_.get(i).getQueryStmt();
-        List<Expr> exprs = query.getResultExprs();
-        if (firstExprs.size() != exprs.size()) {
-          throw new AnalysisException("Operands have unequal number of columns:\n" +
-              "'" + queryStmtToSql(firstQuery) + "' has " +
-              firstExprs.size() + " column(s)\n" +
-              "'" + queryStmtToSql(query) + "' has " + exprs.size() + " column(s)");
-        }
-      } catch (AnalysisException e) {
-        if (analyzer.getMissingTbls().isEmpty()) throw e;
+      operands_.get(i).analyze(analyzer);
+      QueryStmt firstQuery = operands_.get(0).getQueryStmt();
+      List<Expr> firstExprs = operands_.get(0).getQueryStmt().getResultExprs();
+      QueryStmt query = operands_.get(i).getQueryStmt();
+      List<Expr> exprs = query.getResultExprs();
+      if (firstExprs.size() != exprs.size()) {
+        throw new AnalysisException("Operands have unequal number of columns:\n" +
+            "'" + queryStmtToSql(firstQuery) + "' has " +
+            firstExprs.size() + " column(s)\n" +
+            "'" + queryStmtToSql(query) + "' has " + exprs.size() + " column(s)");
       }
     }
-    if (!analyzer.getMissingTbls().isEmpty()) {
-      throw new AnalysisException("Found missing tables. Aborting analysis.");
-    }
   }
 
   /**
@@ -552,8 +542,11 @@ public class UnionStmt extends QueryStmt {
   }
 
   @Override
-  public void collectTableRefs(List<TableRef> tblRefs) {
-    for (UnionOperand op: operands_) op.getQueryStmt().collectTableRefs(tblRefs);
+  public void collectTableRefs(List<TableRef> tblRefs, boolean fromClauseOnly) {
+    super.collectTableRefs(tblRefs, fromClauseOnly);
+    for (UnionOperand op: operands_) {
+      op.getQueryStmt().collectTableRefs(tblRefs, fromClauseOnly);
+    }
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/WithClause.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/WithClause.java b/fe/src/main/java/org/apache/impala/analysis/WithClause.java
index 3e70764..1a771cd 100644
--- a/fe/src/main/java/org/apache/impala/analysis/WithClause.java
+++ b/fe/src/main/java/org/apache/impala/analysis/WithClause.java
@@ -23,6 +23,7 @@ import java.util.List;
 import org.apache.impala.authorization.PrivilegeRequest;
 import org.apache.impala.catalog.View;
 import org.apache.impala.common.AnalysisException;
+
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -74,30 +75,23 @@ public class WithClause implements ParseNode {
     Analyzer withClauseAnalyzer = Analyzer.createWithNewGlobalState(analyzer);
     withClauseAnalyzer.setIsWithClause();
     if (analyzer.isExplain()) withClauseAnalyzer.setIsExplain();
-    try {
-      for (View view: views_) {
-        Analyzer viewAnalyzer = new Analyzer(withClauseAnalyzer);
-        view.getQueryStmt().analyze(viewAnalyzer);
-        // Register this view so that the next view can reference it.
-        withClauseAnalyzer.registerLocalView(view);
-      }
-      // Register all local views with the analyzer.
-      for (View localView: withClauseAnalyzer.getLocalViews().values()) {
-        analyzer.registerLocalView(localView);
-      }
-      // Record audit events because the resolved table references won't generate any
-      // when a view is referenced.
-      analyzer.getAccessEvents().addAll(withClauseAnalyzer.getAccessEvents());
+    for (View view: views_) {
+      Analyzer viewAnalyzer = new Analyzer(withClauseAnalyzer);
+      view.getQueryStmt().analyze(viewAnalyzer);
+      // Register this view so that the next view can reference it.
+      withClauseAnalyzer.registerLocalView(view);
+    }
+    // Register all local views with the analyzer.
+    for (View localView: withClauseAnalyzer.getLocalViews().values()) {
+      analyzer.registerLocalView(localView);
+    }
+    // Record audit events because the resolved table references won't generate any
+    // when a view is referenced.
+    analyzer.getAccessEvents().addAll(withClauseAnalyzer.getAccessEvents());
 
-      // Register all privilege requests made from the root analyzer.
-      for (PrivilegeRequest req: withClauseAnalyzer.getPrivilegeReqs()) {
-        analyzer.registerPrivReq(req);
-      }
-    } finally {
-      // Record missing tables in the original analyzer.
-      if (analyzer.isRootAnalyzer()) {
-        analyzer.getMissingTbls().addAll(withClauseAnalyzer.getMissingTbls());
-      }
+    // Register all privilege requests made from the root analyzer.
+    for (PrivilegeRequest req: withClauseAnalyzer.getPrivilegeReqs()) {
+      analyzer.registerPrivReq(req);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/catalog/Catalog.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/catalog/Catalog.java b/fe/src/main/java/org/apache/impala/catalog/Catalog.java
index ff7b1e4..0cd1eda 100644
--- a/fe/src/main/java/org/apache/impala/catalog/Catalog.java
+++ b/fe/src/main/java/org/apache/impala/catalog/Catalog.java
@@ -141,21 +141,27 @@ public abstract class Catalog {
   }
 
   /**
-   * Returns the Table object for the given dbName/tableName. If 'throwIfError' is true,
-   * an exception is thrown if the associated database does not exist. Otherwise, null is
-   * returned.
+   * Returns the Table object for the given dbName/tableName or null if the database or
+   * table does not exist.
    */
-  public Table getTable(String dbName, String tableName, boolean throwIfError)
-      throws CatalogException {
+  public Table getTableNoThrow(String dbName, String tableName) {
     Db db = getDb(dbName);
-    if (db == null && throwIfError) {
-      throw new DatabaseNotFoundException("Database '" + dbName + "' not found");
-    }
+    if (db == null) return null;
     return db.getTable(tableName);
   }
 
-  public Table getTable(String dbName, String tableName) throws CatalogException {
-    return getTable(dbName, tableName, true);
+  /**
+   * Returns the Table object for the given dbName/tableName. Throws if the database
+   * does not exist. Returns null if the table does not exist.
+   * TODO: Clean up the inconsistent error behavior (throwing vs. returning null).
+   */
+  public Table getTable(String dbName, String tableName)
+      throws DatabaseNotFoundException {
+    Db db = getDb(dbName);
+    if (db == null) {
+      throw new DatabaseNotFoundException("Database '" + dbName + "' not found");
+    }
+    return db.getTable(tableName);
   }
 
   /**

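The split makes the lookup contract explicit. A short sketch of the resulting
behavior (database and table names are illustrative):

    Table t1 = catalog.getTableNoThrow("no_such_db", "tbl");  // null, never throws
    Table t2 = catalog.getTable("functional", "no_such_tbl"); // null: db exists, table missing
    Table t3 = catalog.getTable("no_such_db", "tbl");         // throws DatabaseNotFoundException
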
http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/catalog/ImpaladCatalog.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/catalog/ImpaladCatalog.java b/fe/src/main/java/org/apache/impala/catalog/ImpaladCatalog.java
index 99bd23e..1da5d0b 100644
--- a/fe/src/main/java/org/apache/impala/catalog/ImpaladCatalog.java
+++ b/fe/src/main/java/org/apache/impala/catalog/ImpaladCatalog.java
@@ -19,11 +19,13 @@ package org.apache.impala.catalog;
 
 import java.nio.ByteBuffer;
 import java.util.ArrayDeque;
+import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.impala.analysis.TableName;
 import org.apache.impala.catalog.MetaStoreClientPool.MetaStoreClient;
-import org.apache.impala.common.ImpalaException;
+import org.apache.impala.common.InternalException;
 import org.apache.impala.common.Pair;
 import org.apache.impala.service.FeSupport;
 import org.apache.impala.thrift.TCatalogObject;
@@ -219,6 +221,13 @@ public class ImpaladCatalog extends Catalog {
 
 
   /**
+   * Issues a load request to the catalogd for the given tables.
+   */
+  public void prioritizeLoad(Set<TableName> tableNames) throws InternalException {
+    FeSupport.PrioritizeLoad(tableNames);
+  }
+
+  /**
    * Causes the calling thread to wait until a catalog update notification has been sent
    * or the given timeout has been reached. A timeout value of 0 indicates an indefinite
    * wait. Does not protect against spurious wakeups, so this should be called in a loop.
@@ -234,27 +243,6 @@ public class ImpaladCatalog extends Catalog {
     }
   }
 
-  /**
-   * Returns the Table object for the given dbName/tableName. Returns null
-   * if the table does not exist. Will throw a TableLoadingException if the table's
-   * metadata was not able to be loaded successfully and DatabaseNotFoundException
-   * if the parent database does not exist.
-   */
-  @Override
-  public Table getTable(String dbName, String tableName)
-      throws CatalogException {
-    Table table = super.getTable(dbName, tableName);
-    if (table == null) return null;
-
-    if (table.isLoaded() && table instanceof IncompleteTable) {
-      // If there were problems loading this table's metadata, throw an exception
-      // when it is accessed.
-      ImpalaException cause = ((IncompleteTable) table).getCause();
-      if (cause instanceof TableLoadingException) throw (TableLoadingException) cause;
-      throw new TableLoadingException("Missing metadata for table: " + tableName, cause);
-    }
-    return table;
-  }
 
   /**
    * Returns the HDFS path where the metastore would create the given table. If the table
@@ -542,4 +530,5 @@ public class ImpaladCatalog extends Catalog {
       LOG.error("LibCacheRemoveEntry(" + hdfsLibFile + ") failed.");
     }
   }
+  public TUniqueId getCatalogServiceId() { return catalogServiceId_; }
 }

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/common/AnalysisException.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/common/AnalysisException.java b/fe/src/main/java/org/apache/impala/common/AnalysisException.java
index cbc4f00..2add32e 100644
--- a/fe/src/main/java/org/apache/impala/common/AnalysisException.java
+++ b/fe/src/main/java/org/apache/impala/common/AnalysisException.java
@@ -29,4 +29,8 @@ public class AnalysisException extends ImpalaException {
   public AnalysisException(String msg) {
     super(msg);
   }
+
+  public AnalysisException(Throwable cause) {
+    super(cause);
+  }
 }

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/common/ImpalaException.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/common/ImpalaException.java b/fe/src/main/java/org/apache/impala/common/ImpalaException.java
index 100f682..9302626 100644
--- a/fe/src/main/java/org/apache/impala/common/ImpalaException.java
+++ b/fe/src/main/java/org/apache/impala/common/ImpalaException.java
@@ -30,4 +30,8 @@ abstract public class ImpalaException extends java.lang.Exception {
   protected ImpalaException(String msg) {
     super(msg);
   }
+
+  protected ImpalaException(Throwable cause) {
+    super(cause);
+  }
 }

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/planner/Planner.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/planner/Planner.java b/fe/src/main/java/org/apache/impala/planner/Planner.java
index 71182f5..c320eb4 100644
--- a/fe/src/main/java/org/apache/impala/planner/Planner.java
+++ b/fe/src/main/java/org/apache/impala/planner/Planner.java
@@ -22,6 +22,7 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.impala.analysis.AnalysisContext;
+import org.apache.impala.analysis.AnalysisContext.AnalysisResult;
 import org.apache.impala.analysis.Analyzer;
 import org.apache.impala.analysis.ColumnLineageGraph;
 import org.apache.impala.analysis.Expr;
@@ -44,6 +45,7 @@ import org.apache.impala.thrift.TQueryExecRequest;
 import org.apache.impala.thrift.TQueryOptions;
 import org.apache.impala.thrift.TRuntimeFilterMode;
 import org.apache.impala.thrift.TTableName;
+import org.apache.impala.util.EventSequence;
 import org.apache.impala.util.KuduUtil;
 import org.apache.impala.util.MaxRowsProcessedVisitor;
 import org.slf4j.Logger;
@@ -69,8 +71,9 @@ public class Planner {
 
   private final PlannerContext ctx_;
 
-  public Planner(AnalysisContext.AnalysisResult analysisResult, TQueryCtx queryCtx) {
-    ctx_ = new PlannerContext(analysisResult, queryCtx);
+  public Planner(AnalysisResult analysisResult, TQueryCtx queryCtx,
+      EventSequence timeline) {
+    ctx_ = new PlannerContext(analysisResult, queryCtx, timeline);
   }
 
   public TQueryCtx getQueryCtx() { return ctx_.getQueryCtx(); }
@@ -96,7 +99,7 @@ public class Planner {
     SingleNodePlanner singleNodePlanner = new SingleNodePlanner(ctx_);
     DistributedPlanner distributedPlanner = new DistributedPlanner(ctx_);
     PlanNode singleNodePlan = singleNodePlanner.createSingleNodePlan();
-    ctx_.getAnalysisResult().getTimeline().markEvent("Single node plan created");
+    ctx_.getTimeline().markEvent("Single node plan created");
     ArrayList<PlanFragment> fragments = null;
 
     checkForSmallQueryOptimization(singleNodePlan);
@@ -120,7 +123,7 @@ public class Planner {
     PlanFragment rootFragment = fragments.get(fragments.size() - 1);
     if (ctx_.getQueryOptions().getRuntime_filter_mode() != TRuntimeFilterMode.OFF) {
       RuntimeFilterGenerator.generateRuntimeFilters(ctx_, rootFragment.getPlanRoot());
-      ctx_.getAnalysisResult().getTimeline().markEvent("Runtime filters computed");
+      ctx_.getTimeline().markEvent("Runtime filters computed");
     }
 
     rootFragment.verifyTree();
@@ -169,7 +172,7 @@ public class Planner {
     }
 
     Collections.reverse(fragments);
-    ctx_.getAnalysisResult().getTimeline().markEvent("Distributed plan created");
+    ctx_.getTimeline().markEvent("Distributed plan created");
 
     ColumnLineageGraph graph = ctx_.getRootAnalyzer().getColumnLineageGraph();
     if (BackendConfig.INSTANCE.getComputeLineage() || RuntimeEnv.INSTANCE.isTestEnv()) {
@@ -212,7 +215,7 @@ public class Planner {
         graph.computeLineageGraph(resultExprs, ctx_.getRootAnalyzer());
       }
       if (LOG.isTraceEnabled()) LOG.trace("lineage: " + graph.debugString());
-      ctx_.getAnalysisResult().getTimeline().markEvent("Lineage info computed");
+      ctx_.getTimeline().markEvent("Lineage info computed");
     }
 
     return fragments;
@@ -231,7 +234,7 @@ public class Planner {
     // Only use one scanner thread per scan-node instance since intra-node
     // parallelism is achieved via multiple fragment instances.
     ctx_.getQueryOptions().setNum_scanner_threads(1);
-    ctx_.getAnalysisResult().getTimeline().markEvent("Parallel plans created");
+    ctx_.getTimeline().markEvent("Parallel plans created");
     return parallelPlans;
   }
 

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/planner/PlannerContext.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/planner/PlannerContext.java b/fe/src/main/java/org/apache/impala/planner/PlannerContext.java
index b2881b7..135f7d0 100644
--- a/fe/src/main/java/org/apache/impala/planner/PlannerContext.java
+++ b/fe/src/main/java/org/apache/impala/planner/PlannerContext.java
@@ -19,12 +19,13 @@ package org.apache.impala.planner;
 
 import java.util.LinkedList;
 
-import org.apache.impala.analysis.AnalysisContext;
+import org.apache.impala.analysis.AnalysisContext.AnalysisResult;
 import org.apache.impala.analysis.Analyzer;
 import org.apache.impala.analysis.QueryStmt;
 import org.apache.impala.common.IdGenerator;
 import org.apache.impala.thrift.TQueryCtx;
 import org.apache.impala.thrift.TQueryOptions;
+import org.apache.impala.util.EventSequence;
 
 import com.google.common.collect.Lists;
 
@@ -56,14 +57,16 @@ public class PlannerContext {
   // Keeps track of subplan nesting. Maintained with push/popSubplan().
   private final LinkedList<SubplanNode> subplans_ = Lists.newLinkedList();
 
+  private final AnalysisResult analysisResult_;
+  private final EventSequence timeline_;
   private final TQueryCtx queryCtx_;
-  private final AnalysisContext.AnalysisResult analysisResult_;
   private final QueryStmt queryStmt_;
 
-  public PlannerContext (AnalysisContext.AnalysisResult analysisResult,
-      TQueryCtx queryCtx) {
+  public PlannerContext (AnalysisResult analysisResult, TQueryCtx queryCtx,
+      EventSequence timeline) {
     analysisResult_ = analysisResult;
     queryCtx_ = queryCtx;
+    timeline_ = timeline;
     if (isInsertOrCtas()) {
       queryStmt_ = analysisResult.getInsertStmt().getQueryStmt();
     } else if (analysisResult.isUpdateStmt()) {
@@ -78,7 +81,8 @@ public class PlannerContext {
   public QueryStmt getQueryStmt() { return queryStmt_; }
   public TQueryCtx getQueryCtx() { return queryCtx_; }
   public TQueryOptions getQueryOptions() { return getRootAnalyzer().getQueryOptions(); }
-  public AnalysisContext.AnalysisResult getAnalysisResult() { return analysisResult_; }
+  public AnalysisResult getAnalysisResult() { return analysisResult_; }
+  public EventSequence getTimeline() { return timeline_; }
   public Analyzer getRootAnalyzer() { return analysisResult_.getAnalyzer(); }
   public boolean isSingleNodeExec() { return getQueryOptions().num_nodes == 1; }
   public PlanNodeId getNextNodeId() { return nodeIdGenerator_.getNextId(); }

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/planner/SingleNodePlanner.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/planner/SingleNodePlanner.java b/fe/src/main/java/org/apache/impala/planner/SingleNodePlanner.java
index 5d933a3..27d293d 100644
--- a/fe/src/main/java/org/apache/impala/planner/SingleNodePlanner.java
+++ b/fe/src/main/java/org/apache/impala/planner/SingleNodePlanner.java
@@ -31,13 +31,12 @@ import org.apache.impala.analysis.AggregateInfo;
 import org.apache.impala.analysis.AnalyticInfo;
 import org.apache.impala.analysis.Analyzer;
 import org.apache.impala.analysis.BaseTableRef;
-import org.apache.impala.analysis.BinaryPredicate.Operator;
 import org.apache.impala.analysis.BinaryPredicate;
+import org.apache.impala.analysis.BinaryPredicate.Operator;
 import org.apache.impala.analysis.CollectionTableRef;
 import org.apache.impala.analysis.Expr;
 import org.apache.impala.analysis.ExprId;
 import org.apache.impala.analysis.ExprSubstitutionMap;
-import org.apache.impala.analysis.FunctionCallExpr;
 import org.apache.impala.analysis.InlineViewRef;
 import org.apache.impala.analysis.JoinOperator;
 import org.apache.impala.analysis.NullLiteral;
@@ -48,12 +47,11 @@ import org.apache.impala.analysis.SlotDescriptor;
 import org.apache.impala.analysis.SlotId;
 import org.apache.impala.analysis.SlotRef;
 import org.apache.impala.analysis.TableRef;
-import org.apache.impala.analysis.TableSampleClause;
 import org.apache.impala.analysis.TupleDescriptor;
 import org.apache.impala.analysis.TupleId;
 import org.apache.impala.analysis.TupleIsNullPredicate;
-import org.apache.impala.analysis.UnionStmt.UnionOperand;
 import org.apache.impala.analysis.UnionStmt;
+import org.apache.impala.analysis.UnionStmt.UnionOperand;
 import org.apache.impala.catalog.ColumnStats;
 import org.apache.impala.catalog.DataSourceTable;
 import org.apache.impala.catalog.HBaseTable;
@@ -62,7 +60,6 @@ import org.apache.impala.catalog.HdfsTable;
 import org.apache.impala.catalog.KuduTable;
 import org.apache.impala.catalog.Table;
 import org.apache.impala.catalog.Type;
-import org.apache.impala.catalog.HdfsPartition.FileDescriptor;
 import org.apache.impala.common.ImpalaException;
 import org.apache.impala.common.InternalException;
 import org.apache.impala.common.NotImplementedException;
@@ -129,7 +126,7 @@ public class SingleNodePlanner {
     // to detect empty result sets.
     Analyzer analyzer = queryStmt.getAnalyzer();
     analyzer.computeValueTransferGraph();
-    ctx_.getAnalysisResult().getTimeline().markEvent("Value transfer graph computed");
+    ctx_.getTimeline().markEvent("Value transfer graph computed");
 
     // Mark slots referenced by output exprs as materialized, prior to generating the
     // plan tree.
@@ -231,7 +228,7 @@ public class SingleNodePlanner {
    */
   private void unmarkCollectionSlots(QueryStmt stmt) {
     List<TableRef> tblRefs = Lists.newArrayList();
-    stmt.collectTableRefs(tblRefs);
+    stmt.collectFromClauseTableRefs(tblRefs);
     for (TableRef ref: tblRefs) {
       if (!ref.isRelative()) continue;
       Preconditions.checkState(ref instanceof CollectionTableRef);

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/service/CatalogOpExecutor.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/service/CatalogOpExecutor.java b/fe/src/main/java/org/apache/impala/service/CatalogOpExecutor.java
index 69137ef..c3b15a1 100644
--- a/fe/src/main/java/org/apache/impala/service/CatalogOpExecutor.java
+++ b/fe/src/main/java/org/apache/impala/service/CatalogOpExecutor.java
@@ -53,10 +53,8 @@ import org.apache.impala.analysis.AlterTableSortByStmt;
 import org.apache.impala.analysis.FunctionName;
 import org.apache.impala.analysis.TableName;
 import org.apache.impala.authorization.User;
-import org.apache.impala.catalog.Catalog;
 import org.apache.impala.catalog.CatalogException;
 import org.apache.impala.catalog.CatalogServiceCatalog;
-import org.apache.impala.catalog.CatalogUsageMonitor;
 import org.apache.impala.catalog.Column;
 import org.apache.impala.catalog.ColumnNotFoundException;
 import org.apache.impala.catalog.DataSource;
@@ -117,7 +115,6 @@ import org.apache.impala.thrift.TCreateFunctionParams;
 import org.apache.impala.thrift.TCreateOrAlterViewParams;
 import org.apache.impala.thrift.TCreateTableLikeParams;
 import org.apache.impala.thrift.TCreateTableParams;
-import org.apache.impala.thrift.TDatabase;
 import org.apache.impala.thrift.TDdlExecRequest;
 import org.apache.impala.thrift.TDdlExecResponse;
 import org.apache.impala.thrift.TDropDataSourceParams;
@@ -1544,7 +1541,7 @@ public class CatalogOpExecutor {
     Preconditions.checkState(params.getColumns() != null,
         "Null column list given as argument to Catalog.createTable");
 
-    Table existingTbl = catalog_.getTable(tableName.getDb(), tableName.getTbl(), false);
+    Table existingTbl = catalog_.getTableNoThrow(tableName.getDb(), tableName.getTbl());
     if (params.if_not_exists && existingTbl != null) {
       LOG.trace(String.format("Skipping table creation because %s already exists and " +
           "IF NOT EXISTS was specified.", tableName));
@@ -1760,7 +1757,7 @@ public class CatalogOpExecutor {
     Preconditions.checkState(tblName != null && tblName.isFullyQualified());
     Preconditions.checkState(srcTblName != null && srcTblName.isFullyQualified());
 
-    Table existingTbl = catalog_.getTable(tblName.getDb(), tblName.getTbl(), false);
+    Table existingTbl = catalog_.getTableNoThrow(tblName.getDb(), tblName.getTbl());
     if (params.if_not_exists && existingTbl != null) {
       LOG.trace(String.format("Skipping table creation because %s already exists and " +
           "IF NOT EXISTS was specified.", tblName));
@@ -2131,9 +2128,6 @@ public class CatalogOpExecutor {
           "The partitions being dropped don't exist any more");
     }
 
-    org.apache.hadoop.hive.metastore.api.Table msTbl =
-        tbl.getMetaStoreTable().deepCopy();
-
     PartitionDropOptions dropOptions = PartitionDropOptions.instance();
     dropOptions.purgeData(purge);
     long numTargetedPartitions = 0L;

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/service/FeSupport.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/service/FeSupport.java b/fe/src/main/java/org/apache/impala/service/FeSupport.java
index 5bc1d87..714f686 100644
--- a/fe/src/main/java/org/apache/impala/service/FeSupport.java
+++ b/fe/src/main/java/org/apache/impala/service/FeSupport.java
@@ -35,13 +35,13 @@ import org.apache.impala.thrift.TCatalogObject;
 import org.apache.impala.thrift.TCatalogObjectType;
 import org.apache.impala.thrift.TCatalogServiceRequestHeader;
 import org.apache.impala.thrift.TColumnValue;
+import org.apache.impala.thrift.TErrorCode;
 import org.apache.impala.thrift.TExprBatch;
 import org.apache.impala.thrift.TPrioritizeLoadRequest;
 import org.apache.impala.thrift.TPrioritizeLoadResponse;
 import org.apache.impala.thrift.TQueryCtx;
 import org.apache.impala.thrift.TQueryOptions;
 import org.apache.impala.thrift.TResultRow;
-import org.apache.impala.thrift.TStatus;
 import org.apache.impala.thrift.TSymbolLookupParams;
 import org.apache.impala.thrift.TSymbolLookupResult;
 import org.apache.impala.thrift.TTable;
@@ -53,6 +53,7 @@ import org.apache.thrift.protocol.TBinaryProtocol;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 
 /**
@@ -263,10 +264,13 @@ public class FeSupport {
     return NativePrioritizeLoad(thriftReq);
   }
 
-  public static TStatus PrioritizeLoad(Set<TableName> tableNames)
+  public static void PrioritizeLoad(Set<TableName> tableNames)
       throws InternalException {
     Preconditions.checkNotNull(tableNames);
 
+    LOG.info(String.format("Requesting prioritized load of table(s): %s",
+        Joiner.on(", ").join(tableNames)));
+
     List<TCatalogObject> objectDescs = new ArrayList<TCatalogObject>(tableNames.size());
     for (TableName tableName: tableNames) {
       TCatalogObject catalogObject = new TCatalogObject();
@@ -286,7 +290,10 @@ public class FeSupport {
       TDeserializer deserializer = new TDeserializer(new TBinaryProtocol.Factory());
       TPrioritizeLoadResponse response = new TPrioritizeLoadResponse();
       deserializer.deserialize(response, result);
-      return response.getStatus();
+      if (response.getStatus().getStatus_code() != TErrorCode.OK) {
+        throw new InternalException("Error requesting prioritized load: " +
+            Joiner.on("\n").join(response.getStatus().getError_msgs()));
+      }
     } catch (TException e) {
       // this should never happen
       throw new InternalException("Error processing request: " + e.getMessage(), e);

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/service/Frontend.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/service/Frontend.java b/fe/src/main/java/org/apache/impala/service/Frontend.java
index 318b248..f03feb0 100644
--- a/fe/src/main/java/org/apache/impala/service/Frontend.java
+++ b/fe/src/main/java/org/apache/impala/service/Frontend.java
@@ -18,11 +18,11 @@
 package org.apache.impala.service;
 
 import java.io.IOException;
+import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Random;
@@ -36,6 +36,7 @@ import java.util.concurrent.atomic.AtomicReference;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.impala.analysis.AnalysisContext;
+import org.apache.impala.analysis.AnalysisContext.AnalysisResult;
 import org.apache.impala.analysis.CreateDataSrcStmt;
 import org.apache.impala.analysis.CreateDropRoleStmt;
 import org.apache.impala.analysis.CreateUdaStmt;
@@ -54,6 +55,11 @@ import org.apache.impala.analysis.ResetMetadataStmt;
 import org.apache.impala.analysis.ShowFunctionsStmt;
 import org.apache.impala.analysis.ShowGrantRoleStmt;
 import org.apache.impala.analysis.ShowRolesStmt;
+import org.apache.impala.analysis.SqlParser;
+import org.apache.impala.analysis.SqlScanner;
+import org.apache.impala.analysis.StatementBase;
+import org.apache.impala.analysis.StmtMetadataLoader;
+import org.apache.impala.analysis.StmtMetadataLoader.StmtTableCache;
 import org.apache.impala.analysis.TableName;
 import org.apache.impala.analysis.TruncateStmt;
 import org.apache.impala.authorization.AuthorizationChecker;
@@ -63,7 +69,6 @@ import org.apache.impala.authorization.ImpalaInternalAdminUser;
 import org.apache.impala.authorization.PrivilegeRequest;
 import org.apache.impala.authorization.PrivilegeRequestBuilder;
 import org.apache.impala.authorization.User;
-import org.apache.impala.catalog.AuthorizationException;
 import org.apache.impala.catalog.Catalog;
 import org.apache.impala.catalog.CatalogException;
 import org.apache.impala.catalog.Column;
@@ -83,8 +88,8 @@ import org.apache.impala.common.FileSystemUtil;
 import org.apache.impala.common.ImpalaException;
 import org.apache.impala.common.InternalException;
 import org.apache.impala.common.NotImplementedException;
-import org.apache.impala.planner.HdfsScanNode;
 import org.apache.impala.compat.MetastoreShim;
+import org.apache.impala.planner.HdfsScanNode;
 import org.apache.impala.planner.PlanFragment;
 import org.apache.impala.planner.Planner;
 import org.apache.impala.planner.ScanNode;
@@ -98,7 +103,6 @@ import org.apache.impala.thrift.TDdlExecRequest;
 import org.apache.impala.thrift.TDdlType;
 import org.apache.impala.thrift.TDescribeOutputStyle;
 import org.apache.impala.thrift.TDescribeResult;
-import org.apache.impala.thrift.TErrorCode;
 import org.apache.impala.thrift.TExecRequest;
 import org.apache.impala.thrift.TExplainResult;
 import org.apache.impala.thrift.TFinalizeParams;
@@ -120,7 +124,6 @@ import org.apache.impala.thrift.TResultSet;
 import org.apache.impala.thrift.TResultSetMetadata;
 import org.apache.impala.thrift.TShowFilesParams;
 import org.apache.impala.thrift.TShowStatsOp;
-import org.apache.impala.thrift.TStatus;
 import org.apache.impala.thrift.TStmtType;
 import org.apache.impala.thrift.TTableName;
 import org.apache.impala.thrift.TUpdateCatalogCacheRequest;
@@ -135,7 +138,6 @@ import org.apache.thrift.TException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicates;
 import com.google.common.collect.Lists;
@@ -149,16 +151,14 @@ import com.google.common.collect.Sets;
  */
 public class Frontend {
   private final static Logger LOG = LoggerFactory.getLogger(Frontend.class);
-  // Time to wait for missing tables to be loaded before timing out.
-  private final long MISSING_TBL_LOAD_WAIT_TIMEOUT_MS = 2 * 60 * 1000;
 
   // Max time to wait for a catalog update notification.
-  private final long MAX_CATALOG_UPDATE_WAIT_TIME_MS = 2 * 1000;
+  public static final long MAX_CATALOG_UPDATE_WAIT_TIME_MS = 2 * 1000;
 
   //TODO: Make the reload interval configurable.
   private static final int AUTHORIZATION_POLICY_RELOAD_INTERVAL_SECS = 5 * 60;
 
-  private AtomicReference<ImpaladCatalog> impaladCatalog_ =
+  private final AtomicReference<ImpaladCatalog> impaladCatalog_ =
       new AtomicReference<ImpaladCatalog>();
   private final AuthorizationConfig authzConfig_;
   private final AtomicReference<AuthorizationChecker> authzChecker_;
@@ -247,7 +247,7 @@ public class Frontend {
    * Constructs a TCatalogOpRequest and attaches it, plus any metadata, to the
    * result argument.
    */
-  private void createCatalogOpRequest(AnalysisContext.AnalysisResult analysis,
+  private void createCatalogOpRequest(AnalysisResult analysis,
       TExecRequest result) throws InternalException {
     TCatalogOpRequest ddl = new TCatalogOpRequest();
     TResultSetMetadata metadata = new TResultSetMetadata();
@@ -798,69 +798,6 @@ public class Frontend {
   }
 
   /**
-   * Given a set of table names, returns the set of table names that are missing
-   * metadata (are not yet loaded).
-   */
-  private Set<TableName> getMissingTbls(Set<TableName> tableNames) {
-    Set<TableName> missingTbls = new HashSet<TableName>();
-    for (TableName tblName: tableNames) {
-      Db db = getCatalog().getDb(tblName.getDb());
-      if (db == null) continue;
-      Table tbl = db.getTable(tblName.getTbl());
-      if (tbl == null) continue;
-      if (!tbl.isLoaded()) missingTbls.add(tblName);
-    }
-    return missingTbls;
-  }
-
-  /**
-   * Requests the catalog server load the given set of tables and waits until
-   * these tables show up in the local catalog, or the given timeout has been reached.
-   * The timeout is specified in milliseconds, with a value <= 0 indicating no timeout.
-   * The exact steps taken are:
-   * 1) Collect the tables that are missing (not yet loaded locally).
-   * 2) Make an RPC to the CatalogServer to prioritize the loading of these tables.
-   * 3) Wait until the local catalog contains all missing tables by (re)checking the
-   *    catalog each time a new catalog update is received.
-   *
-   * Returns true if all missing tables were received before timing out and false if
-   * the timeout was reached before all tables were received.
-   */
-  private boolean requestTblLoadAndWait(Set<TableName> requestedTbls, long timeoutMs)
-      throws InternalException {
-    Set<TableName> missingTbls = getMissingTbls(requestedTbls);
-    // There are no missing tables, return and avoid making an RPC to the CatalogServer.
-    if (missingTbls.isEmpty()) return true;
-
-    // Call into the CatalogServer and request the required tables be loaded.
-    LOG.info(String.format("Requesting prioritized load of table(s): %s",
-        Joiner.on(", ").join(missingTbls)));
-    TStatus status = FeSupport.PrioritizeLoad(missingTbls);
-    if (status.getStatus_code() != TErrorCode.OK) {
-      throw new InternalException("Error requesting prioritized load: " +
-          Joiner.on("\n").join(status.getError_msgs()));
-    }
-
-    long startTimeMs = System.currentTimeMillis();
-    // Wait until all the required tables are loaded in the Impalad's catalog cache.
-    while (!missingTbls.isEmpty()) {
-      // Check if the timeout has been reached.
-      if (timeoutMs > 0 && System.currentTimeMillis() - startTimeMs > timeoutMs) {
-        return false;
-      }
-
-      if (LOG.isTraceEnabled()) {
-        LOG.trace(String.format("Waiting for table(s) to complete loading: %s",
-            Joiner.on(", ").join(missingTbls)));
-      }
-      getCatalog().waitForCatalogUpdate(MAX_CATALOG_UPDATE_WAIT_TIME_MS);
-      missingTbls = getMissingTbls(missingTbls);
-      // TODO: Check for query cancellation here.
-    }
-    return true;
-  }
-
-  /**
    * Waits indefinitely for the local catalog to be ready. The catalog is "ready" after
    * the first catalog update is received from the statestore.
    *
@@ -883,78 +820,6 @@ public class Frontend {
   }
 
   /**
-   * Overload of requestTblLoadAndWait that uses the default timeout.
-   */
-  public boolean requestTblLoadAndWait(Set<TableName> requestedTbls)
-      throws InternalException {
-    return requestTblLoadAndWait(requestedTbls, MISSING_TBL_LOAD_WAIT_TIMEOUT_MS);
-  }
-
-  /**
-   * Analyzes the SQL statement included in queryCtx and returns the AnalysisResult.
-   * Authorizes all catalog object accesses and throws an AuthorizationException
-   * if the user does not have privileges to access one or more objects.
-   * If a statement fails analysis because table/view metadata was not loaded, an
-   * RPC to the CatalogServer will be executed to request loading the missing metadata
-   * and analysis will be restarted once the required tables have been loaded
-   * in the local Impalad Catalog or the MISSING_TBL_LOAD_WAIT_TIMEOUT_MS timeout
-   * is reached.
-   * The goal of this timeout is not to analysis, but to restart the analysis/missing
-   * table collection process. This helps ensure a statement never waits indefinitely
-   * for a table to be loaded in event the table metadata was invalidated.
-   * TODO: Also consider adding an overall timeout that fails analysis.
-   */
-  private AnalysisContext.AnalysisResult analyzeStmt(TQueryCtx queryCtx)
-      throws AnalysisException, InternalException, AuthorizationException {
-    Preconditions.checkState(getCatalog().isReady(),
-        "Local catalog has not been initialized. Aborting query analysis.");
-
-    AnalysisContext analysisCtx = new AnalysisContext(impaladCatalog_.get(), queryCtx,
-        authzConfig_);
-    LOG.info("Compiling query: " + queryCtx.client_request.stmt);
-
-    // Run analysis in a loop until it any of the following events occur:
-    // 1) Analysis completes successfully.
-    // 2) Analysis fails with an AnalysisException AND there are no missing tables.
-    // 3) Analysis fails with an AuthorizationException.
-    try {
-      while (true) {
-        // Ensure that catalog snapshot reflects any recent changes.
-        analysisCtx.setCatalog(impaladCatalog_.get());
-        try {
-          analysisCtx.analyze(queryCtx.client_request.stmt);
-          Preconditions.checkState(analysisCtx.getAnalyzer().getMissingTbls().isEmpty());
-          return analysisCtx.getAnalysisResult();
-        } catch (AnalysisException e) {
-          Set<TableName> missingTbls = analysisCtx.getAnalyzer().getMissingTbls();
-          // Only re-throw the AnalysisException if there were no missing tables.
-          if (missingTbls.isEmpty()) throw e;
-
-          // Record that analysis needs table metadata
-          analysisCtx.getTimeline().markEvent("Metadata load started");
-
-          // Some tables/views were missing, request and wait for them to load.
-          if (!requestTblLoadAndWait(missingTbls, MISSING_TBL_LOAD_WAIT_TIMEOUT_MS)) {
-            if (LOG.isWarnEnabled()) {
-              LOG.warn(String.format("Missing tables were not received in %dms. Load " +
-                  "request will be retried.", MISSING_TBL_LOAD_WAIT_TIMEOUT_MS));
-            }
-            analysisCtx.getTimeline().markEvent("Metadata load timeout");
-          } else {
-            analysisCtx.getTimeline().markEvent("Metadata load finished");
-          }
-        }
-      }
-    } finally {
-      // Authorize all accesses.
-      // AuthorizationExceptions must take precedence over any AnalysisException
-      // that has been thrown, so perform the authorization first.
-      analysisCtx.authorize(getAuthzChecker());
-      LOG.info("Compiled query.");
-    }
-  }
-
-  /**
    * Return a TPlanExecInfo corresponding to the plan with root fragment 'planRoot'.
    */
   private TPlanExecInfo createPlanExecInfo(PlanFragment planRoot, Planner planner,
@@ -1015,7 +880,7 @@ public class Frontend {
   private TQueryExecRequest createExecRequest(
       Planner planner, StringBuilder explainString) throws ImpalaException {
     TQueryCtx queryCtx = planner.getQueryCtx();
-    AnalysisContext.AnalysisResult analysisResult = planner.getAnalysisResult();
+    AnalysisResult analysisResult = planner.getAnalysisResult();
     boolean isMtExec = analysisResult.isQueryStmt()
         && queryCtx.client_request.query_options.isSetMt_dop()
         && queryCtx.client_request.query_options.mt_dop > 0;
@@ -1065,16 +930,40 @@ public class Frontend {
     return result;
   }
 
+  public StatementBase parse(String stmt) throws AnalysisException {
+    SqlScanner input = new SqlScanner(new StringReader(stmt));
+    SqlParser parser = new SqlParser(input);
+    try {
+      return (StatementBase) parser.parse().value;
+    } catch (Exception e) {
+      throw new AnalysisException(parser.getErrorMsg(stmt), e);
+    }
+  }
+
   /**
    * Create a populated TExecRequest corresponding to the supplied TQueryCtx.
    */
   public TExecRequest createExecRequest(TQueryCtx queryCtx, StringBuilder explainString)
       throws ImpalaException {
-    // Analyze the statement
-    AnalysisContext.AnalysisResult analysisResult = analyzeStmt(queryCtx);
-    EventSequence timeline = analysisResult.getTimeline();
+    // Timeline of important events in the planning process, used for debugging
+    // and profiling.
+    EventSequence timeline = new EventSequence("Query Compilation");
+    LOG.info("Analyzing query: " + queryCtx.client_request.stmt);
+
+    // Parse stmt and collect/load metadata to populate a stmt-local table cache
+    StatementBase stmt = parse(queryCtx.client_request.stmt);
+    StmtMetadataLoader metadataLoader =
+        new StmtMetadataLoader(this, queryCtx.session.database, timeline);
+    StmtTableCache stmtTableCache = metadataLoader.loadTables(stmt);
+
+    // Analyze and authorize stmt
+    AnalysisContext analysisCtx = new AnalysisContext(queryCtx, authzConfig_, timeline);
+    AnalysisResult analysisResult =
+        analysisCtx.analyzeAndAuthorize(stmt, stmtTableCache, authzChecker_.get());
+    LOG.info("Analysis finished.");
     timeline.markEvent("Analysis finished");
     Preconditions.checkNotNull(analysisResult.getStmt());
+
     TExecRequest result = new TExecRequest();
     result.setQuery_options(queryCtx.client_request.getQuery_options());
     result.setAccess_events(analysisResult.getAccessEvents());
@@ -1123,7 +1012,7 @@ public class Frontend {
         || analysisResult.isCreateTableAsSelectStmt() || analysisResult.isUpdateStmt()
         || analysisResult.isDeleteStmt());
 
-    Planner planner = new Planner(analysisResult, queryCtx);
+    Planner planner = new Planner(analysisResult, queryCtx, timeline);
     TQueryExecRequest queryExecRequest = createExecRequest(planner, explainString);
     queryCtx.setDesc_tbl(
         planner.getAnalysisResult().getAnalyzer().getDescTbl().toThrift());
@@ -1187,7 +1076,7 @@ public class Frontend {
     }
 
     timeline.markEvent("Planning finished");
-    result.setTimeline(analysisResult.getTimeline().toThrift());
+    result.setTimeline(timeline.toThrift());
     return result;
   }
 

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/service/MetadataOp.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/service/MetadataOp.java b/fe/src/main/java/org/apache/impala/service/MetadataOp.java
index c541cc5..f1cb077 100644
--- a/fe/src/main/java/org/apache/impala/service/MetadataOp.java
+++ b/fe/src/main/java/org/apache/impala/service/MetadataOp.java
@@ -22,11 +22,11 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.impala.analysis.StmtMetadataLoader;
 import org.apache.impala.analysis.TableName;
 import org.apache.impala.authorization.User;
+import org.apache.impala.catalog.Catalog;
 import org.apache.impala.catalog.Column;
 import org.apache.impala.catalog.Db;
 import org.apache.impala.catalog.Function;
@@ -34,7 +34,6 @@ import org.apache.impala.catalog.ImpaladCatalog;
 import org.apache.impala.catalog.PrimitiveType;
 import org.apache.impala.catalog.ScalarType;
 import org.apache.impala.catalog.Table;
-import org.apache.impala.catalog.TableLoadingException;
 import org.apache.impala.catalog.Type;
 import org.apache.impala.common.ImpalaException;
 import org.apache.impala.thrift.TColumn;
@@ -43,6 +42,9 @@ import org.apache.impala.thrift.TResultRow;
 import org.apache.impala.thrift.TResultSet;
 import org.apache.impala.thrift.TResultSetMetadata;
 import org.apache.impala.util.PatternMatcher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 
@@ -277,12 +279,7 @@ public class MetadataOp {
         List<String> tableComments = Lists.newArrayList();
         List<String> tableTypes = Lists.newArrayList();
         for (String tabName: fe.getTableNames(db.getName(), tablePatternMatcher, user)) {
-          Table table = null;
-          try {
-            table = catalog.getTable(db.getName(), tabName);
-          } catch (TableLoadingException e) {
-            // Ignore exception (this table will be skipped).
-          }
+          Table table = catalog.getTable(db.getName(), tabName);
           if (table == null) continue;
 
           String comment = null;
@@ -356,25 +353,24 @@ public class MetadataOp {
    * The parameters catalogName, schemaName, tableName and columnName are JDBC search
    * patterns.
    */
-  public static TResultSet getColumns(Frontend fe,
-      String catalogName, String schemaName, String tableName, String columnName,
-      User user)
+  public static TResultSet getColumns(Frontend fe, String catalogName, String schemaName,
+      String tableName, String columnName, User user)
       throws ImpalaException {
-    TResultSet result = createEmptyResultSet(GET_COLUMNS_MD);
-
     // Get the list of schemas, tables, and columns that satisfy the search conditions.
-    DbsMetadata dbsMetadata = null;
     PatternMatcher schemaMatcher = PatternMatcher.createJdbcPatternMatcher(schemaName);
     PatternMatcher tableMatcher = PatternMatcher.createJdbcPatternMatcher(tableName);
     PatternMatcher columnMatcher = PatternMatcher.createJdbcPatternMatcher(columnName);
-    while (dbsMetadata == null || !dbsMetadata.missingTbls.isEmpty()) {
-      dbsMetadata = getDbsMetadata(fe, catalogName, schemaMatcher, tableMatcher,
-          columnMatcher, PatternMatcher.MATCHER_MATCH_NONE, user);
-      if (!fe.requestTblLoadAndWait(dbsMetadata.missingTbls)) {
-        LOG.info("Timed out waiting for missing tables. Load request will be retried.");
-      }
+    DbsMetadata dbsMetadata = getDbsMetadata(fe, catalogName, schemaMatcher,
+        tableMatcher, columnMatcher, PatternMatcher.MATCHER_MATCH_NONE, user);
+    if (!dbsMetadata.missingTbls.isEmpty()) {
+      // Need to load tables for column metadata.
+      StmtMetadataLoader mdLoader = new StmtMetadataLoader(fe, Catalog.DEFAULT_DB, null);
+      mdLoader.loadTables(dbsMetadata.missingTbls);
+      dbsMetadata = getDbsMetadata(fe, catalogName, schemaMatcher,
+          tableMatcher, columnMatcher, PatternMatcher.MATCHER_MATCH_NONE, user);
     }
 
+    TResultSet result = createEmptyResultSet(GET_COLUMNS_MD);
     for (int i = 0; i < dbsMetadata.dbs.size(); ++i) {
       String dbName = dbsMetadata.dbs.get(i);
       for (int j = 0; j < dbsMetadata.tableNames.get(i).size(); ++j) {

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/util/EventSequence.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/util/EventSequence.java b/fe/src/main/java/org/apache/impala/util/EventSequence.java
index 8ce286a..92552d1 100644
--- a/fe/src/main/java/org/apache/impala/util/EventSequence.java
+++ b/fe/src/main/java/org/apache/impala/util/EventSequence.java
@@ -48,6 +48,9 @@ public class EventSequence {
     labels_.add(label);
   }
 
+  // For testing
+  public int getNumEvents() { return labels_.size(); }
+
   public TEventSequence toThrift() {
     TEventSequence ret = new TEventSequence();
     ret.timestamps = timestamps_;
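
Since the timeline is now created by the Frontend and threaded through
metadata loading, analysis, and planning, here is a tiny fragment
showing the EventSequence surface this patch relies on (the labels are
illustrative):

EventSequence timeline = new EventSequence("Query Compilation");
timeline.markEvent("Analysis finished");      // records label + timestamp
timeline.markEvent("Planning finished");
assert timeline.getNumEvents() == 2;          // the new test-only accessor
TEventSequence thrift = timeline.toThrift();  // shipped in the TExecRequest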

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/AnalyzeAuthStmtsTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeAuthStmtsTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeAuthStmtsTest.java
index 1fe11bc..ddb95f1 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeAuthStmtsTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeAuthStmtsTest.java
@@ -19,14 +19,14 @@ package org.apache.impala.analysis;
 
 import java.util.HashSet;
 
-import org.junit.Test;
-
 import org.apache.impala.authorization.AuthorizationConfig;
 import org.apache.impala.catalog.Catalog;
 import org.apache.impala.catalog.Role;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.testutil.TestUtils;
 import org.apache.impala.thrift.TQueryCtx;
+import org.apache.impala.util.EventSequence;
+import org.junit.Test;
 
 public class AnalyzeAuthStmtsTest extends AnalyzerTest {
   public AnalyzeAuthStmtsTest() throws AnalysisException {
@@ -35,18 +35,23 @@ public class AnalyzeAuthStmtsTest extends AnalyzerTest {
   }
 
   @Override
-  protected Analyzer createAnalyzer(String defaultDb) {
-    TQueryCtx queryCtx =
-        TestUtils.createQueryContext(defaultDb, System.getProperty("user.name"));
-    return new Analyzer(catalog_, queryCtx,
-        AuthorizationConfig.createHadoopGroupAuthConfig("server1", null, null));
+  protected AnalysisContext createAnalysisCtx(String defaultDb) {
+    TQueryCtx queryCtx = TestUtils.createQueryContext(
+        defaultDb, System.getProperty("user.name"));
+    EventSequence timeline = new EventSequence("Authorization Test");
+    AnalysisContext analysisCtx = new AnalysisContext(queryCtx,
+        AuthorizationConfig.createHadoopGroupAuthConfig("server1", null, null),
+        timeline);
+    return analysisCtx;
   }
 
-  private Analyzer createAuthDisabledAnalyzer(String defaultDb) {
-    TQueryCtx queryCtx =
-        TestUtils.createQueryContext(defaultDb, System.getProperty("user.name"));
-    return new Analyzer(catalog_, queryCtx,
-        AuthorizationConfig.createAuthDisabledConfig());
+  private AnalysisContext createAuthDisabledAnalysisCtx() {
+    TQueryCtx queryCtx = TestUtils.createQueryContext(
+        Catalog.DEFAULT_DB, System.getProperty("user.name"));
+    EventSequence timeline = new EventSequence("Authorization Test");
+    AnalysisContext analysisCtx = new AnalysisContext(queryCtx,
+        AuthorizationConfig.createAuthDisabledConfig(), timeline);
+    return analysisCtx;
   }
 
   @Test
@@ -55,12 +60,12 @@ public class AnalyzeAuthStmtsTest extends AnalyzerTest {
     AnalyzesOk("SHOW ROLE GRANT GROUP myGroup");
     AnalyzesOk("SHOW CURRENT ROLES");
 
-    Analyzer authDisabledAnalyzer = createAuthDisabledAnalyzer(Catalog.DEFAULT_DB);
-    AnalysisError("SHOW ROLES", authDisabledAnalyzer,
+    AnalysisContext authDisabledCtx = createAuthDisabledAnalysisCtx();
+    AnalysisError("SHOW ROLES", authDisabledCtx,
         "Authorization is not enabled.");
-    AnalysisError("SHOW ROLE GRANT GROUP myGroup", authDisabledAnalyzer,
+    AnalysisError("SHOW ROLE GRANT GROUP myGroup", authDisabledCtx,
         "Authorization is not enabled.");
-    AnalysisError("SHOW CURRENT ROLES", authDisabledAnalyzer,
+    AnalysisError("SHOW CURRENT ROLES", authDisabledCtx,
         "Authorization is not enabled.");
   }
 
@@ -76,10 +81,10 @@ public class AnalyzeAuthStmtsTest extends AnalyzerTest {
     AnalysisError("SHOW GRANT ROLE does_not_exist ON SERVER",
         "Role 'does_not_exist' does not exist.");
 
-    Analyzer authDisabledAnalyzer = createAuthDisabledAnalyzer(Catalog.DEFAULT_DB);
-    AnalysisError("SHOW GRANT ROLE myRole", authDisabledAnalyzer,
+    AnalysisContext authDisabledCtx = createAuthDisabledAnalysisCtx();
+    AnalysisError("SHOW GRANT ROLE myRole", authDisabledCtx,
         "Authorization is not enabled.");
-    AnalysisError("SHOW GRANT ROLE myRole ON SERVER", authDisabledAnalyzer,
+    AnalysisError("SHOW GRANT ROLE myRole ON SERVER", authDisabledCtx,
         "Authorization is not enabled.");
   }
 
@@ -95,10 +100,10 @@ public class AnalyzeAuthStmtsTest extends AnalyzerTest {
     AnalyzesOk("DROP ROLE MYrole");
     AnalysisError("CREATE ROLE MYrole", "Role 'MYrole' already exists.");
 
-    Analyzer authDisabledAnalyzer = createAuthDisabledAnalyzer(Catalog.DEFAULT_DB);
-    AnalysisError("DROP ROLE myRole", authDisabledAnalyzer,
+    AnalysisContext authDisabledCtx = createAuthDisabledAnalysisCtx();
+    AnalysisError("DROP ROLE myRole", authDisabledCtx,
         "Authorization is not enabled.");
-    AnalysisError("CREATE ROLE doesNotExist", authDisabledAnalyzer,
+    AnalysisError("CREATE ROLE doesNotExist", authDisabledCtx,
         "Authorization is not enabled.");
   }
 
@@ -111,10 +116,10 @@ public class AnalyzeAuthStmtsTest extends AnalyzerTest {
     AnalysisError("REVOKE ROLE doesNotExist FROM GROUP abc",
         "Role 'doesNotExist' does not exist.");
 
-    Analyzer authDisabledAnalyzer = createAuthDisabledAnalyzer(Catalog.DEFAULT_DB);
-    AnalysisError("GRANT ROLE myrole TO GROUP abc", authDisabledAnalyzer,
+    AnalysisContext authDisabledCtx = createAuthDisabledAnalysisCtx();
+    AnalysisError("GRANT ROLE myrole TO GROUP abc", authDisabledCtx,
         "Authorization is not enabled.");
-    AnalysisError("REVOKE ROLE myrole FROM GROUP abc", authDisabledAnalyzer,
+    AnalysisError("REVOKE ROLE myrole FROM GROUP abc", authDisabledCtx,
         "Authorization is not enabled.");
   }
 
@@ -126,7 +131,7 @@ public class AnalyzeAuthStmtsTest extends AnalyzerTest {
       if (isGrant) formatArgs = new String[] {"GRANT", "TO"};
       // ALL privileges
       AnalyzesOk(String.format("%s ALL ON TABLE alltypes %s myrole", formatArgs),
-          createAnalyzer("functional"));
+          createAnalysisCtx("functional"));
       AnalyzesOk(String.format("%s ALL ON TABLE functional.alltypes %s myrole",
           formatArgs));
       AnalyzesOk(String.format("%s ALL ON TABLE functional_kudu.alltypes %s myrole",
@@ -152,7 +157,7 @@ public class AnalyzeAuthStmtsTest extends AnalyzerTest {
 
       // INSERT privilege
       AnalyzesOk(String.format("%s INSERT ON TABLE alltypesagg %s myrole", formatArgs),
-          createAnalyzer("functional"));
+          createAnalysisCtx("functional"));
       AnalyzesOk(String.format(
           "%s INSERT ON TABLE functional_kudu.alltypessmall %s myrole", formatArgs));
       AnalyzesOk(String.format("%s INSERT ON TABLE functional.alltypesagg %s myrole",
@@ -167,7 +172,7 @@ public class AnalyzeAuthStmtsTest extends AnalyzerTest {
 
       // SELECT privilege
       AnalyzesOk(String.format("%s SELECT ON TABLE alltypessmall %s myrole", formatArgs),
-          createAnalyzer("functional"));
+          createAnalysisCtx("functional"));
       AnalyzesOk(String.format("%s SELECT ON TABLE functional.alltypessmall %s myrole",
           formatArgs));
       AnalyzesOk(String.format(
@@ -187,7 +192,7 @@ public class AnalyzeAuthStmtsTest extends AnalyzerTest {
           "%s myrole", formatArgs));
       // SELECT privilege on both regular and partition columns
       AnalyzesOk(String.format("%s SELECT (id, int_col, year, month) ON TABLE " +
-          "alltypes %s myrole", formatArgs), createAnalyzer("functional"));
+          "alltypes %s myrole", formatArgs), createAnalysisCtx("functional"));
       AnalyzesOk(String.format("%s SELECT (id, bool_col) ON TABLE " +
           "functional_kudu.alltypessmall %s myrole", formatArgs));
       // Empty column list
@@ -216,16 +221,20 @@ public class AnalyzeAuthStmtsTest extends AnalyzerTest {
           "exists and that you have permissions to issue a GRANT/REVOKE statement.");
     }
 
-    Analyzer authDisabledAnalyzer = createAuthDisabledAnalyzer(Catalog.DEFAULT_DB);
-    AnalysisError("GRANT ALL ON SERVER TO myRole", authDisabledAnalyzer,
+    AnalysisContext authDisabledCtx = createAuthDisabledAnalysisCtx();
+    AnalysisError("GRANT ALL ON SERVER TO myRole", authDisabledCtx,
         "Authorization is not enabled.");
-    AnalysisError("REVOKE ALL ON SERVER FROM myRole", authDisabledAnalyzer,
+    AnalysisError("REVOKE ALL ON SERVER FROM myRole", authDisabledCtx,
         "Authorization is not enabled.");
 
-    TQueryCtx queryCtxNoUsername = TestUtils.createQueryContext("default", "");
-    Analyzer noUsernameAnalyzer = new Analyzer(catalog_, queryCtxNoUsername,
-        AuthorizationConfig.createHadoopGroupAuthConfig("server1", null, null));
-    AnalysisError("GRANT ALL ON SERVER TO myRole", noUsernameAnalyzer,
+
+    TQueryCtx noUserNameQueryCtx = TestUtils.createQueryContext(
+        Catalog.DEFAULT_DB, "");
+    EventSequence timeline = new EventSequence("Authorization Test");
+    AnalysisContext noUserNameCtx = new AnalysisContext(noUserNameQueryCtx,
+        AuthorizationConfig.createHadoopGroupAuthConfig("server1", null, null),
+        timeline);
+    AnalysisError("GRANT ALL ON SERVER TO myRole", noUserNameCtx,
         "Cannot execute authorization statement with an empty username.");
   }
 }

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
----------------------------------------------------------------------
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
index 80c6916..ee4ed0e 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.impala.catalog.ArrayType;
-import org.apache.impala.catalog.Catalog;
 import org.apache.impala.catalog.CatalogException;
 import org.apache.impala.catalog.Column;
 import org.apache.impala.catalog.ColumnStats;
@@ -366,7 +365,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     // Cannot ALTER TABLE a nested collection.
     AnalysisError("alter table allcomplextypes.int_array_col " +
         "add columns (c1 string comment 'hi')",
-        createAnalyzer("functional"),
+        createAnalysisCtx("functional"),
         "ALTER TABLE not allowed on a nested collection: allcomplextypes.int_array_col");
     // Cannot ALTER TABLE produced by a data source.
     AnalysisError("alter table functional.alltypes_datasource " +
@@ -405,7 +404,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
         "ALTER TABLE not allowed on a view: functional.alltypes_view");
     // Cannot ALTER TABLE a nested collection.
     AnalysisError("alter table allcomplextypes.int_array_col drop column int_col",
-        createAnalyzer("functional"),
+        createAnalysisCtx("functional"),
         "ALTER TABLE not allowed on a nested collection: allcomplextypes.int_array_col");
     // Cannot ALTER TABLE produced by a data source.
     AnalysisError("alter table functional.alltypes_datasource drop column int_col",
@@ -457,7 +456,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     // Cannot ALTER TABLE a nested collection.
     AnalysisError("alter table allcomplextypes.int_array_col " +
         "change column int_col int_col2 int",
-        createAnalyzer("functional"),
+        createAnalysisCtx("functional"),
         "ALTER TABLE not allowed on a nested collection: allcomplextypes.int_array_col");
     // Cannot ALTER TABLE produced by a data source.
     AnalysisError("alter table functional.alltypes_datasource " +
@@ -681,7 +680,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
         "ALTER TABLE not allowed on a view: functional.alltypes_view");
     // Cannot ALTER TABLE a nested collection.
     AnalysisError("alter table allcomplextypes.int_array_col set fileformat sequencefile",
-        createAnalyzer("functional"),
+        createAnalysisCtx("functional"),
         "ALTER TABLE not allowed on a nested collection: allcomplextypes.int_array_col");
     // Cannot ALTER TABLE produced by a data source.
     AnalysisError("alter table functional.alltypes_datasource set fileformat parquet",
@@ -721,7 +720,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     AnalysisError("alter table functional.view_view set cached in 'testPool'",
         "ALTER TABLE not allowed on a view: functional.view_view");
     AnalysisError("alter table allcomplextypes.int_array_col set cached in 'testPool'",
-        createAnalyzer("functional"),
+        createAnalysisCtx("functional"),
         "ALTER TABLE not allowed on a nested collection: allcomplextypes.int_array_col");
 
     AnalysisError("alter table functional.alltypes set cached in 'badPool'",
@@ -841,7 +840,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     AnalysisError(
         "alter table allcomplextypes.int_array_col " +
         "set column stats int_col ('numNulls'='2')",
-        createAnalyzer("functional"),
+        createAnalysisCtx("functional"),
         "ALTER TABLE not allowed on a nested collection: allcomplextypes.int_array_col");
     // Cannot set column stats of partition columns.
     AnalysisError(
@@ -1024,7 +1023,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
         "ALTER TABLE not allowed on a view: functional.alltypes_view");
     // Cannot ALTER TABLE a nested collection.
     AnalysisError("alter table allcomplextypes.int_array_col rename to new_alltypes",
-        createAnalyzer("functional"),
+        createAnalysisCtx("functional"),
         "Database does not exist: allcomplextypes");
 
     // It should be okay to rename an HBase table.
@@ -1046,7 +1045,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     AnalysisError("alter table functional.view_view recover partitions",
         "ALTER TABLE not allowed on a view: functional.view_view");
     AnalysisError("alter table allcomplextypes.int_array_col recover partitions",
-        createAnalyzer("functional"),
+        createAnalysisCtx("functional"),
         "ALTER TABLE not allowed on a nested collection: allcomplextypes.int_array_col");
     AnalysisError("alter table functional_hbase.alltypes recover partitions",
         "ALTER TABLE RECOVER PARTITIONS must target an HDFS table: " +
@@ -1176,21 +1175,21 @@ public class AnalyzeDDLTest extends FrontendTestBase {
   }
 
   ComputeStatsStmt checkComputeStatsStmt(String stmt) throws AnalysisException {
-    return checkComputeStatsStmt(stmt, createAnalyzer(Catalog.DEFAULT_DB));
+    return checkComputeStatsStmt(stmt, createAnalysisCtx());
   }
 
-  ComputeStatsStmt checkComputeStatsStmt(String stmt, Analyzer analyzer)
+  ComputeStatsStmt checkComputeStatsStmt(String stmt, AnalysisContext ctx)
       throws AnalysisException {
-    return checkComputeStatsStmt(stmt, analyzer, null);
+    return checkComputeStatsStmt(stmt, ctx, null);
   }
 
   /**
    * Analyzes 'stmt' and checks that the table-level and column-level SQL that is used
    * to compute the stats is valid. Returns the analyzed statement.
    */
-  ComputeStatsStmt checkComputeStatsStmt(String stmt, Analyzer analyzer,
+  ComputeStatsStmt checkComputeStatsStmt(String stmt, AnalysisContext ctx,
       String expectedWarning) throws AnalysisException {
-    ParseNode parseNode = AnalyzesOk(stmt, analyzer, expectedWarning);
+    ParseNode parseNode = AnalyzesOk(stmt, ctx, expectedWarning);
     assertTrue(parseNode instanceof ComputeStatsStmt);
     ComputeStatsStmt parsedStmt = (ComputeStatsStmt)parseNode;
     AnalyzesOk(parsedStmt.getTblStatsQuery());
@@ -1349,7 +1348,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
           queryOpts.compute_stats_min_sample_size == 1024 * 1024 * 1024);
       ComputeStatsStmt noSamplingStmt = checkComputeStatsStmt(
           "compute stats functional.alltypes tablesample system (10) repeatable(1)",
-          createAnalyzer(queryOpts),
+          createAnalysisCtx(queryOpts),
           "Ignoring TABLESAMPLE because the effective sampling rate is 100%");
       Assert.assertTrue(noSamplingStmt.getEffectiveSamplingPerc() == 1.0);
       String tblStatsQuery = noSamplingStmt.getTblStatsQuery().toUpperCase();
@@ -1362,10 +1361,10 @@ public class AnalyzeDDLTest extends FrontendTestBase {
       // No minimum sample bytes.
       queryOpts.setCompute_stats_min_sample_size(0);
       checkComputeStatsStmt("compute stats functional.alltypes tablesample system (10)",
-          createAnalyzer(queryOpts));
+          createAnalysisCtx(queryOpts));
       checkComputeStatsStmt(
           "compute stats functional.alltypes tablesample system (55) repeatable(1)",
-          createAnalyzer(queryOpts));
+          createAnalysisCtx(queryOpts));
 
       // Sample is adjusted based on the minimum sample bytes.
       // Assumes that functional.alltypes has 24 files of roughly 20KB each.
@@ -1374,7 +1373,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
       queryOpts.setCompute_stats_min_sample_size(0);
       ComputeStatsStmt baselineStmt = checkComputeStatsStmt(
           "compute stats functional.alltypes tablesample system (1) repeatable(1)",
-          createAnalyzer(queryOpts));
+          createAnalysisCtx(queryOpts));
       // Approximate validation of effective sampling rate.
       Assert.assertTrue(baselineStmt.getEffectiveSamplingPerc() > 0.03);
       Assert.assertTrue(baselineStmt.getEffectiveSamplingPerc() < 0.05);
@@ -1383,7 +1382,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
       queryOpts.setCompute_stats_min_sample_size(100 * 1024);
       ComputeStatsStmt adjustedStmt = checkComputeStatsStmt(
           "compute stats functional.alltypes tablesample system (1) repeatable(1)",
-          createAnalyzer(queryOpts));
+          createAnalysisCtx(queryOpts));
       // Approximate validation to avoid flakiness due to sampling and file size
       // changes. Expect a sample between 4 and 6 of the 24 total files.
       Assert.assertTrue(adjustedStmt.getEffectiveSamplingPerc() >= 4.0 / 24);
@@ -3592,7 +3591,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     addTestTable("create table ambig.ambig (ambig struct<ambig:array<int>>)");
     // Single element path can only be resolved as <table>.
     DescribeTableStmt describe = (DescribeTableStmt)AnalyzesOk("describe ambig",
-        createAnalyzer("ambig"));
+        createAnalysisCtx("ambig"));
     TDescribeTableParams tdesc = (TDescribeTableParams) describe.toThrift();
     Assert.assertTrue(tdesc.isSetTable_name());
     Assert.assertEquals("ambig", tdesc.table_name.getDb_name());
@@ -3600,14 +3599,14 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     Assert.assertFalse(tdesc.isSetResult_struct());
 
     // Path could be resolved as either <db>.<table> or <table>.<complex field>
-    AnalysisError("describe ambig.ambig", createAnalyzer("ambig"),
+    AnalysisError("describe ambig.ambig", createAnalysisCtx("ambig"),
         "Path is ambiguous: 'ambig.ambig'");
     // Path could be resolved as either <db>.<table>.<field> or <table>.<field>.<field>
-    AnalysisError("describe ambig.ambig.ambig", createAnalyzer("ambig"),
+    AnalysisError("describe ambig.ambig.ambig", createAnalysisCtx("ambig"),
         "Path is ambiguous: 'ambig.ambig.ambig'");
     // 4 element path can only be resolved to nested array.
     describe = (DescribeTableStmt) AnalyzesOk(
-        "describe ambig.ambig.ambig.ambig", createAnalyzer("ambig"));
+        "describe ambig.ambig.ambig.ambig", createAnalysisCtx("ambig"));
     tdesc = (TDescribeTableParams) describe.toThrift();
     Type expectedType =
         org.apache.impala.analysis.Path.getTypeAsStruct(new ArrayType(Type.INT));
@@ -3660,7 +3659,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
           partition),
           "SHOW FILES not applicable to a non hdfs table: functional.alltypes_view");
       AnalysisError(String.format("show files in allcomplextypes.int_array_col %s",
-          partition), createAnalyzer("functional"),
+          partition), createAnalysisCtx("functional"),
           "SHOW FILES not applicable to a non hdfs table: allcomplextypes.int_array_col");
     }
 


[3/3] impala git commit: IMPALA-5152: Introduce metadata loading phase

Posted by ta...@apache.org.
IMPALA-5152: Introduce metadata loading phase

Reworks the collection and loading of missing metadata
when compiling a statement. Introduces a new
metadata-loading phase between parsing and analysis.
Summary of the new compilation flow:
1. Parse statement.
2. Collect all table references from the parsed
   statement and generate a list of tables that need
   to be loaded for analysis to succeed.
3. Request missing metadata and wait for it to arrive.
   As views become loaded, we expand the set of required
   tables based on the view definitions.
   This step populates a statement-local table cache
   that contains all loaded tables relevant to the
   statement.
4. Create a new Analyzer with the table cache and
   analyze the statement. During analysis only the
   table cache is consulted for table metadata; the
   ImpaladCatalog is no longer used for that purpose.
5. Authorize the statement.
6. Plan generation as usual.
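
For orientation, here is a minimal sketch of steps 1-6 as seen
from the frontend. Only AnalysisContext.analyzeAndAuthorize()
appears verbatim in this patch; parse() and the
StmtMetadataLoader constructor/loadTables() names below are
assumed placeholders:

// 1. Parse (parsing no longer happens inside AnalysisContext;
//    parse() stands in for the actual parser entry point).
StatementBase stmt = parse(queryString);
// 2. + 3. Collect table refs, request missing metadata and
//    wait for it; loadTables() is an assumed method name.
StmtMetadataLoader loader =
    new StmtMetadataLoader(/* frontend state, assumed */);
StmtTableCache tableCache = loader.loadTables(stmt);
// 4. + 5. Analyze and authorize against the loaded cache only.
AnalysisContext ctx =
    new AnalysisContext(queryCtx, authzConfig, timeline);
AnalysisResult result =
    ctx.analyzeAndAuthorize(stmt, tableCache, authzChecker);
// 6. Plan generation proceeds from 'result' as before.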

The intent of the existing code was to collect all tables
missing metadata during analysis, load the metadata, and then
re-analyze the statement (and repeat those steps until all
metadata is loaded).
Unfortunately, the relevant code was hard to follow, subtle,
and not well tested, so it broke in several ways over time.
For example, the introduction of path analysis for nested
types subtly broke the intended behavior, and there are
other similar examples.

The serial table loading observed in the JIRA was caused by the
following code in the resolution of table references:
for (all path interpretations) {
  try {
    // Try to resolve the path; might call getTable() which
    // throws for nonexistent tables.
  } catch (AnalysisException e) {
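    // Bug: once any table has been registered as missing, this
    // rethrow aborts resolution of the remaining interpretations.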
    if (analyzer.hasMissingTbls()) throw e;
  }
}

The following example illustrates the problem:
SELECT * FROM a.b, x.y
When resolving the path "a.b" we consider that "a" could be a
database or a table. Similarly, "b" could be a table or a
nested collection.
If the path resolution for "a.b" adds a missing table entry,
then the path resolution for "x.y" could exit prematurely,
without trying the other path interpretations that would
lead to adding the expected missing table. So effectively,
the tables end up being loaded one-by-one.
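
Concretely, path resolution now enumerates candidate tables
via Path.getCandidateTables() (added in this patch). A sketch
of what that enumeration presumably yields for this example:

// For rawPath ["a", "b"] and default database "d":
List<TableName> candidates =
    Path.getCandidateTables(rawPath, getDefaultDb());
// -> d.a ("a" is a table, "b" a nested collection inside it)
// -> a.b ("a" is a database, "b" a table in it)
// Resolution now tries every candidate and ignores
// per-candidate AnalysisExceptions, so one interpretation
// failing can no longer hide the missing table of another.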

Testing:
- A core/hdfs run succeeded
- No new tests were added because the existing functional tests
  provide good coverage of various metadata loading scenarios.
- The issue reported in IMPALA-5152 is basically impossible now.
  Adding FE unit tests specifically for that bug would require
  ugly changes to the new code.

Change-Id: I68d32d5acd4a6f6bc6cedb05e6cc5cf604d24a55
Reviewed-on: http://gerrit.cloudera.org:8080/8958
Reviewed-by: Alex Behm <al...@cloudera.com>
Tested-by: Impala Public Jenkins
Reviewed-on: http://gerrit.cloudera.org:8080/9408
Reviewed-by: Tim Armstrong <ta...@cloudera.com>
Tested-by: Alex Behm <al...@cloudera.com>


Project: http://git-wip-us.apache.org/repos/asf/impala/repo
Commit: http://git-wip-us.apache.org/repos/asf/impala/commit/e0c09181
Tree: http://git-wip-us.apache.org/repos/asf/impala/tree/e0c09181
Diff: http://git-wip-us.apache.org/repos/asf/impala/diff/e0c09181

Branch: refs/heads/2.x
Commit: e0c09181f0abc9943a489f8be8a9f8f8c451fb78
Parents: d89db5b
Author: Alex Behm <al...@cloudera.com>
Authored: Fri Apr 7 09:58:40 2017 -0700
Committer: Alex Behm <al...@cloudera.com>
Committed: Fri Feb 23 17:17:04 2018 +0000

----------------------------------------------------------------------
 .../apache/impala/analysis/AlterTableStmt.java  |   7 +
 .../apache/impala/analysis/AnalysisContext.java | 170 ++++++-----
 .../org/apache/impala/analysis/Analyzer.java    | 133 ++++-----
 .../impala/analysis/AuthorizationStmt.java      |   1 +
 .../impala/analysis/ComputeStatsStmt.java       |   5 +
 .../apache/impala/analysis/CreateDbStmt.java    |   2 -
 .../impala/analysis/CreateFunctionStmtBase.java |   2 +-
 .../analysis/CreateOrAlterViewStmtBase.java     |   6 +
 .../analysis/CreateTableAsSelectStmt.java       |  23 +-
 .../impala/analysis/CreateTableLikeStmt.java    |  13 +-
 .../apache/impala/analysis/CreateTableStmt.java |   5 +
 .../apache/impala/analysis/DescribeDbStmt.java  |   1 +
 .../impala/analysis/DescribeTableStmt.java      |   7 +-
 .../apache/impala/analysis/DropStatsStmt.java   |  11 +-
 .../impala/analysis/DropTableOrViewStmt.java    |  13 +-
 .../org/apache/impala/analysis/FromClause.java  |  39 +--
 .../impala/analysis/GrantRevokePrivStmt.java    |   6 +
 .../org/apache/impala/analysis/InsertStmt.java  |  48 ++--
 .../apache/impala/analysis/LimitElement.java    |   7 +-
 .../apache/impala/analysis/LoadDataStmt.java    |   8 +-
 .../org/apache/impala/analysis/ModifyStmt.java  |  34 ++-
 .../java/org/apache/impala/analysis/Path.java   |  23 ++
 .../apache/impala/analysis/PrivilegeSpec.java   |   7 +-
 .../org/apache/impala/analysis/QueryStmt.java   |  35 ++-
 .../impala/analysis/ResetMetadataStmt.java      |   7 +
 .../org/apache/impala/analysis/SelectStmt.java  |  21 +-
 .../org/apache/impala/analysis/SetStmt.java     |   1 +
 .../impala/analysis/ShowCreateFunctionStmt.java |   4 +-
 .../impala/analysis/ShowCreateTableStmt.java    |  10 +-
 .../apache/impala/analysis/ShowFilesStmt.java   |  11 +-
 .../impala/analysis/ShowFunctionsStmt.java      |   1 +
 .../impala/analysis/ShowGrantRoleStmt.java      |   8 +
 .../apache/impala/analysis/ShowStatsStmt.java   |  12 +-
 .../apache/impala/analysis/ShowTablesStmt.java  |   1 +
 .../apache/impala/analysis/StatementBase.java   |   9 +-
 .../impala/analysis/StmtMetadataLoader.java     | 280 +++++++++++++++++++
 .../apache/impala/analysis/TruncateStmt.java    |   9 +-
 .../org/apache/impala/analysis/UnionStmt.java   |  43 ++-
 .../org/apache/impala/analysis/WithClause.java  |  40 ++-
 .../java/org/apache/impala/catalog/Catalog.java |  26 +-
 .../apache/impala/catalog/ImpaladCatalog.java   |  33 +--
 .../apache/impala/common/AnalysisException.java |   4 +
 .../apache/impala/common/ImpalaException.java   |   4 +
 .../java/org/apache/impala/planner/Planner.java |  17 +-
 .../apache/impala/planner/PlannerContext.java   |  14 +-
 .../impala/planner/SingleNodePlanner.java       |  11 +-
 .../impala/service/CatalogOpExecutor.java       |  10 +-
 .../org/apache/impala/service/FeSupport.java    |  13 +-
 .../org/apache/impala/service/Frontend.java     | 193 +++----------
 .../org/apache/impala/service/MetadataOp.java   |  38 ++-
 .../org/apache/impala/util/EventSequence.java   |   3 +
 .../impala/analysis/AnalyzeAuthStmtsTest.java   |  81 +++---
 .../apache/impala/analysis/AnalyzeDDLTest.java  |  47 ++--
 .../impala/analysis/AnalyzeExprsTest.java       |  42 ++-
 .../impala/analysis/AnalyzeModifyStmtsTest.java |  15 +-
 .../impala/analysis/AnalyzeStmtsTest.java       |  36 +--
 .../impala/analysis/AnalyzeSubqueriesTest.java  |   5 +-
 .../impala/analysis/AnalyzeUpsertStmtTest.java  |   6 +-
 .../apache/impala/analysis/AnalyzerTest.java    |   4 +-
 .../apache/impala/analysis/AuditingTest.java    |  27 +-
 .../impala/analysis/AuthorizationTest.java      | 173 +++++-------
 .../org/apache/impala/analysis/ExprNdvTest.java |  23 +-
 .../impala/analysis/ExprRewriteRulesTest.java   |  57 ++--
 .../impala/analysis/ExprRewriterTest.java       |  31 +-
 .../impala/analysis/StmtMetadataLoaderTest.java | 180 ++++++++++++
 .../org/apache/impala/analysis/ToSqlTest.java   |  23 +-
 .../apache/impala/common/FrontendTestBase.java  | 128 ++++++---
 .../org/apache/impala/service/FrontendTest.java |  18 +-
 .../impala/testutil/ImpaladTestCatalog.java     |  57 ++--
 69 files changed, 1452 insertions(+), 930 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/AlterTableStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/AlterTableStmt.java b/fe/src/main/java/org/apache/impala/analysis/AlterTableStmt.java
index 6c7530a..75abaa7 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AlterTableStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AlterTableStmt.java
@@ -17,6 +17,8 @@
 
 package org.apache.impala.analysis;
 
+import java.util.List;
+
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.DataSourceTable;
 import org.apache.impala.catalog.Table;
@@ -67,6 +69,11 @@ public abstract class AlterTableStmt extends StatementBase {
   }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableName_.toPath(), null));
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     // Resolve and analyze this table ref so we can evaluate partition predicates.
     TableRef tableRef = new TableRef(tableName_.toPath(), null, Privilege.ALTER);

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/AnalysisContext.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/AnalysisContext.java b/fe/src/main/java/org/apache/impala/analysis/AnalysisContext.java
index 5ad97eb..c886079 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AnalysisContext.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AnalysisContext.java
@@ -17,12 +17,12 @@
 
 package org.apache.impala.analysis;
 
-import java.io.StringReader;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.impala.analysis.StmtMetadataLoader.StmtTableCache;
 import org.apache.impala.authorization.AuthorizationChecker;
 import org.apache.impala.authorization.AuthorizationConfig;
 import org.apache.impala.authorization.AuthorizeableColumn;
@@ -34,12 +34,15 @@ import org.apache.impala.catalog.Db;
 import org.apache.impala.catalog.ImpaladCatalog;
 import org.apache.impala.catalog.Type;
 import org.apache.impala.common.AnalysisException;
+import org.apache.impala.common.ImpalaException;
 import org.apache.impala.common.InternalException;
 import org.apache.impala.common.Pair;
+import org.apache.impala.common.RuntimeEnv;
 import org.apache.impala.rewrite.ExprRewriter;
 import org.apache.impala.thrift.TAccessEvent;
 import org.apache.impala.thrift.TLineageGraph;
 import org.apache.impala.thrift.TQueryCtx;
+import org.apache.impala.thrift.TQueryOptions;
 import org.apache.impala.util.EventSequence;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -54,47 +57,39 @@ import com.google.common.collect.Maps;
  */
 public class AnalysisContext {
   private final static Logger LOG = LoggerFactory.getLogger(AnalysisContext.class);
-  private ImpaladCatalog catalog_;
   private final TQueryCtx queryCtx_;
   private final AuthorizationConfig authzConfig_;
-  private final ExprRewriter customRewriter_;
-
-  // Timeline of important events in the planning process, used for debugging
-  // and profiling
-  private final EventSequence timeline_ = new EventSequence("Planner Timeline");
+  private final EventSequence timeline_;
 
-  // Set in analyze()
+  // Set in analyzeAndAuthorize().
+  private ImpaladCatalog catalog_;
   private AnalysisResult analysisResult_;
 
-  public AnalysisContext(ImpaladCatalog catalog, TQueryCtx queryCtx,
-      AuthorizationConfig authzConfig) {
-    setCatalog(catalog);
+  // Use Hive's scheme for auto-generating column labels. Only used for testing.
+  private boolean useHiveColLabels_;
+
+  public AnalysisContext(TQueryCtx queryCtx, AuthorizationConfig authzConfig,
+      EventSequence timeline) {
     queryCtx_ = queryCtx;
     authzConfig_ = authzConfig;
-    customRewriter_ = null;
+    timeline_ = timeline;
   }
 
-  /**
-   * C'tor with a custom ExprRewriter for testing.
-   */
-  protected AnalysisContext(ImpaladCatalog catalog, TQueryCtx queryCtx,
-      AuthorizationConfig authzConfig, ExprRewriter rewriter) {
-    setCatalog(catalog);
-    queryCtx_ = queryCtx;
-    authzConfig_ = authzConfig;
-    customRewriter_ = rewriter;
+  public ImpaladCatalog getCatalog() { return catalog_; }
+  public TQueryCtx getQueryCtx() { return queryCtx_; }
+  public TQueryOptions getQueryOptions() {
+    return queryCtx_.client_request.query_options;
   }
+  public String getUser() { return queryCtx_.session.connected_user; }
 
-  // Catalog may change between analysis attempts (e.g. when missing tables are loaded).
-  public void setCatalog(ImpaladCatalog catalog) {
-    Preconditions.checkNotNull(catalog);
-    catalog_ = catalog;
+  public void setUseHiveColLabels(boolean b) {
+    Preconditions.checkState(RuntimeEnv.INSTANCE.isTestEnv());
+    useHiveColLabels_ = b;
   }
 
   static public class AnalysisResult {
     private StatementBase stmt_;
     private Analyzer analyzer_;
-    private EventSequence timeline_;
     private boolean userHasProfileAccess_ = true;
 
     public boolean isAlterTableStmt() { return stmt_ instanceof AlterTableStmt; }
@@ -184,6 +179,27 @@ public class AnalysisContext {
       return isInsertStmt();
     }
 
+    /**
+     * Returns true for statements that may produce several privilege requests of
+     * hierarchical nature, e.g., table/column.
+     */
+    public boolean isHierarchicalAuthStmt() {
+      return isQueryStmt() || isInsertStmt() || isUpdateStmt() || isDeleteStmt()
+          || isCreateTableAsSelectStmt() || isCreateViewStmt() || isAlterViewStmt();
+    }
+
+    /**
+     * Returns true for statements that may produce a single column-level privilege
+     * request without a request at the table level.
+     * Example: USE functional; ALTER TABLE allcomplextypes.int_array_col [...];
+     * The table ref path 'allcomplextypes.int_array_col' resolves to
+     * a column, so a column-level privilege request is registered.
+     */
+    public boolean isSingleColumnPrivStmt() {
+      return isDescribeTableStmt() || isResetMetadataStmt() || isUseStmt()
+          || isShowTablesStmt() || isAlterTableStmt();
+    }
+
     public AlterTableStmt getAlterTableStmt() {
       Preconditions.checkState(isAlterTableStmt());
       return (AlterTableStmt) stmt_;
@@ -335,7 +351,6 @@ public class AnalysisContext {
 
     public StatementBase getStmt() { return stmt_; }
     public Analyzer getAnalyzer() { return analyzer_; }
-    public EventSequence getTimeline() { return timeline_; }
     public Set<TAccessEvent> getAccessEvents() { return analyzer_.getAccessEvents(); }
     public boolean requiresSubqueryRewrite() {
       return analyzer_.containsSubquery() && !(stmt_ instanceof CreateViewStmt)
@@ -352,46 +367,66 @@ public class AnalysisContext {
     public boolean userHasProfileAccess() { return userHasProfileAccess_; }
   }
 
+  public Analyzer createAnalyzer(StmtTableCache stmtTableCache) {
+    Analyzer result = new Analyzer(stmtTableCache, queryCtx_, authzConfig_);
+    result.setUseHiveColLabels(useHiveColLabels_);
+    return result;
+  }
+
   /**
-   * Parse and analyze 'stmt'. If 'stmt' is a nested query (i.e. query that
-   * contains subqueries), it is also rewritten by performing subquery unnesting.
-   * The transformed stmt is then re-analyzed in a new analysis context.
-   *
-   * The result of analysis can be retrieved by calling
-   * getAnalysisResult().
-   *
-   * @throws AnalysisException
-   *           On any other error, including parsing errors. Also thrown when any
-   *           missing tables are detected as a result of running analysis.
+   * Analyzes and authorizes the given statement using the provided table cache and
+   * authorization checker.
+   * AuthorizationExceptions take precedence over AnalysisExceptions so as not to
+   * reveal the existence/absence of objects the user is not authorized to see.
    */
-  public void analyze(String stmt) throws AnalysisException {
-    Analyzer analyzer = new Analyzer(catalog_, queryCtx_, authzConfig_);
-    analyze(stmt, analyzer);
+  public AnalysisResult analyzeAndAuthorize(StatementBase stmt,
+      StmtTableCache stmtTableCache, AuthorizationChecker authzChecker)
+      throws ImpalaException {
+    // TODO: Clean up the creation/setting of the analysis result.
+    analysisResult_ = new AnalysisResult();
+    analysisResult_.stmt_ = stmt;
+    catalog_ = stmtTableCache.catalog;
+
+    // Analyze statement and record exception.
+    AnalysisException analysisException = null;
+    try {
+      analyze(stmtTableCache);
+    } catch (AnalysisException e) {
+      analysisException = e;
+    }
+
+    // Authorize statement and record exception. Authorization relies on information
+    // collected during analysis.
+    AuthorizationException authException = null;
+    try {
+      authorize(authzChecker);
+    } catch (AuthorizationException e) {
+      authException = e;
+    }
+
+    // AuthorizationExceptions take precedence over AnalysisExceptions so as not
+    // to reveal the existence/absence of objects the user is not authorized to see.
+    if (authException != null) throw authException;
+    if (analysisException != null) throw analysisException;
+    return analysisResult_;
   }
 
   /**
-   * Parse and analyze 'stmt' using a specified Analyzer.
+   * Analyzes the statement set in 'analysisResult_' with a new Analyzer based on the
+   * given loaded tables. Performs expr and subquery rewrites which require re-analyzing
+   * the transformed statement.
    */
-  public void analyze(String stmt, Analyzer analyzer) throws AnalysisException {
-    SqlScanner input = new SqlScanner(new StringReader(stmt));
-    SqlParser parser = new SqlParser(input);
+  private void analyze(StmtTableCache stmtTableCache) throws AnalysisException {
+    Preconditions.checkNotNull(analysisResult_);
+    Preconditions.checkNotNull(analysisResult_.stmt_);
     try {
-      analysisResult_ = new AnalysisResult();
-      analysisResult_.analyzer_ = analyzer;
-      if (analysisResult_.analyzer_ == null) {
-        analysisResult_.analyzer_ = new Analyzer(catalog_, queryCtx_, authzConfig_);
-      }
-      analysisResult_.timeline_ = timeline_;
-      analysisResult_.stmt_ = (StatementBase) parser.parse().value;
-      if (analysisResult_.stmt_ == null) return;
-
+      analysisResult_.analyzer_ = createAnalyzer(stmtTableCache);
       analysisResult_.stmt_.analyze(analysisResult_.analyzer_);
       boolean isExplain = analysisResult_.isExplainStmt();
 
       // Apply expr and subquery rewrites.
       boolean reAnalyze = false;
-      ExprRewriter rewriter = (customRewriter_ != null) ? customRewriter_ :
-          analyzer.getExprRewriter();
+      ExprRewriter rewriter = analysisResult_.analyzer_.getExprRewriter();
       if (analysisResult_.requiresExprRewrite()) {
         rewriter.reset();
         analysisResult_.stmt_.rewriteExprs(rewriter);
@@ -414,7 +449,7 @@ public class AnalysisContext {
             Lists.newArrayList(analysisResult_.stmt_.getColLabels());
 
         // Re-analyze the stmt with a new analyzer.
-        analysisResult_.analyzer_ = new Analyzer(catalog_, queryCtx_, authzConfig_);
+        analysisResult_.analyzer_ = createAnalyzer(stmtTableCache);
         analysisResult_.stmt_.reset();
         analysisResult_.stmt_.analyze(analysisResult_.analyzer_);
 
@@ -430,8 +465,6 @@ public class AnalysisContext {
     } catch (AnalysisException e) {
       // Don't wrap AnalysisExceptions in another AnalysisException
       throw e;
-    } catch (Exception e) {
-      throw new AnalysisException(parser.getErrorMsg(stmt), e);
     }
   }
 
@@ -440,16 +473,15 @@ public class AnalysisContext {
    * analyze() must have already been called. Throws an AuthorizationException if the
    * user doesn't have sufficient privileges to run this statement.
    */
-  public void authorize(AuthorizationChecker authzChecker)
+  private void authorize(AuthorizationChecker authzChecker)
       throws AuthorizationException, InternalException {
     Preconditions.checkNotNull(analysisResult_);
     Analyzer analyzer = getAnalyzer();
-    // Process statements for which column-level privilege requests may be registered
-    // except for DESCRIBE TABLE, REFRESH/INVALIDATE, USE or SHOW TABLES statements.
-    if (analysisResult_.isQueryStmt() || analysisResult_.isInsertStmt() ||
-        analysisResult_.isUpdateStmt() || analysisResult_.isDeleteStmt() ||
-        analysisResult_.isCreateTableAsSelectStmt() ||
-        analysisResult_.isCreateViewStmt() || analysisResult_.isAlterViewStmt()) {
+    // Authorize statements that may produce several hierarchical privilege requests.
+    // Such a statement always has a corresponding table-level privilege request if it
+    // has a column-level privilege request. The hierarchical nature requires special
+    // logic to process correctly and efficiently.
+    if (analysisResult_.isHierarchicalAuthStmt()) {
       // Map of table name to a list of privilege requests associated with that table.
       // These include both table-level and column-level privilege requests. We use a
       // LinkedHashMap to preserve the order in which requests are inserted.
@@ -492,10 +524,7 @@ public class AnalysisContext {
       for (PrivilegeRequest privReq: analyzer.getPrivilegeReqs()) {
         Preconditions.checkState(
             !(privReq.getAuthorizeable() instanceof AuthorizeableColumn) ||
-            analysisResult_.isDescribeTableStmt() ||
-            analysisResult_.isResetMetadataStmt() ||
-            analysisResult_.isUseStmt() ||
-            analysisResult_.isShowTablesStmt());
+            analysisResult_.isSingleColumnPrivStmt());
         authorizePrivilegeRequest(authzChecker, privReq);
       }
     }
@@ -603,7 +632,6 @@ public class AnalysisContext {
     return false;
   }
 
-  public AnalysisResult getAnalysisResult() { return analysisResult_; }
-  public Analyzer getAnalyzer() { return getAnalysisResult().getAnalyzer(); }
+  public Analyzer getAnalyzer() { return analysisResult_.getAnalyzer(); }
   public EventSequence getTimeline() { return timeline_; }
 }

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/Analyzer.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/Analyzer.java b/fe/src/main/java/org/apache/impala/analysis/Analyzer.java
index 5e9c788..02bcf31 100644
--- a/fe/src/main/java/org/apache/impala/analysis/Analyzer.java
+++ b/fe/src/main/java/org/apache/impala/analysis/Analyzer.java
@@ -19,7 +19,6 @@ package org.apache.impala.analysis;
 
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.IdentityHashMap;
 import java.util.Iterator;
@@ -30,13 +29,13 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.impala.analysis.Path.PathType;
+import org.apache.impala.analysis.StmtMetadataLoader.StmtTableCache;
 import org.apache.impala.authorization.AuthorizationConfig;
 import org.apache.impala.authorization.AuthorizeableTable;
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.authorization.PrivilegeRequest;
 import org.apache.impala.authorization.PrivilegeRequestBuilder;
 import org.apache.impala.authorization.User;
-import org.apache.impala.catalog.CatalogException;
 import org.apache.impala.catalog.Column;
 import org.apache.impala.catalog.DataSourceTable;
 import org.apache.impala.catalog.DatabaseNotFoundException;
@@ -44,6 +43,7 @@ import org.apache.impala.catalog.Db;
 import org.apache.impala.catalog.HBaseTable;
 import org.apache.impala.catalog.HdfsTable;
 import org.apache.impala.catalog.ImpaladCatalog;
+import org.apache.impala.catalog.IncompleteTable;
 import org.apache.impala.catalog.KuduTable;
 import org.apache.impala.catalog.Table;
 import org.apache.impala.catalog.TableLoadingException;
@@ -57,7 +57,6 @@ import org.apache.impala.common.Pair;
 import org.apache.impala.common.RuntimeEnv;
 import org.apache.impala.planner.PlanNode;
 import org.apache.impala.rewrite.BetweenToCompoundRule;
-import org.apache.impala.rewrite.RemoveRedundantStringCast;
 import org.apache.impala.rewrite.EqualityDisjunctsToInRule;
 import org.apache.impala.rewrite.ExprRewriteRule;
 import org.apache.impala.rewrite.ExprRewriter;
@@ -66,6 +65,7 @@ import org.apache.impala.rewrite.FoldConstantsRule;
 import org.apache.impala.rewrite.NormalizeBinaryPredicatesRule;
 import org.apache.impala.rewrite.NormalizeCountStarRule;
 import org.apache.impala.rewrite.NormalizeExprsRule;
+import org.apache.impala.rewrite.RemoveRedundantStringCast;
 import org.apache.impala.rewrite.SimplifyConditionalsRule;
 import org.apache.impala.rewrite.SimplifyDistinctFromRule;
 import org.apache.impala.service.FeSupport;
@@ -192,9 +192,6 @@ public class Analyzer {
   // a tuple is outer/semi joined, etc. Remove the maps in favor of making
   // them properties of the tuple descriptor itself.
   private static class GlobalState {
-    // TODO: Consider adding an "exec-env"-like global singleton that contains the
-    // catalog and authzConfig.
-    public final ImpaladCatalog catalog;
     public final TQueryCtx queryCtx;
     public final AuthorizationConfig authzConfig;
     public final DescriptorTable descTbl = new DescriptorTable();
@@ -302,10 +299,8 @@ public class Analyzer {
     // Decreases the size of the scan range locations.
     private final ListMap<TNetworkAddress> hostIndex = new ListMap<TNetworkAddress>();
 
-    // The Impalad Catalog has the latest tables from the statestore. In order to use the
-    // same version of a table in a single query, we cache all referenced tables here.
-    // TODO: Investigate what to do with other catalog objects.
-    private final HashMap<TableName, Table> referencedTables_ = Maps.newHashMap();
+    // Cache of statement-relevant table metadata populated before analysis.
+    private final StmtTableCache stmtTableCache;
 
     // Expr rewriter for folding constants.
     private final ExprRewriter constantFolder_ =
@@ -314,9 +309,9 @@ public class Analyzer {
     // Expr rewriter for normalizing and rewriting expressions.
     private final ExprRewriter exprRewriter_;
 
-    public GlobalState(ImpaladCatalog catalog, TQueryCtx queryCtx,
+    public GlobalState(StmtTableCache stmtTableCache, TQueryCtx queryCtx,
         AuthorizationConfig authzConfig) {
-      this.catalog = catalog;
+      this.stmtTableCache = stmtTableCache;
       this.queryCtx = queryCtx;
       this.authzConfig = authzConfig;
       this.lineageGraph = new ColumnLineageGraph();
@@ -373,9 +368,6 @@ public class Analyzer {
   // that have a scalar type as destination (see registerSlotRef()).
   private final Map<String, SlotDescriptor> slotPathMap_ = Maps.newHashMap();
 
-  // Tracks the all tables/views found during analysis that were missing metadata.
-  private Set<TableName> missingTbls_ = new HashSet<TableName>();
-
   // Indicates whether this analyzer/block is guaranteed to have an empty result set
   // due to a limit 0 or constant conjunct evaluating to false.
   private boolean hasEmptyResultSet_ = false;
@@ -385,10 +377,10 @@ public class Analyzer {
   // conjunct evaluating to false.
   private boolean hasEmptySpjResultSet_ = false;
 
-  public Analyzer(ImpaladCatalog catalog, TQueryCtx queryCtx,
+  public Analyzer(StmtTableCache stmtTableCache, TQueryCtx queryCtx,
       AuthorizationConfig authzConfig) {
     ancestors_ = Lists.newArrayList();
-    globalState_ = new GlobalState(catalog, queryCtx, authzConfig);
+    globalState_ = new GlobalState(stmtTableCache, queryCtx, authzConfig);
     user_ = new User(TSessionStateUtil.getEffectiveUser(queryCtx.session));
   }
 
@@ -407,7 +399,6 @@ public class Analyzer {
     ancestors_ = Lists.newArrayList(parentAnalyzer);
     ancestors_.addAll(parentAnalyzer.ancestors_);
     globalState_ = globalState;
-    missingTbls_ = parentAnalyzer.missingTbls_;
     user_ = parentAnalyzer.getUser();
     useHiveColLabels_ = parentAnalyzer.useHiveColLabels_;
     authErrorMsg_ = parentAnalyzer.authErrorMsg_;
@@ -421,7 +412,7 @@ public class Analyzer {
    * global state.
    */
   public static Analyzer createWithNewGlobalState(Analyzer parentAnalyzer) {
-    GlobalState globalState = new GlobalState(parentAnalyzer.globalState_.catalog,
+    GlobalState globalState = new GlobalState(parentAnalyzer.globalState_.stmtTableCache,
         parentAnalyzer.getQueryCtx(), parentAnalyzer.getAuthzConfig());
     return new Analyzer(parentAnalyzer, globalState);
   }
@@ -437,8 +428,6 @@ public class Analyzer {
     visibleSemiJoinedTupleId_ = tid;
   }
 
-  public Set<TableName> getMissingTbls() { return missingTbls_; }
-  public boolean hasMissingTbls() { return !missingTbls_.isEmpty(); }
   public boolean hasAncestors() { return !ancestors_.isEmpty(); }
   public Analyzer getParentAnalyzer() {
     return hasAncestors() ? ancestors_.get(0) : null;
@@ -575,19 +564,17 @@ public class Analyzer {
     try {
       resolvedPath = resolvePath(tableRef.getPath(), PathType.TABLE_REF);
     } catch (AnalysisException e) {
-      if (!hasMissingTbls()) {
-        // Register privilege requests to prefer reporting an authorization error over
-        // an analysis error. We should not accidentally reveal the non-existence of a
-        // table/database if the user is not authorized.
-        if (rawPath.size() > 1) {
-          registerPrivReq(new PrivilegeRequestBuilder()
-              .onTable(rawPath.get(0), rawPath.get(1))
-              .allOf(tableRef.getPrivilege()).toRequest());
-        }
+      // Register privilege requests to prefer reporting an authorization error over
+      // an analysis error. We should not accidentally reveal the non-existence of a
+      // table/database if the user is not authorized.
+      if (rawPath.size() > 1) {
         registerPrivReq(new PrivilegeRequestBuilder()
-            .onTable(getDefaultDb(), rawPath.get(0))
+            .onTable(rawPath.get(0), rawPath.get(1))
             .allOf(tableRef.getPrivilege()).toRequest());
       }
+      registerPrivReq(new PrivilegeRequestBuilder()
+          .onTable(getDefaultDb(), rawPath.get(0))
+          .allOf(tableRef.getPrivilege()).toRequest());
       throw e;
     } catch (TableLoadingException e) {
       throw new AnalysisException(String.format(
@@ -780,16 +767,14 @@ public class Analyzer {
       candidates.clear();
 
       // Add paths rooted at a table with an unqualified and fully-qualified table name.
-      int end = Math.min(2, rawPath.size());
-      for (int tblNameIdx = 0; tblNameIdx < end; ++tblNameIdx) {
-        String dbName = (tblNameIdx == 0) ? getDefaultDb() : rawPath.get(0);
-        String tblName = rawPath.get(tblNameIdx);
+      List<TableName> candidateTbls = Path.getCandidateTables(rawPath, getDefaultDb());
+      for (int tblNameIdx = 0; tblNameIdx < candidateTbls.size(); ++tblNameIdx) {
+        TableName tblName = candidateTbls.get(tblNameIdx);
         Table tbl = null;
         try {
-          tbl = getTable(dbName, tblName);
+          tbl = getTable(tblName.getDb(), tblName.getTbl());
         } catch (AnalysisException e) {
-          if (hasMissingTbls()) throw e;
-          // Ignore other exceptions to allow path resolution to continue.
+          // Ignore to allow path resolution to continue.
         }
         if (tbl != null) {
           candidates.add(new Path(tbl, rawPath.subList(tblNameIdx + 1, rawPath.size())));
@@ -1318,7 +1303,8 @@ public class Analyzer {
   }
 
   public DescriptorTable getDescTbl() { return globalState_.descTbl; }
-  public ImpaladCatalog getCatalog() { return globalState_.catalog; }
+  public ImpaladCatalog getCatalog() { return globalState_.stmtTableCache.catalog; }
+  public StmtTableCache getStmtTableCache() { return globalState_.stmtTableCache; }
   public Set<String> getAliases() { return aliasMap_.keySet(); }
 
   /**
@@ -2361,64 +2347,47 @@ public class Analyzer {
   }
 
   /**
-   * Returns the Catalog Table object for the given database and table name. A table
-   * referenced for the first time is cached in globalState_.referencedTables_. The same
-   * table instance is returned for all subsequent references in the same query.
-   * Adds the table to this analyzer's "missingTbls_" and throws an AnalysisException if
-   * the table has not yet been loaded in the local catalog cache.
-   * Throws an AnalysisException if the table or the db does not exist in the Catalog.
-   * This function does not register authorization requests and does not log access events.
+   * Returns the Table for the given database and table name from the 'stmtTableCache'
+   * in the global analysis state.
+   * Throws an AnalysisException if the database or table does not exist.
+   * Throws a TableLoadingException if the registered table failed to load.
+   * Does not register authorization requests or access events.
    */
   public Table getTable(String dbName, String tableName)
       throws AnalysisException, TableLoadingException {
     TableName tblName = new TableName(dbName, tableName);
-    Table table = globalState_.referencedTables_.get(tblName);
-    if (table != null) {
-      // Return query-local version of table.
-      Preconditions.checkState(table.isLoaded());
-      return table;
-    }
-    try {
-      table = getCatalog().getTable(dbName, tableName);
-    } catch (DatabaseNotFoundException e) {
-      throw new AnalysisException(DB_DOES_NOT_EXIST_ERROR_MSG + dbName);
-    } catch (CatalogException e) {
-      String errMsg = String.format("Failed to load metadata for table: %s", tableName);
-      // We don't want to log all AnalysisExceptions as ERROR, only failures due to
-      // TableLoadingExceptions.
-      LOG.error(String.format("%s\n%s", errMsg, e.getMessage()));
-      if (e instanceof TableLoadingException) throw (TableLoadingException) e;
-      throw new TableLoadingException(errMsg, e);
-    }
+    Table table = globalState_.stmtTableCache.tables.get(tblName);
     if (table == null) {
-      throw new AnalysisException(
-          TBL_DOES_NOT_EXIST_ERROR_MSG + dbName + "." + tableName);
+      if (!globalState_.stmtTableCache.dbs.contains(tblName.getDb())) {
+        throw new AnalysisException(DB_DOES_NOT_EXIST_ERROR_MSG + tblName.getDb());
+      } else {
+        throw new AnalysisException(TBL_DOES_NOT_EXIST_ERROR_MSG + tblName.toString());
+      }
     }
-    if (!table.isLoaded()) {
-      missingTbls_.add(new TableName(table.getDb().getName(), table.getName()));
-      throw new AnalysisException(
-          "Table/view is missing metadata: " + table.getFullName());
+    Preconditions.checkState(table.isLoaded());
+    if (table instanceof IncompleteTable) {
+      // If there were problems loading this table's metadata, throw an exception
+      // when it is accessed.
+      ImpalaException cause = ((IncompleteTable) table).getCause();
+      if (cause instanceof TableLoadingException) throw (TableLoadingException) cause;
+      throw new TableLoadingException("Missing metadata for table: " + tableName, cause);
     }
-    globalState_.referencedTables_.put(tblName, table);
     return table;
   }
 
   /**
-   * Returns the Catalog Table object for the TableName.
-   * Adds the table to this analyzer's "missingTbls_" and throws an AnalysisException if
-   * the table has not yet been loaded in the local catalog cache.
-   * Throws an AnalysisException if the table or the db does not exist in the Catalog.
+   * Returns the Table with the given name from the 'stmtTableCache' in the global
+   * analysis state. Throws an AnalysisException if the table or the db does not exist.
+   * Throws a TableLoadingException if the registered table failed to load.
    * Always registers a privilege request for the table at the given privilege level,
    * regardless of the state of the table (i.e. whether it exists, is loaded, etc.).
-   * If addAccessEvent is true, adds an access event if the catalog access succeeded.
+   * If addAccessEvent is true, adds an access event for successfully loaded tables.
    */
   public Table getTable(TableName tableName, Privilege privilege, boolean addAccessEvent)
       throws AnalysisException, TableLoadingException {
     Preconditions.checkNotNull(tableName);
     Preconditions.checkNotNull(privilege);
-    Table table = null;
-    tableName = new TableName(getTargetDbName(tableName), tableName.getTbl());
-
+    tableName = getFqTableName(tableName);
     if (privilege == Privilege.ANY) {
       registerPrivReq(new PrivilegeRequestBuilder()
           .any().onAnyColumn(tableName.getDb(), tableName.getTbl()).toRequest());
@@ -2426,7 +2395,7 @@ public class Analyzer {
       registerPrivReq(new PrivilegeRequestBuilder()
           .allOf(privilege).onTable(tableName.getDb(), tableName.getTbl()).toRequest());
     }
-    table = getTable(tableName.getDb(), tableName.getTbl());
+    Table table = getTable(tableName.getDb(), tableName.getTbl());
     Preconditions.checkNotNull(table);
     if (addAccessEvent) {
       // Add an audit event for this access
@@ -2448,12 +2417,10 @@ public class Analyzer {
    */
   public Table getTable(TableName tableName, Privilege privilege)
       throws AnalysisException {
-    // This may trigger a metadata load, in which case we want to return the errors as
-    // AnalysisExceptions.
     try {
       return getTable(tableName, privilege, true);
     } catch (TableLoadingException e) {
-      throw new AnalysisException(e.getMessage(), e);
+      throw new AnalysisException(e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/AuthorizationStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/AuthorizationStmt.java b/fe/src/main/java/org/apache/impala/analysis/AuthorizationStmt.java
index 9bfe2e6..c0186b3 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AuthorizationStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AuthorizationStmt.java
@@ -19,6 +19,7 @@ package org.apache.impala.analysis;
 
 import org.apache.impala.authorization.User;
 import org.apache.impala.common.AnalysisException;
+
 import com.google.common.base.Strings;
 
 /**

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/ComputeStatsStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ComputeStatsStmt.java b/fe/src/main/java/org/apache/impala/analysis/ComputeStatsStmt.java
index 54daf7f..5df3dfa 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ComputeStatsStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ComputeStatsStmt.java
@@ -187,6 +187,11 @@ public class ComputeStatsStmt extends StatementBase {
     }
   }
 
+  @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableName_.toPath(), null));
+  }
+
   /**
    * Returns a stmt for COMPUTE STATS. The optional 'sampleParams' indicates whether the
    * stats should be computed with table sampling.

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/CreateDbStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateDbStmt.java b/fe/src/main/java/org/apache/impala/analysis/CreateDbStmt.java
index 9ca14f4..c803910 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateDbStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateDbStmt.java
@@ -18,8 +18,6 @@
 package org.apache.impala.analysis;
 
 import org.apache.hadoop.fs.permission.FsAction;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
-
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.Db;
 import org.apache.impala.common.AnalysisException;

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/CreateFunctionStmtBase.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateFunctionStmtBase.java b/fe/src/main/java/org/apache/impala/analysis/CreateFunctionStmtBase.java
index 211b3de..5d8101f 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateFunctionStmtBase.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateFunctionStmtBase.java
@@ -22,7 +22,6 @@ import java.util.HashMap;
 import java.util.List;
 
 import org.apache.hadoop.fs.permission.FsAction;
-
 import org.apache.impala.authorization.AuthorizeableFn;
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.authorization.PrivilegeRequest;
@@ -33,6 +32,7 @@ import org.apache.impala.catalog.Type;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TCreateFunctionParams;
 import org.apache.impala.thrift.TFunctionBinaryType;
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/CreateOrAlterViewStmtBase.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateOrAlterViewStmtBase.java b/fe/src/main/java/org/apache/impala/analysis/CreateOrAlterViewStmtBase.java
index 701964a..4310871 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateOrAlterViewStmtBase.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateOrAlterViewStmtBase.java
@@ -80,6 +80,12 @@ public abstract class CreateOrAlterViewStmtBase extends StatementBase {
     this.viewDefStmt_ = viewDefStmt;
   }
 
+  @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableName_.toPath(), null));
+    viewDefStmt_.collectTableRefs(tblRefs);
+  }
+
   /**
    * Sets the originalViewDef and the expanded inlineViewDef based on viewDefStmt.
    * If columnDefs were given, checks that they do not contain duplicate column names

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java b/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java
index e050d06..c5c092d 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java
@@ -92,6 +92,12 @@ public class CreateTableAsSelectStmt extends StatementBase {
   public String toSql() { return ToSqlUtils.getCreateTableSql(this); }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    createStmt_.collectTableRefs(tblRefs);
+    insertStmt_.collectTableRefs(tblRefs);
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     if (isAnalyzed()) return;
     super.analyze(analyzer);
@@ -113,19 +119,14 @@ public class CreateTableAsSelectStmt extends StatementBase {
     // query portion of the insert statement. If this passes, analysis will be run
     // over the full INSERT statement. To avoid duplicate registrations of table/colRefs,
     // create a new root analyzer and clone the query statement for this initial pass.
-    Analyzer dummyRootAnalyzer = new Analyzer(analyzer.getCatalog(),
+    Analyzer dummyRootAnalyzer = new Analyzer(analyzer.getStmtTableCache(),
         analyzer.getQueryCtx(), analyzer.getAuthzConfig());
     QueryStmt tmpQueryStmt = insertStmt_.getQueryStmt().clone();
-    try {
-      Analyzer tmpAnalyzer = new Analyzer(dummyRootAnalyzer);
-      tmpAnalyzer.setUseHiveColLabels(true);
-      tmpQueryStmt.analyze(tmpAnalyzer);
-      // Subqueries need to be rewritten by the StmtRewriter first.
-      if (analyzer.containsSubquery()) return;
-    } finally {
-      // Record missing tables in the original analyzer.
-      analyzer.getMissingTbls().addAll(dummyRootAnalyzer.getMissingTbls());
-    }
+    Analyzer tmpAnalyzer = new Analyzer(dummyRootAnalyzer);
+    tmpAnalyzer.setUseHiveColLabels(true);
+    tmpQueryStmt.analyze(tmpAnalyzer);
+    // Subqueries need to be rewritten by the StmtRewriter first.
+    if (analyzer.containsSubquery()) return;
 
     // Add the columns from the partition clause to the create statement.
     if (partitionKeys_ != null) {

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeStmt.java b/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeStmt.java
index 4091ef4..041ab9e 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateTableLikeStmt.java
@@ -19,11 +19,7 @@ package org.apache.impala.analysis;
 
 import java.util.List;
 
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
 import org.apache.hadoop.fs.permission.FsAction;
-
-import org.apache.impala.analysis.TableDef;
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.KuduTable;
 import org.apache.impala.catalog.Table;
@@ -34,6 +30,9 @@ import org.apache.impala.thrift.TCreateTableLikeParams;
 import org.apache.impala.thrift.THdfsFileFormat;
 import org.apache.impala.thrift.TTableName;
 
+import com.google.common.base.Joiner;
+import com.google.common.base.Preconditions;
+
 /**
  * Represents a CREATE TABLE LIKE statement which creates a new table based on
  * a copy of an existing table definition.
@@ -144,6 +143,12 @@ public class CreateTableLikeStmt extends StatementBase {
   }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableName_.toPath(), null));
+    tblRefs.add(new TableRef(srcTableName_.toPath(), null));
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     Preconditions.checkState(tableName_ != null && !tableName_.isEmpty());
     Preconditions.checkState(srcTableName_ != null && !srcTableName_.isEmpty());

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java b/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java
index 2e5425a..ee020df 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java
@@ -182,6 +182,11 @@ public class CreateTableStmt extends StatementBase {
   }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableDef_.getTblName().toPath(), null));
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     super.analyze(analyzer);
     owner_ = analyzer.getUser().getName();

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/DescribeDbStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/DescribeDbStmt.java b/fe/src/main/java/org/apache/impala/analysis/DescribeDbStmt.java
index c6ffc9e..e7895b1 100644
--- a/fe/src/main/java/org/apache/impala/analysis/DescribeDbStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/DescribeDbStmt.java
@@ -21,6 +21,7 @@ import org.apache.impala.authorization.Privilege;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TDescribeDbParams;
 import org.apache.impala.thrift.TDescribeOutputStyle;
+
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/DescribeTableStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/DescribeTableStmt.java b/fe/src/main/java/org/apache/impala/analysis/DescribeTableStmt.java
index 6977f3b..90f8b07 100644
--- a/fe/src/main/java/org/apache/impala/analysis/DescribeTableStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/DescribeTableStmt.java
@@ -18,6 +18,7 @@
 package org.apache.impala.analysis;
 
 import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.impala.analysis.Path.PathType;
@@ -101,6 +102,11 @@ public class DescribeTableStmt extends StatementBase {
   }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(rawPath_, null));
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     try {
       path_ = analyzer.resolvePath(rawPath_, PathType.ANY);
@@ -108,7 +114,6 @@ public class DescribeTableStmt extends StatementBase {
       // Register privilege requests to prefer reporting an authorization error over
       // an analysis error. We should not accidentally reveal the non-existence of a
       // table/database if the user is not authorized.
-      if (analyzer.hasMissingTbls()) throw ae;
       if (rawPath_.size() > 1) {
         analyzer.registerPrivReq(new PrivilegeRequestBuilder()
             .onTable(rawPath_.get(0), rawPath_.get(1))

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/DropStatsStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/DropStatsStmt.java b/fe/src/main/java/org/apache/impala/analysis/DropStatsStmt.java
index e39071f..9ac0a11 100644
--- a/fe/src/main/java/org/apache/impala/analysis/DropStatsStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/DropStatsStmt.java
@@ -17,6 +17,8 @@
 
 package org.apache.impala.analysis;
 
+import java.util.List;
+
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TDropStatsParams;
@@ -41,12 +43,12 @@ public class DropStatsStmt extends StatementBase {
    * Constructor for building the DROP TABLE/VIEW statement
    */
   public DropStatsStmt(TableName tableName) {
-    this.tableName_ = tableName;
+    this.tableName_ = Preconditions.checkNotNull(tableName);
     this.partitionSet_ = null;
   }
 
   public DropStatsStmt(TableName tableName, PartitionSet partitionSet) {
-    this.tableName_ = tableName;
+    this.tableName_ = Preconditions.checkNotNull(tableName);
     this.partitionSet_ = partitionSet;
   }
 
@@ -75,6 +77,11 @@ public class DropStatsStmt extends StatementBase {
     return params;
   }
 
+  @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableName_.toPath(), null));
+  }
+
   /**
    * Checks that the given table exists and the user has privileges
    * to drop stats on this table.

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/DropTableOrViewStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/DropTableOrViewStmt.java b/fe/src/main/java/org/apache/impala/analysis/DropTableOrViewStmt.java
index fc9db4d..72769d0 100644
--- a/fe/src/main/java/org/apache/impala/analysis/DropTableOrViewStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/DropTableOrViewStmt.java
@@ -17,6 +17,8 @@
 
 package org.apache.impala.analysis;
 
+import java.util.List;
+
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.Table;
 import org.apache.impala.catalog.TableLoadingException;
@@ -26,6 +28,7 @@ import org.apache.impala.thrift.TAccessEvent;
 import org.apache.impala.thrift.TCatalogObjectType;
 import org.apache.impala.thrift.TDropTableOrViewParams;
 import org.apache.impala.thrift.TTableName;
+
 import com.google.common.base.Preconditions;
 
 /**
@@ -50,7 +53,7 @@ public class DropTableOrViewStmt extends StatementBase {
    */
   public DropTableOrViewStmt(TableName tableName, boolean ifExists,
       boolean dropTable, boolean purgeTable) {
-    tableName_ = tableName;
+    tableName_ = Preconditions.checkNotNull(tableName);
     ifExists_ = ifExists;
     dropTable_ = dropTable;
     purgeTable_ = purgeTable;
@@ -77,6 +80,11 @@ public class DropTableOrViewStmt extends StatementBase {
     return params;
   }
 
+  @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableName_.toPath(), null));
+  }
+
   /**
    * 1. Checks that the user has privileges to DROP the given table/view
    * 2. Checks that the database and table exists
@@ -108,8 +116,7 @@ public class DropTableOrViewStmt extends StatementBase {
           analyzer.getFqTableName(tableName_).toString(), TCatalogObjectType.TABLE,
           Privilege.DROP.toString()));
     } catch (AnalysisException e) {
-      if (ifExists_ && analyzer.getMissingTbls().isEmpty()) return;
-      throw e;
+      if (!ifExists_) throw e;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/FromClause.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/FromClause.java b/fe/src/main/java/org/apache/impala/analysis/FromClause.java
index 0526ca9..294c324 100644
--- a/fe/src/main/java/org/apache/impala/analysis/FromClause.java
+++ b/fe/src/main/java/org/apache/impala/analysis/FromClause.java
@@ -22,6 +22,7 @@ import java.util.Iterator;
 import java.util.List;
 
 import org.apache.impala.common.AnalysisException;
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
@@ -51,35 +52,39 @@ public class FromClause implements ParseNode, Iterable<TableRef> {
   public void analyze(Analyzer analyzer) throws AnalysisException {
     if (analyzed_) return;
 
-    if (tableRefs_.isEmpty()) {
-      analyzed_ = true;
-      return;
-    }
-
-    // Start out with table refs to establish aliases.
     TableRef leftTblRef = null;  // the one to the left of tblRef
     for (int i = 0; i < tableRefs_.size(); ++i) {
-      // Resolve and replace non-InlineViewRef table refs with a BaseTableRef or ViewRef.
       TableRef tblRef = tableRefs_.get(i);
+      // Replace non-InlineViewRef table refs with a BaseTableRef or ViewRef.
       tblRef = analyzer.resolveTableRef(tblRef);
       tableRefs_.set(i, Preconditions.checkNotNull(tblRef));
       tblRef.setLeftTblRef(leftTblRef);
-      try {
-        tblRef.analyze(analyzer);
-      } catch (AnalysisException e) {
-        // Only re-throw the exception if no tables are missing.
-        if (analyzer.getMissingTbls().isEmpty()) throw e;
-      }
+      tblRef.analyze(analyzer);
       leftTblRef = tblRef;
     }
+    analyzed_ = true;
+  }
 
-    // All tableRefs have been analyzed, but at least one table is missing metadata.
-    if (!analyzer.getMissingTbls().isEmpty()) {
-      throw new AnalysisException("Found missing tables. Aborting analysis.");
+  public void collectFromClauseTableRefs(List<TableRef> tblRefs) {
+    collectTableRefs(tblRefs, true);
+  }
+
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    collectTableRefs(tblRefs, false);
+  }
+
+  private void collectTableRefs(List<TableRef> tblRefs, boolean fromClauseOnly) {
+    for (TableRef tblRef: tableRefs_) {
+      if (tblRef instanceof InlineViewRef) {
+        InlineViewRef inlineViewRef = (InlineViewRef) tblRef;
+        inlineViewRef.getViewStmt().collectTableRefs(tblRefs, fromClauseOnly);
+      } else {
+        tblRefs.add(tblRef);
+      }
     }
-    analyzed_ = true;
   }
 
+  @Override
   public FromClause clone() {
     ArrayList<TableRef> clone = Lists.newArrayList();
     for (TableRef tblRef: tableRefs_) clone.add(tblRef.clone());

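Note that because collectTableRefs() recurses into InlineViewRefs, refs are collected depth-first: for a query like 'SELECT * FROM (SELECT * FROM a JOIN b) v, c' the collected list is [a, b, c], with the inline view's statement walked before the sibling ref 'c' (table names hypothetical).
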
http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/GrantRevokePrivStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/GrantRevokePrivStmt.java b/fe/src/main/java/org/apache/impala/analysis/GrantRevokePrivStmt.java
index d2c2040..9ded066 100644
--- a/fe/src/main/java/org/apache/impala/analysis/GrantRevokePrivStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/GrantRevokePrivStmt.java
@@ -23,6 +23,7 @@ import org.apache.impala.catalog.Role;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TGrantRevokePrivParams;
 import org.apache.impala.thrift.TPrivilege;
+
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 
@@ -79,6 +80,11 @@ public class GrantRevokePrivStmt extends AuthorizationStmt {
   }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    if (privilegeSpec_ != null) privilegeSpec_.collectTableRefs(tblRefs);
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     super.analyze(analyzer);
     if (Strings.isNullOrEmpty(roleName_)) {

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/InsertStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/InsertStmt.java b/fe/src/main/java/org/apache/impala/analysis/InsertStmt.java
index 462728a..4f8d44b 100644
--- a/fe/src/main/java/org/apache/impala/analysis/InsertStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/InsertStmt.java
@@ -20,7 +20,6 @@ package org.apache.impala.analysis;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 
 import org.apache.impala.authorization.Privilege;
@@ -38,12 +37,9 @@ import org.apache.impala.common.FileSystemUtil;
 import org.apache.impala.planner.DataSink;
 import org.apache.impala.planner.TableSink;
 import org.apache.impala.rewrite.ExprRewriter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
@@ -129,7 +125,7 @@ public class InsertStmt extends StatementBase {
   // For Kudu tables, the primary keys are a leading subset of the cols, and the partition
   // cols can be any subset of the primary keys, meaning that this list will be in
   // ascending order from '0' to '# primary key cols - 1' but may leave out some numbers.
-  private List<Integer> partitionColPos_ = Lists.newArrayList();
+  private final List<Integer> partitionColPos_ = Lists.newArrayList();
 
   // Indicates whether this insert stmt has a shuffle or noshuffle plan hint.
   // Both flags may be false. Only one of them may be true, not both.
@@ -267,27 +263,17 @@ public class InsertStmt extends StatementBase {
   public void analyze(Analyzer analyzer) throws AnalysisException {
     if (isAnalyzed()) return;
     super.analyze(analyzer);
-    try {
-      if (withClause_ != null) withClause_.analyze(analyzer);
-    } catch (AnalysisException e) {
-      // Ignore AnalysisExceptions if tables are missing to ensure the maximum number
-      // of missing tables can be collected before failing analyze().
-      if (analyzer.getMissingTbls().isEmpty()) throw e;
-    }
+    if (withClause_ != null) withClause_.analyze(analyzer);
 
     List<Expr> selectListExprs = null;
     if (!needsGeneratedQueryStatement_) {
-      try {
-        // Use a child analyzer for the query stmt to properly scope WITH-clause
-        // views and to ignore irrelevant ORDER BYs.
-        Analyzer queryStmtAnalyzer = new Analyzer(analyzer);
-        queryStmt_.analyze(queryStmtAnalyzer);
-        // Use getResultExprs() and not getBaseTblResultExprs() here because the final
-        // substitution with TupleIsNullPredicate() wrapping happens in planning.
-        selectListExprs = Expr.cloneList(queryStmt_.getResultExprs());
-      } catch (AnalysisException e) {
-        if (analyzer.getMissingTbls().isEmpty()) throw e;
-      }
+      // Use a child analyzer for the query stmt to properly scope WITH-clause
+      // views and to ignore irrelevant ORDER BYs.
+      Analyzer queryStmtAnalyzer = new Analyzer(analyzer);
+      queryStmt_.analyze(queryStmtAnalyzer);
+      // Use getResultExprs() and not getBaseTblResultExprs() here because the final
+      // substitution with TupleIsNullPredicate() wrapping happens in planning.
+      selectListExprs = Expr.cloneList(queryStmt_.getResultExprs());
     } else {
       selectListExprs = Lists.newArrayList();
     }
@@ -296,11 +282,6 @@ public class InsertStmt extends StatementBase {
     // Also checks if the target table is missing.
     analyzeTargetTable(analyzer);
 
-    // Abort analysis if there are any missing tables beyond this point.
-    if (!analyzer.getMissingTbls().isEmpty()) {
-      throw new AnalysisException("Found missing tables. Aborting analysis.");
-    }
-
     boolean isHBaseTable = (table_ instanceof HBaseTable);
     int numClusteringCols = isHBaseTable ? 0 : table_.getNumClusteringCols();
 
@@ -941,4 +922,15 @@ public class InsertStmt extends StatementBase {
     }
     return strBuilder.toString();
   }
+
+  @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    if (withClause_ != null) {
+      for (View v: withClause_.getViews()) {
+        v.getQueryStmt().collectTableRefs(tblRefs);
+      }
+    }
+    tblRefs.add(new TableRef(targetTableName_.toPath(), null));
+    if (queryStmt_ != null) queryStmt_.collectTableRefs(tblRefs);
+  }
 }

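The InsertStmt override collects in SQL-clause order: WITH-clause view definitions first, then the target table, then the source query. For 'WITH w AS (SELECT * FROM s) INSERT INTO t SELECT * FROM w JOIN u ON ...' this should yield [s, t, w, u]; 'w' appears as a plain ref because WITH-clause views are only substituted during analysis (names hypothetical, assuming the usual parse-time representation).
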
http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/LimitElement.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/LimitElement.java b/fe/src/main/java/org/apache/impala/analysis/LimitElement.java
index d357b07..ab5dac1 100644
--- a/fe/src/main/java/org/apache/impala/analysis/LimitElement.java
+++ b/fe/src/main/java/org/apache/impala/analysis/LimitElement.java
@@ -21,6 +21,7 @@ import org.apache.impala.common.AnalysisException;
 import org.apache.impala.common.InternalException;
 import org.apache.impala.service.FeSupport;
 import org.apache.impala.thrift.TColumnValue;
+
 import com.google.common.base.Preconditions;
 
 /**
@@ -118,9 +119,9 @@ class LimitElement {
    */
   private static long evalIntegerExpr(Analyzer analyzer, Expr expr, String name)
       throws AnalysisException {
-    // Check for slotrefs before analysis so we can provide a more helpful message than
-    // "Could not resolve column/field reference".
-    if (expr.contains(SlotRef.class)) {
+    // Check for slotrefs and subqueries before analysis so we can provide a more
+    // helpful error message.
+    if (expr.contains(SlotRef.class) || expr.contains(Subquery.class)) {
       throw new AnalysisException(name + " expression must be a constant expression: " +
           expr.toSql());
     }

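With this change, a statement such as 'SELECT id FROM functional.alltypes LIMIT (SELECT 10)' should now fail up front with 'LIMIT expression must be a constant expression: (SELECT 10)' rather than a less specific error from deeper in analysis (message text per the code above).
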
http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/LoadDataStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/LoadDataStmt.java b/fe/src/main/java/org/apache/impala/analysis/LoadDataStmt.java
index ec0244d..114862e 100644
--- a/fe/src/main/java/org/apache/impala/analysis/LoadDataStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/LoadDataStmt.java
@@ -19,13 +19,14 @@ package org.apache.impala.analysis;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.List;
 
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.adl.AdlFileSystem;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.s3a.S3AFileSystem;
-import org.apache.hadoop.fs.adl.AdlFileSystem;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.HdfsFileFormat;
@@ -99,6 +100,11 @@ public class LoadDataStmt extends StatementBase {
   }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableName_.toPath(), null));
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     dbName_ = analyzer.getTargetDbName(tableName_);
     Table table = analyzer.getTable(tableName_, Privilege.INSERT);

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/ModifyStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ModifyStmt.java b/fe/src/main/java/org/apache/impala/analysis/ModifyStmt.java
index 5bac75c..32b4a1d 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ModifyStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ModifyStmt.java
@@ -33,7 +33,6 @@ import org.apache.impala.common.AnalysisException;
 import org.apache.impala.common.Pair;
 import org.apache.impala.planner.DataSink;
 import org.apache.impala.rewrite.ExprRewriter;
-import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
@@ -98,6 +97,20 @@ public abstract class ModifyStmt extends StatementBase {
     wherePredicate_ = wherePredicate;
   }
 
+  @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(targetTablePath_, null));
+    fromClause_.collectTableRefs(tblRefs);
+    if (wherePredicate_ != null) {
+      // Collect TableRefs in WHERE-clause subqueries.
+      List<Subquery> subqueries = Lists.newArrayList();
+      wherePredicate_.collect(Subquery.class, subqueries);
+      for (Subquery sq : subqueries) {
+        sq.getStatement().collectTableRefs(tblRefs);
+      }
+    }
+  }
+
   /**
    * The analysis of the ModifyStmt proceeds as follows: First, the FROM clause is
    * analyzed and the targetTablePath is verified to be a valid alias into the FROM
@@ -232,12 +245,18 @@ public abstract class ModifyStmt extends StatementBase {
 
     // Assignments are only used in the context of updates.
     for (Pair<SlotRef, Expr> valueAssignment : assignments_) {
-      Expr rhsExpr = valueAssignment.second;
-      rhsExpr.analyze(analyzer);
-
       SlotRef lhsSlotRef = valueAssignment.first;
       lhsSlotRef.analyze(analyzer);
 
+      Expr rhsExpr = valueAssignment.second;
+      // No subqueries for rhs expression
+      if (rhsExpr.contains(Subquery.class)) {
+        throw new AnalysisException(
+            format("Subqueries are not supported as update expressions for column '%s'",
+                lhsSlotRef.toSql()));
+      }
+      rhsExpr.analyze(analyzer);
+
       // Correct target table
       if (!lhsSlotRef.isBoundByTupleIds(targetTableRef_.getId().asList())) {
         throw new AnalysisException(
@@ -246,13 +265,6 @@ public abstract class ModifyStmt extends StatementBase {
                 rhsExpr.toSql(), targetTableRef_.getDesc().getTable().getFullName()));
       }
 
-      // No subqueries for rhs expression
-      if (rhsExpr.contains(Subquery.class)) {
-        throw new AnalysisException(
-            format("Subqueries are not supported as update expressions for column '%s'",
-                lhsSlotRef.toSql()));
-      }
-
       Column c = lhsSlotRef.getResolvedPath().destColumn();
       // TODO(Kudu) Add test for this code-path when Kudu supports nested types
       if (c == null) {

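Moving the subquery check ahead of rhsExpr.analyze() means an assignment like 'UPDATE t SET c = (SELECT max(x) FROM s)' is rejected with the dedicated message for column 'c' even when the subquery itself would not analyze cleanly, e.g. because it references a nonexistent table (names hypothetical).
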
http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/Path.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/Path.java b/fe/src/main/java/org/apache/impala/analysis/Path.java
index 32ba3d5..9eb57a0 100644
--- a/fe/src/main/java/org/apache/impala/analysis/Path.java
+++ b/fe/src/main/java/org/apache/impala/analysis/Path.java
@@ -27,6 +27,7 @@ import org.apache.impala.catalog.StructField;
 import org.apache.impala.catalog.StructType;
 import org.apache.impala.catalog.Table;
 import org.apache.impala.catalog.Type;
+
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -270,6 +271,28 @@ public class Path {
     }
   }
 
+  /**
+   * Returns a list of table names that might be referenced by the given path.
+   * The path must be non-empty.
+   *
+   * Examples: path -> result
+   * a -> [<sessionDb>.a]
+   * a.b -> [<sessionDb>.a, a.b]
+   * a.b.c -> [<sessionDb>.a, a.b]
+   * a.b.c... -> [<sessionDb>.a, a.b]
+   */
+  public static List<TableName> getCandidateTables(List<String> path, String sessionDb) {
+    Preconditions.checkArgument(path != null && !path.isEmpty());
+    List<TableName> result = Lists.newArrayList();
+    int end = Math.min(2, path.size());
+    for (int tblNameIdx = 0; tblNameIdx < end; ++tblNameIdx) {
+      String dbName = (tblNameIdx == 0) ? sessionDb : path.get(0);
+      String tblName = path.get(tblNameIdx);
+      result.add(new TableName(dbName, tblName));
+    }
+    return result;
+  }
+
   public Table getRootTable() { return rootTable_; }
   public TupleDescriptor getRootDesc() { return rootDesc_; }
   public boolean isRootedAtTable() { return rootTable_ != null; }

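A quick usage sketch of the new helper, mirroring the Javadoc examples above (the session db value is illustrative):

  import java.util.List;

  import org.apache.impala.analysis.Path;
  import org.apache.impala.analysis.TableName;

  import com.google.common.collect.Lists;

  public class CandidateTablesSketch {
    public static void main(String[] args) {
      // For the raw path "a.b.c", only the first two components can name a
      // table: "default.a" (unqualified) or "a.b" (db-qualified); the
      // trailing "c" can only be a column or field reference.
      List<TableName> candidates =
          Path.getCandidateTables(Lists.newArrayList("a", "b", "c"), "default");
      for (TableName t : candidates) System.out.println(t);  // default.a, a.b
    }
  }
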
http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/PrivilegeSpec.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/PrivilegeSpec.java b/fe/src/main/java/org/apache/impala/analysis/PrivilegeSpec.java
index 9e8731a..a21af93 100644
--- a/fe/src/main/java/org/apache/impala/analysis/PrivilegeSpec.java
+++ b/fe/src/main/java/org/apache/impala/analysis/PrivilegeSpec.java
@@ -21,7 +21,6 @@ import java.util.List;
 
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.DataSourceTable;
-import org.apache.impala.catalog.KuduTable;
 import org.apache.impala.catalog.RolePrivilege;
 import org.apache.impala.catalog.Table;
 import org.apache.impala.catalog.TableLoadingException;
@@ -30,6 +29,7 @@ import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TPrivilege;
 import org.apache.impala.thrift.TPrivilegeLevel;
 import org.apache.impala.thrift.TPrivilegeScope;
+
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
@@ -171,6 +171,10 @@ public class PrivilegeSpec implements ParseNode {
     return sb.toString();
   }
 
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    if (tableName_ != null) tblRefs.add(new TableRef(tableName_.toPath(), null));
+  }
+
   @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     String configServerName = analyzer.getAuthzConfig().getServerName();
@@ -277,7 +281,6 @@ public class PrivilegeSpec implements ParseNode {
     } catch (TableLoadingException e) {
       throw new AnalysisException(e.getMessage(), e);
     } catch (AnalysisException e) {
-      if (analyzer.hasMissingTbls()) throw e;
       throw new AnalysisException(String.format("Error setting privileges for " +
           "table '%s'. Verify that the table exists and that you have permissions " +
           "to issue a GRANT/REVOKE statement.", tableName_.toString()));

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/QueryStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/QueryStmt.java b/fe/src/main/java/org/apache/impala/analysis/QueryStmt.java
index 6f21a33..3f2eec5 100644
--- a/fe/src/main/java/org/apache/impala/analysis/QueryStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/QueryStmt.java
@@ -23,6 +23,7 @@ import java.util.ListIterator;
 import java.util.Set;
 
 import org.apache.impala.catalog.Type;
+import org.apache.impala.catalog.View;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.planner.DataSink;
 import org.apache.impala.planner.PlanRootSink;
@@ -95,6 +96,32 @@ public abstract class QueryStmt extends StatementBase {
     ambiguousAliasList_ = Lists.newArrayList();
   }
 
+  /**
+   * Collects all table references in the FROM clause of this statement and in all
+   * statements nested within FROM clauses.
+   */
+  public void collectFromClauseTableRefs(List<TableRef> tblRefs) {
+    collectTableRefs(tblRefs, true);
+  }
+
+  @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    collectTableRefs(tblRefs, false);
+  }
+
+  /**
+   * Helper for collectFromClauseTableRefs() and collectTableRefs().
+   * If 'fromClauseOnly' is true, collects only the table references in FROM clauses;
+   * otherwise, collects all table references.
+   */
+  protected void collectTableRefs(List<TableRef> tblRefs, boolean fromClauseOnly) {
+    if (!fromClauseOnly && withClause_ != null) {
+      for (View v: withClause_.getViews()) {
+        v.getQueryStmt().collectTableRefs(tblRefs, fromClauseOnly);
+      }
+    }
+  }
+
   @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     if (isAnalyzed()) return;
@@ -132,7 +159,7 @@ public abstract class QueryStmt extends StatementBase {
     Set<TupleId> tblRefIds = Sets.newHashSet();
 
     List<TableRef> tblRefs = Lists.newArrayList();
-    collectTableRefs(tblRefs);
+    collectTableRefs(tblRefs, true);
     for (TableRef tblRef: tblRefs) {
       if (absoluteRef == null && !tblRef.isRelative()) absoluteRef = tblRef;
       if (tblRef.isCorrelated()) {
@@ -338,12 +365,6 @@ public abstract class QueryStmt extends StatementBase {
    */
   public abstract void getMaterializedTupleIds(ArrayList<TupleId> tupleIdList);
 
-  /**
-   * Returns all physical (non-inline-view) TableRefs of this statement and the nested
-   * statements of inline views. The returned TableRefs are in depth-first order.
-   */
-  public abstract void collectTableRefs(List<TableRef> tblRefs);
-
   @Override
   public List<Expr> getResultExprs() { return resultExprs_; }
 

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/ResetMetadataStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ResetMetadataStmt.java b/fe/src/main/java/org/apache/impala/analysis/ResetMetadataStmt.java
index ac7ca2e..1fd1e7c 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ResetMetadataStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ResetMetadataStmt.java
@@ -17,6 +17,8 @@
 
 package org.apache.impala.analysis;
 
+import java.util.List;
+
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.authorization.PrivilegeRequest;
 import org.apache.impala.authorization.PrivilegeRequestBuilder;
@@ -78,6 +80,11 @@ public class ResetMetadataStmt extends StatementBase {
   public TableName getTableName() { return tableName_; }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    if (tableName_ != null) tblRefs.add(new TableRef(tableName_.toPath(), null));
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     if (tableName_ != null) {
       String dbName = analyzer.getTargetDbName(tableName_);

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/SelectStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/SelectStmt.java b/fe/src/main/java/org/apache/impala/analysis/SelectStmt.java
index 2ba5105..ab18f85 100644
--- a/fe/src/main/java/org/apache/impala/analysis/SelectStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/SelectStmt.java
@@ -151,6 +151,7 @@ public class SelectStmt extends QueryStmt {
     if (isAnalyzed()) return;
     super.analyze(analyzer);
 
+    // Start out with table refs to establish aliases.
     fromClause_.analyze(analyzer);
 
     // Generate !empty() predicates to filter out empty collections.
@@ -1019,13 +1020,19 @@ public class SelectStmt extends QueryStmt {
   }
 
   @Override
-  public void collectTableRefs(List<TableRef> tblRefs) {
-    for (TableRef tblRef: fromClause_) {
-      if (tblRef instanceof InlineViewRef) {
-        InlineViewRef inlineViewRef = (InlineViewRef) tblRef;
-        inlineViewRef.getViewStmt().collectTableRefs(tblRefs);
-      } else {
-        tblRefs.add(tblRef);
+  protected void collectTableRefs(List<TableRef> tblRefs, boolean fromClauseOnly) {
+    super.collectTableRefs(tblRefs, fromClauseOnly);
+    if (fromClauseOnly) {
+      fromClause_.collectFromClauseTableRefs(tblRefs);
+    } else {
+      fromClause_.collectTableRefs(tblRefs);
+    }
+    if (!fromClauseOnly && whereClause_ != null) {
+      // Collect TableRefs in WHERE-clause subqueries.
+      List<Subquery> subqueries = Lists.newArrayList();
+      whereClause_.collect(Subquery.class, subqueries);
+      for (Subquery sq: subqueries) {
+        sq.getStatement().collectTableRefs(tblRefs, fromClauseOnly);
       }
     }
   }

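The practical effect of the 'fromClauseOnly' flag, sketched against the patched tree (the helper below is illustrative):

  import java.util.List;

  import org.apache.impala.analysis.QueryStmt;
  import org.apache.impala.analysis.TableRef;

  import com.google.common.collect.Lists;

  public class CollectModesSketch {
    // For "SELECT * FROM a WHERE x IN (SELECT y FROM b)" (names hypothetical),
    // collectTableRefs() yields [a, b] while collectFromClauseTableRefs()
    // yields only [a], since the subquery hangs off the WHERE clause.
    public static void printCounts(QueryStmt stmt) {
      List<TableRef> all = Lists.newArrayList();
      stmt.collectTableRefs(all);
      List<TableRef> fromOnly = Lists.newArrayList();
      stmt.collectFromClauseTableRefs(fromOnly);
      System.out.println(all.size() + " total, " + fromOnly.size() + " in FROM");
    }
  }
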
http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/SetStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/SetStmt.java b/fe/src/main/java/org/apache/impala/analysis/SetStmt.java
index a83efda..5a48b93 100644
--- a/fe/src/main/java/org/apache/impala/analysis/SetStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/SetStmt.java
@@ -18,6 +18,7 @@
 package org.apache.impala.analysis;
 
 import org.apache.impala.thrift.TSetQueryOptionRequest;
+
 import com.google.common.base.Preconditions;
 
 /**

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/ShowCreateFunctionStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ShowCreateFunctionStmt.java b/fe/src/main/java/org/apache/impala/analysis/ShowCreateFunctionStmt.java
index 37e32ae..414bbd7 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ShowCreateFunctionStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ShowCreateFunctionStmt.java
@@ -19,13 +19,13 @@ package org.apache.impala.analysis;
 
 import java.util.List;
 
-import org.apache.impala.analysis.FunctionName;
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.Db;
 import org.apache.impala.catalog.Function;
 import org.apache.impala.common.AnalysisException;
-import org.apache.impala.thrift.TGetFunctionsParams;
 import org.apache.impala.thrift.TFunctionCategory;
+import org.apache.impala.thrift.TGetFunctionsParams;
+
 import com.google.common.base.Preconditions;
 
 /**

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/ShowCreateTableStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ShowCreateTableStmt.java b/fe/src/main/java/org/apache/impala/analysis/ShowCreateTableStmt.java
index d2b57e8..216562b 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ShowCreateTableStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ShowCreateTableStmt.java
@@ -17,12 +17,15 @@
 
 package org.apache.impala.analysis;
 
+import java.util.List;
+
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.Table;
 import org.apache.impala.catalog.View;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TCatalogObjectType;
 import org.apache.impala.thrift.TTableName;
+
 import com.google.common.base.Preconditions;
 
 /**
@@ -36,7 +39,7 @@ public class ShowCreateTableStmt extends StatementBase {
   private TableName tableName_;
 
   // The object type keyword used, e.g. TABLE or VIEW, needed to output matching SQL.
-  private TCatalogObjectType objectType_;
+  private final TCatalogObjectType objectType_;
 
   public ShowCreateTableStmt(TableName table, TCatalogObjectType objectType) {
     Preconditions.checkNotNull(table);
@@ -50,6 +53,11 @@ public class ShowCreateTableStmt extends StatementBase {
   }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableName_.toPath(), null));
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     tableName_ = analyzer.getFqTableName(tableName_);
     Table table = analyzer.getTable(tableName_, Privilege.VIEW_METADATA);

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/ShowFilesStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ShowFilesStmt.java b/fe/src/main/java/org/apache/impala/analysis/ShowFilesStmt.java
index 42839dd..ee749d0 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ShowFilesStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ShowFilesStmt.java
@@ -17,6 +17,8 @@
 
 package org.apache.impala.analysis;
 
+import java.util.List;
+
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.HdfsTable;
 import org.apache.impala.catalog.Table;
@@ -43,8 +45,8 @@ public class ShowFilesStmt extends StatementBase {
   protected Table table_;
 
   public ShowFilesStmt(TableName tableName, PartitionSet partitionSet) {
-    this.tableName_ = tableName;
-    this.partitionSet_ = partitionSet;
+    tableName_ = Preconditions.checkNotNull(tableName);
+    partitionSet_ = partitionSet;
   }
 
   @Override
@@ -56,6 +58,11 @@ public class ShowFilesStmt extends StatementBase {
   }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableName_.toPath(), null));
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     // Resolve and analyze table ref to register privilege and audit events
     // and to allow us to evaluate partition predicates.

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/ShowFunctionsStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ShowFunctionsStmt.java b/fe/src/main/java/org/apache/impala/analysis/ShowFunctionsStmt.java
index dc6a461..296fafd 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ShowFunctionsStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ShowFunctionsStmt.java
@@ -21,6 +21,7 @@ import org.apache.impala.authorization.Privilege;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TFunctionCategory;
 import org.apache.impala.thrift.TShowFunctionsParams;
+
 import com.google.common.base.Preconditions;
 
 /**

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/ShowGrantRoleStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ShowGrantRoleStmt.java b/fe/src/main/java/org/apache/impala/analysis/ShowGrantRoleStmt.java
index e59f168..82c6ed0 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ShowGrantRoleStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ShowGrantRoleStmt.java
@@ -17,10 +17,13 @@
 
 package org.apache.impala.analysis;
 
+import java.util.List;
+
 import org.apache.impala.catalog.Role;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.common.InternalException;
 import org.apache.impala.thrift.TShowGrantRoleParams;
+
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 
@@ -60,6 +63,11 @@ public class ShowGrantRoleStmt extends AuthorizationStmt {
   }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    if (privilegeSpec_ != null) privilegeSpec_.collectTableRefs(tblRefs);
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     super.analyze(analyzer);
     if (Strings.isNullOrEmpty(roleName_)) {

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/ShowStatsStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ShowStatsStmt.java b/fe/src/main/java/org/apache/impala/analysis/ShowStatsStmt.java
index 8801b0a..91dfe75 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ShowStatsStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ShowStatsStmt.java
@@ -17,6 +17,8 @@
 
 package org.apache.impala.analysis;
 
+import java.util.List;
+
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.HdfsTable;
 import org.apache.impala.catalog.KuduTable;
@@ -25,6 +27,7 @@ import org.apache.impala.catalog.View;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TShowStatsOp;
 import org.apache.impala.thrift.TShowStatsParams;
+
 import com.google.common.base.Preconditions;
 
 /**
@@ -39,8 +42,8 @@ public class ShowStatsStmt extends StatementBase {
   protected Table table_;
 
   public ShowStatsStmt(TableName tableName, TShowStatsOp op) {
-    this.op_ = op;
-    this.tableName_ = tableName;
+    op_ = Preconditions.checkNotNull(op);
+    tableName_ = Preconditions.checkNotNull(tableName);
   }
 
   @Override
@@ -64,6 +67,11 @@ public class ShowStatsStmt extends StatementBase {
   }
 
   @Override
+  public void collectTableRefs(List<TableRef> tblRefs) {
+    tblRefs.add(new TableRef(tableName_.toPath(), null));
+  }
+
+  @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     table_ = analyzer.getTable(tableName_, Privilege.VIEW_METADATA);
     Preconditions.checkNotNull(table_);

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/ShowTablesStmt.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/ShowTablesStmt.java b/fe/src/main/java/org/apache/impala/analysis/ShowTablesStmt.java
index a6a23c9..bf2612b 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ShowTablesStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ShowTablesStmt.java
@@ -20,6 +20,7 @@ package org.apache.impala.analysis;
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TShowTablesParams;
+
 import com.google.common.base.Preconditions;
 
 /**

http://git-wip-us.apache.org/repos/asf/impala/blob/e0c09181/fe/src/main/java/org/apache/impala/analysis/StatementBase.java
----------------------------------------------------------------------
diff --git a/fe/src/main/java/org/apache/impala/analysis/StatementBase.java b/fe/src/main/java/org/apache/impala/analysis/StatementBase.java
index bd8e8e2..30dc9b5 100644
--- a/fe/src/main/java/org/apache/impala/analysis/StatementBase.java
+++ b/fe/src/main/java/org/apache/impala/analysis/StatementBase.java
@@ -31,7 +31,7 @@ import com.google.common.base.Preconditions;
 /**
  * Base class for all Impala SQL statements.
  */
-abstract class StatementBase implements ParseNode {
+public abstract class StatementBase implements ParseNode {
 
   // True if this Stmt is the top level of an explain stmt.
   protected boolean isExplain_ = false;
@@ -56,6 +56,13 @@ abstract class StatementBase implements ParseNode {
   }
 
   /**
+   * Collects all table references in this statement and its nested statements.
+   * The TableRefs are collected depth-first in SQL-clause order.
+   * Subclasses should override this method as necessary.
+   */
+  public void collectTableRefs(List<TableRef> tblRefs) { }
+
+  /**
    * Analyzes the statement and throws an AnalysisException if analysis fails. A failure
    * could be due to a problem with the statement or because one or more tables/views
    * were missing from the catalog.