Posted to commits@impala.apache.org by ph...@apache.org on 2019/02/01 19:15:27 UTC

[impala] 03/05: IMPALA-7867 (Part 5): Collection cleanup in analyzer

This is an automated email from the ASF dual-hosted git repository.

philz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/impala.git

commit 396f542eda32dd92e80edbeb216a4cdeb7fe0ace
Author: paul-rogers <pr...@cloudera.com>
AuthorDate: Wed Dec 26 23:02:00 2018 -0800

    IMPALA-7867 (Part 5): Collection cleanup in analyzer
    
    Continues the work to clean up the code to:
    
    * Use collection interfaces for variable and function declarations.
    * Replace Guava newArrayList(), etc. calls with the direct
      use of Java collection classes.
    * Clean up unused imports and add override annotations.
    
    This commit cleans up remaining issues in the analyzer now that the
    other modules use collection interfaces.
    
    Tests: this is purely a refactoring with no functional change. Reran
    existing tests.
    
    Change-Id: I1d1c37beb926896f5e00faab0b06034aebb835c5
    Reviewed-on: http://gerrit.cloudera.org:8080/12266
    Reviewed-by: Impala Public Jenkins <im...@cloudera.com>
    Tested-by: Impala Public Jenkins <im...@cloudera.com>
---
 .../org/apache/impala/analysis/AdminFnStmt.java    |  4 +--
 .../org/apache/impala/analysis/AggregateInfo.java  |  4 +--
 .../apache/impala/analysis/AggregateInfoBase.java  |  2 +-
 .../analysis/AlterTableSetTblProperties.java       |  5 ++--
 .../apache/impala/analysis/AnalysisContext.java    |  8 +++---
 .../org/apache/impala/analysis/AnalyticExpr.java   | 10 +++----
 .../org/apache/impala/analysis/AnalyticInfo.java   |  5 ++--
 .../java/org/apache/impala/analysis/Analyzer.java  |  2 +-
 .../java/org/apache/impala/analysis/ColumnDef.java | 15 +++++-----
 .../apache/impala/analysis/ColumnLineageGraph.java | 24 ++++++++--------
 .../apache/impala/analysis/ComputeStatsStmt.java   | 13 +++++----
 .../impala/analysis/CreateOrAlterViewStmtBase.java | 10 +++----
 .../impala/analysis/CreateTableAsSelectStmt.java   |  4 +--
 .../apache/impala/analysis/CreateTableStmt.java    |  8 +++---
 .../org/apache/impala/analysis/DeleteStmt.java     |  7 ++---
 .../main/java/org/apache/impala/analysis/Expr.java | 21 +++++++-------
 .../impala/analysis/ExprSubstitutionMap.java       |  5 ++--
 .../org/apache/impala/analysis/FromClause.java     |  5 ++--
 .../apache/impala/analysis/KuduPartitionExpr.java  |  2 --
 .../apache/impala/analysis/KuduPartitionParam.java | 10 ++++---
 .../org/apache/impala/analysis/ModifyStmt.java     |  5 ++--
 .../apache/impala/analysis/MultiAggregateInfo.java |  2 --
 .../apache/impala/analysis/PartitionKeyValue.java  | 11 ++++----
 .../org/apache/impala/analysis/PartitionSet.java   | 12 ++++----
 .../org/apache/impala/analysis/PartitionSpec.java  | 19 +++++++------
 .../main/java/org/apache/impala/analysis/Path.java | 11 ++++----
 .../java/org/apache/impala/analysis/PlanHint.java  |  4 +--
 .../org/apache/impala/analysis/PrivilegeSpec.java  |  8 +++---
 .../org/apache/impala/analysis/SelectList.java     |  7 +++--
 .../org/apache/impala/analysis/SlotDescriptor.java |  3 +-
 .../java/org/apache/impala/analysis/SortInfo.java  | 15 +++++-----
 .../apache/impala/analysis/StmtMetadataLoader.java |  4 +--
 .../apache/impala/analysis/TableDataLayout.java    | 10 +++----
 .../java/org/apache/impala/analysis/TableRef.java  | 17 +++++------
 .../impala/analysis/TupleIsNullPredicate.java      |  4 ++-
 .../java/org/apache/impala/analysis/UnionStmt.java | 16 +++++------
 .../org/apache/impala/analysis/UpdateStmt.java     |  4 +--
 .../org/apache/impala/analysis/WithClause.java     | 10 +++----
 .../impala/analysis/AnalyzeAuthStmtsTest.java      |  4 +--
 .../org/apache/impala/analysis/AnalyzeDDLTest.java |  5 ++--
 .../apache/impala/analysis/AnalyzeExprsTest.java   | 10 +++----
 .../apache/impala/analysis/AnalyzeStmtsTest.java   |  7 +++--
 .../org/apache/impala/analysis/AnalyzerTest.java   |  2 +-
 .../impala/analysis/AuthorizationStmtTest.java     | 33 +++++++++++-----------
 .../apache/impala/analysis/AuthorizationTest.java  |  9 +++---
 .../impala/analysis/ExprRewriteRulesTest.java      |  8 +++---
 .../org/apache/impala/analysis/ParserTest.java     |  8 +++---
 .../impala/analysis/StmtMetadataLoaderTest.java    |  1 -
 48 files changed, 212 insertions(+), 201 deletions(-)
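
The commit message above describes a purely mechanical refactoring. As a rough
illustration of the patterns it names (declaring fields and locals against the
java.util collection interfaces, replacing Guava Lists.newArrayList() calls with
the plain Java collection classes via the diamond operator, and adding missing
@Override annotations), here is a minimal, hypothetical sketch; the class and
member names below are invented for illustration and are not Impala code.

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class CollectionCleanupExample {
      // Before: protected ArrayList<Integer> slots_ = Lists.newArrayList();
      // After: declare against the List interface and use the diamond operator,
      // which also drops the com.google.common.collect.Lists import.
      protected List<Integer> slots_ = new ArrayList<>();

      // The same pattern applies to locals (hypothetical method):
      public List<String> distinctSql(List<Object> exprs) {
        List<String> result = new ArrayList<>();  // was: Lists.newArrayList()
        Set<String> seen = new HashSet<>();       // was: Sets.newHashSet()
        for (Object e : exprs) {
          String sql = e.toString();
          if (seen.add(sql)) result.add(sql);
        }
        return result;
      }

      // The cleanup also adds missing @Override annotations, e.g.:
      @Override
      public String toString() { return "CollectionCleanupExample" + slots_; }
    }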

diff --git a/fe/src/main/java/org/apache/impala/analysis/AdminFnStmt.java b/fe/src/main/java/org/apache/impala/analysis/AdminFnStmt.java
index f2492d3..5eb900d 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AdminFnStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AdminFnStmt.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.impala.authorization.PrivilegeRequestBuilder;
@@ -30,7 +31,6 @@ import org.apache.impala.thrift.TShutdownParams;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 
 /**
  * Represents an administrative function call, e.g. ": shutdown('hostname:123')".
@@ -63,7 +63,7 @@ public class AdminFnStmt extends StatementBase {
   public String toSql(ToSqlOptions options) {
     StringBuilder sb = new StringBuilder();
     sb.append(":").append(fnName_).append("(");
-    List<String> paramsSql = Lists.newArrayList();
+    List<String> paramsSql = new ArrayList<>();
     for (Expr param : params_) paramsSql.add(param.toSql(options));
     sb.append(Joiner.on(", ").join(paramsSql));
     sb.append(")");
diff --git a/fe/src/main/java/org/apache/impala/analysis/AggregateInfo.java b/fe/src/main/java/org/apache/impala/analysis/AggregateInfo.java
index d7dcc1a..7069bb5 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AggregateInfo.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AggregateInfo.java
@@ -320,14 +320,14 @@ public class AggregateInfo extends AggregateInfoBase {
     Preconditions.checkState(mergeAggInfo_ == null);
     TupleDescriptor inputDesc = intermediateTupleDesc_;
     // construct grouping exprs
-    ArrayList<Expr> groupingExprs = new ArrayList<>();
+    List<Expr> groupingExprs = new ArrayList<>();
     for (int i = 0; i < getGroupingExprs().size(); ++i) {
       SlotRef slotRef = new SlotRef(inputDesc.getSlots().get(i));
       groupingExprs.add(slotRef);
     }
 
     // construct agg exprs
-    ArrayList<FunctionCallExpr> aggExprs = new ArrayList<>();
+    List<FunctionCallExpr> aggExprs = new ArrayList<>();
     for (int i = 0; i < getAggregateExprs().size(); ++i) {
       FunctionCallExpr inputExpr = getAggregateExprs().get(i);
       Preconditions.checkState(inputExpr.isAggregateFunction());
diff --git a/fe/src/main/java/org/apache/impala/analysis/AggregateInfoBase.java b/fe/src/main/java/org/apache/impala/analysis/AggregateInfoBase.java
index 95b88f0..a3fc5ed 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AggregateInfoBase.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AggregateInfoBase.java
@@ -66,7 +66,7 @@ public abstract class AggregateInfoBase {
   // For analytics: indices into the analytic exprs and their corresponding aggregate
   // exprs that need to be materialized.
   // Populated in materializeRequiredSlots() which must be implemented by subclasses.
-  protected List<Integer> materializedSlots_ = Lists.newArrayList();
+  protected List<Integer> materializedSlots_ = new ArrayList<>();
 
   protected AggregateInfoBase(List<Expr> groupingExprs, List<FunctionCallExpr> aggExprs) {
     Preconditions.checkState(groupingExprs != null || aggExprs != null);
diff --git a/fe/src/main/java/org/apache/impala/analysis/AlterTableSetTblProperties.java b/fe/src/main/java/org/apache/impala/analysis/AlterTableSetTblProperties.java
index 6356ad7..5528316 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AlterTableSetTblProperties.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AlterTableSetTblProperties.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -144,7 +145,7 @@ public class AlterTableSetTblProperties extends AlterTableSetStmt {
    */
   private void analyzeAvroSchema(Analyzer analyzer)
       throws AnalysisException {
-    List<Map<String, String>> schemaSearchLocations = Lists.newArrayList();
+    List<Map<String, String>> schemaSearchLocations = new ArrayList<>();
     schemaSearchLocations.add(tblProperties_);
 
     String avroSchema = AvroSchemaUtils.getAvroSchema(schemaSearchLocations);
@@ -204,7 +205,7 @@ public class AlterTableSetTblProperties extends AlterTableSetStmt {
       Map<String, String> tblProperties) throws AnalysisException {
     if (!tblProperties.containsKey(
         AlterTableSortByStmt.TBL_PROP_SORT_COLUMNS)) {
-      return Lists.newArrayList();
+      return new ArrayList<>();
     }
 
     // ALTER TABLE SET is not supported on HBase tables at all, see
diff --git a/fe/src/main/java/org/apache/impala/analysis/AnalysisContext.java b/fe/src/main/java/org/apache/impala/analysis/AnalysisContext.java
index 8d9ff90..3d32999 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AnalysisContext.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AnalysisContext.java
@@ -17,6 +17,8 @@
 
 package org.apache.impala.analysis;
 
+import static org.apache.impala.analysis.ToSqlOptions.REWRITTEN;
+
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -50,10 +52,8 @@ import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
 import com.google.common.collect.ImmutableList;
-
-import static org.apache.impala.analysis.ToSqlOptions.REWRITTEN;
+import com.google.common.collect.Lists;
 
 /**
  * Wrapper class for parsing, analyzing and rewriting a SQL stmt.
@@ -466,7 +466,7 @@ public class AnalysisContext {
     // types and column labels to restore them after the rewritten stmt has been
     // reset() and re-analyzed. For a CTAS statement, the types represent column types
     // of the table that will be created, including the partition columns, if any.
-    List<Type> origResultTypes = Lists.newArrayList();
+    List<Type> origResultTypes = new ArrayList<>();
     for (Expr e : analysisResult_.stmt_.getResultExprs()) {
       origResultTypes.add(e.getType());
     }
diff --git a/fe/src/main/java/org/apache/impala/analysis/AnalyticExpr.java b/fe/src/main/java/org/apache/impala/analysis/AnalyticExpr.java
index 7eeb86c..3bfe86e 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AnalyticExpr.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AnalyticExpr.java
@@ -63,7 +63,7 @@ public class AnalyticExpr extends Expr {
   private final List<Expr> partitionExprs_;
   // These elements are modified to point to the corresponding child exprs to keep them
   // in sync through expr substitutions.
-  private List<OrderByElement> orderByElements_ = Lists.newArrayList();
+  private List<OrderByElement> orderByElements_ = new ArrayList<>();
   private AnalyticWindow window_;
 
   // If set, requires the window to be set to null in resetAnalysisState(). Required for
@@ -156,7 +156,7 @@ public class AnalyticExpr extends Expr {
       needsSpace = true;
     }
     if (!orderByElements_.isEmpty()) {
-      List<String> orderByStrings = Lists.newArrayList();
+      List<String> orderByStrings = new ArrayList<>();
       for (OrderByElement e: orderByElements_) {
         orderByStrings.add(e.toSql(options));
       }
@@ -272,7 +272,7 @@ public class AnalyticExpr extends Expr {
         new ArithmeticExpr(ArithmeticExpr.Operator.SUBTRACT, rankExpr, one),
         new ArithmeticExpr(ArithmeticExpr.Operator.SUBTRACT, countExpr, one));
 
-    List<Expr> ifParams = Lists.newArrayList();
+    List<Expr> ifParams = new ArrayList<>();
     ifParams.add(
       new BinaryPredicate(BinaryPredicate.Operator.EQ, one, countExpr));
     ifParams.add(zero);
@@ -322,7 +322,7 @@ public class AnalyticExpr extends Expr {
     AnalyticExpr rowNumExpr = create("row_number", analyticExpr, true, false);
     AnalyticExpr countExpr = create("count", analyticExpr, false, false);
 
-    List<Expr> ifParams = Lists.newArrayList();
+    List<Expr> ifParams = new ArrayList<>();
     ifParams.add(
         new BinaryPredicate(BinaryPredicate.Operator.LT, bucketExpr, countExpr));
     ifParams.add(bucketExpr);
@@ -357,7 +357,7 @@ public class AnalyticExpr extends Expr {
       if (reverseOrderBy) {
         orderByElements = OrderByElement.reverse(referenceExpr.getOrderByElements());
       } else {
-        orderByElements = Lists.newArrayList();
+        orderByElements = new ArrayList<>();
         for (OrderByElement elem: referenceExpr.getOrderByElements()) {
           orderByElements.add(elem.clone());
         }
diff --git a/fe/src/main/java/org/apache/impala/analysis/AnalyticInfo.java b/fe/src/main/java/org/apache/impala/analysis/AnalyticInfo.java
index 12d15b2..2ce3461 100644
--- a/fe/src/main/java/org/apache/impala/analysis/AnalyticInfo.java
+++ b/fe/src/main/java/org/apache/impala/analysis/AnalyticInfo.java
@@ -26,7 +26,6 @@ import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Objects;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 
 /**
  * Encapsulates the analytic functions found in a single select block plus
@@ -110,7 +109,7 @@ public class AnalyticInfo extends AggregateInfoBase {
    * analytic functions.
    */
   private List<Expr> computeCommonPartitionExprs() {
-    List<Expr> result = Lists.newArrayList();
+    List<Expr> result = new ArrayList<>();
     for (Expr analyticExpr: analyticExprs_) {
       Preconditions.checkState(analyticExpr.isAnalyzed());
       List<Expr> partitionExprs = ((AnalyticExpr) analyticExpr).getPartitionExprs();
@@ -128,7 +127,7 @@ public class AnalyticInfo extends AggregateInfoBase {
   @Override
   public void materializeRequiredSlots(Analyzer analyzer, ExprSubstitutionMap smap) {
     materializedSlots_.clear();
-    List<Expr> exprs = Lists.newArrayList();
+    List<Expr> exprs = new ArrayList<>();
     for (int i = 0; i < analyticExprs_.size(); ++i) {
       SlotDescriptor outputSlotDesc = outputTupleDesc_.getSlots().get(i);
       if (!outputSlotDesc.isMaterialized()) continue;
diff --git a/fe/src/main/java/org/apache/impala/analysis/Analyzer.java b/fe/src/main/java/org/apache/impala/analysis/Analyzer.java
index 2d39b43..d6529ad 100644
--- a/fe/src/main/java/org/apache/impala/analysis/Analyzer.java
+++ b/fe/src/main/java/org/apache/impala/analysis/Analyzer.java
@@ -1822,7 +1822,7 @@ public class Analyzer {
   }
 
   public <T extends Expr> void createEquivConjuncts(TupleId tid, List<T> conjuncts) {
-    createEquivConjuncts(tid, conjuncts, new HashSet<SlotId>());
+    createEquivConjuncts(tid, conjuncts, new HashSet<>());
   }
 
   /**
diff --git a/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java b/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java
index 7d93531..401e576 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ColumnDef.java
@@ -18,17 +18,13 @@
 package org.apache.impala.analysis;
 
 import java.math.BigInteger;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.List;
 import java.util.LinkedHashMap;
+import java.util.List;
 import java.util.Map;
 
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.impala.catalog.Type;
@@ -40,6 +36,11 @@ import org.apache.impala.util.MetaStoreUtil;
 import org.apache.kudu.ColumnSchema.CompressionAlgorithm;
 import org.apache.kudu.ColumnSchema.Encoding;
 
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
 /**
  * Represents a column definition in a CREATE/ALTER TABLE/VIEW/COLUMN statement.
  * Column definitions in CREATE/ALTER TABLE statements require a column type,
@@ -380,7 +381,7 @@ public class ColumnDef {
   }
 
   static List<String> toColumnNames(Collection<ColumnDef> colDefs) {
-    List<String> colNames = Lists.newArrayList();
+    List<String> colNames = new ArrayList<>();
     for (ColumnDef colDef: colDefs) {
       colNames.add(colDef.getColName());
     }
diff --git a/fe/src/main/java/org/apache/impala/analysis/ColumnLineageGraph.java b/fe/src/main/java/org/apache/impala/analysis/ColumnLineageGraph.java
index 0dd5c56..53d3a3e 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ColumnLineageGraph.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ColumnLineageGraph.java
@@ -27,28 +27,28 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
 
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-import org.json.simple.JSONValue;
-import org.json.simple.parser.JSONParser;
-import org.json.simple.parser.ParseException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.impala.catalog.FeTable;
 import org.apache.impala.common.Id;
 import org.apache.impala.common.IdGenerator;
 import org.apache.impala.thrift.TEdgeType;
-import org.apache.impala.thrift.TQueryCtx;
 import org.apache.impala.thrift.TLineageGraph;
 import org.apache.impala.thrift.TMultiEdge;
+import org.apache.impala.thrift.TQueryCtx;
 import org.apache.impala.thrift.TUniqueId;
 import org.apache.impala.thrift.TVertex;
 import org.apache.impala.util.TUniqueIdUtil;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableSortedSet;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.hash.Hasher;
 import com.google.common.hash.Hashing;
@@ -220,11 +220,11 @@ final class MultiEdge {
    * Encodes this MultiEdge object to a thrift object
    */
   public TMultiEdge toThrift() {
-    List<TVertex> sources = Lists.newArrayList();
+    List<TVertex> sources = new ArrayList<>();
     for (Vertex vertex: getOrderedSources()) {
       sources.add(vertex.toThrift());
     }
-    List<TVertex> targets = Lists.newArrayList();
+    List<TVertex> targets = new ArrayList<>();
     for (Vertex vertex: getOrderedTargets()) {
       targets.add(vertex.toThrift());
     }
@@ -295,7 +295,7 @@ public class ColumnLineageGraph {
   // Name of the user that issued this query
   private String user_;
 
-  private final List<Expr> resultDependencyPredicates_ = Lists.newArrayList();
+  private final List<Expr> resultDependencyPredicates_ = new ArrayList<>();
 
   private final List<MultiEdge> edges_ = new ArrayList<>();
 
diff --git a/fe/src/main/java/org/apache/impala/analysis/ComputeStatsStmt.java b/fe/src/main/java/org/apache/impala/analysis/ComputeStatsStmt.java
index 72f28a2..80f2258 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ComputeStatsStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ComputeStatsStmt.java
@@ -18,6 +18,7 @@
 package org.apache.impala.analysis;
 
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
@@ -161,14 +162,14 @@ public class ComputeStatsStmt extends StatementBase {
 
   // The list of valid partition statistics that can be used in an incremental computation
   // without themselves being recomputed. Populated in analyze().
-  private final List<TPartitionStats> validPartStats_ = Lists.newArrayList();
+  private final List<TPartitionStats> validPartStats_ = new ArrayList<>();
 
   // For incremental computations, the list of partitions (identified by list of partition
   // column values) that we expect to receive results for. Used to ensure that even empty
   // partitions emit results.
   // TODO: Consider using partition IDs (and adding them to the child queries with a
   // PARTITION_ID() builtin)
-  private final List<List<String>> expectedPartitions_ = Lists.newArrayList();
+  private final List<List<String>> expectedPartitions_ = new ArrayList<>();
 
   // If non-null, partitions that an incremental computation might apply to. Must be
   // null if this is a non-incremental computation.
@@ -235,7 +236,7 @@ public class ComputeStatsStmt extends StatementBase {
   }
 
   private List<String> getBaseColumnStatsQuerySelectList(Analyzer analyzer) {
-    List<String> columnStatsSelectList = Lists.newArrayList();
+    List<String> columnStatsSelectList = new ArrayList<>();
     // For Hdfs tables, exclude partition columns from stats gathering because Hive
     // cannot store them as part of the non-partition column stats. For HBase tables,
     // include the single clustering column (the row key).
@@ -421,7 +422,7 @@ public class ComputeStatsStmt extends StatementBase {
 
     // Build partition filters that only select partitions without valid statistics for
     // incremental computation.
-    List<String> filterPreds = Lists.newArrayList();
+    List<String> filterPreds = new ArrayList<>();
     if (isIncremental_) {
       if (partitionSet_ == null) {
         // If any column does not have stats, we recompute statistics for all partitions
@@ -534,7 +535,7 @@ public class ComputeStatsStmt extends StatementBase {
     }
     List<String> tableStatsSelectList = Lists.newArrayList(countSql);
     // Add group by columns for incremental stats or with extrapolation disabled.
-    List<String> groupByCols = Lists.newArrayList();
+    List<String> groupByCols = new ArrayList<>();
     if (!updateTableStatsOnly()) {
       for (Column partCol: hdfsTable.getClusteringColumns()) {
         groupByCols.add(ToSqlUtils.getIdentSql(partCol.getName()));
@@ -610,7 +611,7 @@ public class ComputeStatsStmt extends StatementBase {
         && !RuntimeEnv.INSTANCE.isTestEnv()) {
       // We're configured to fetch the statistics from catalogd, so collect the relevant
       // partition ids.
-      List<FeFsPartition> partitionsToFetch = Lists.newArrayList();
+      List<FeFsPartition> partitionsToFetch = new ArrayList<>();
       for (FeFsPartition p: partitions) {
         if (excludedPartitions.contains(p.getId())) continue;
         partitionsToFetch.add(p);
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateOrAlterViewStmtBase.java b/fe/src/main/java/org/apache/impala/analysis/CreateOrAlterViewStmtBase.java
index bef1c22..e0ff1c9 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateOrAlterViewStmtBase.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateOrAlterViewStmtBase.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -28,9 +29,8 @@ import org.apache.impala.thrift.TTableName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import com.google.common.base.Joiner;
+import com.google.common.base.Preconditions;
 
 /**
  * Base class for CREATE VIEW and ALTER VIEW AS SELECT statements.
@@ -119,7 +119,7 @@ public abstract class CreateOrAlterViewStmtBase extends StatementBase {
       }
     } else {
       // Create list of column definitions from the view-definition statement.
-      finalColDefs_ = Lists.newArrayList();
+      finalColDefs_ = new ArrayList<>();
       List<Expr> exprs = viewDefStmt_.getBaseTblResultExprs();
       List<String> labels = viewDefStmt_.getColLabels();
       Preconditions.checkState(exprs.size() == labels.size());
@@ -175,7 +175,7 @@ public abstract class CreateOrAlterViewStmtBase extends StatementBase {
    */
   protected void computeLineageGraph(Analyzer analyzer) {
     ColumnLineageGraph graph = analyzer.getColumnLineageGraph();
-    List<String> colDefs = Lists.newArrayList();
+    List<String> colDefs = new ArrayList<>();
     for (ColumnDef colDef: finalColDefs_) {
       colDefs.add(dbName_ + "." + getTbl() + "." + colDef.getColName());
     }
@@ -222,7 +222,7 @@ public abstract class CreateOrAlterViewStmtBase extends StatementBase {
    */
   protected String getColumnNames() {
     Preconditions.checkNotNull(columnDefs_);
-    List<String> columnNames = Lists.newArrayList();
+    List<String> columnNames = new ArrayList<>();
     for (ColumnDef colDef : columnDefs_) {
       columnNames.add(colDef.getColName());
     }
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java b/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java
index 372cfce..73f134d 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateTableAsSelectStmt.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.List;
@@ -37,7 +38,6 @@ import org.apache.impala.service.CatalogOpExecutor;
 import org.apache.impala.thrift.THdfsFileFormat;
 
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 
 /**
  * Represents a CREATE TABLE AS SELECT (CTAS) statement
@@ -96,7 +96,7 @@ public class CreateTableAsSelectStmt extends StatementBase {
     partitionKeys_ = params.partitionKeys;
     List<PartitionKeyValue> pkvs = null;
     if (partitionKeys_ != null) {
-      pkvs = Lists.newArrayList();
+      pkvs = new ArrayList<>();
       for (String key: partitionKeys_) {
         pkvs.add(new PartitionKeyValue(key, null));
       }
diff --git a/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java b/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java
index d42d82f..8b393f1 100644
--- a/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/CreateTableStmt.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -39,9 +40,8 @@ import org.apache.impala.util.MetaStoreUtil;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
-import com.google.common.primitives.Ints;
 import com.google.common.collect.Maps;
+import com.google.common.primitives.Ints;
 
 /**
  * Represents a CREATE TABLE statement.
@@ -155,7 +155,7 @@ public class CreateTableStmt extends StatementBase {
   public TCreateTableParams toThrift() {
     TCreateTableParams params = new TCreateTableParams();
     params.setTable_name(new TTableName(getDb(), getTbl()));
-    List<org.apache.impala.thrift.TColumn> tColumns = Lists.newArrayList();
+    List<org.apache.impala.thrift.TColumn> tColumns = new ArrayList<>();
     for (ColumnDef col: getColumnDefs()) tColumns.add(col.toThrift());
     params.setColumns(tColumns);
     for (ColumnDef col: getPartitionColumnDefs()) {
@@ -403,7 +403,7 @@ public class CreateTableStmt extends StatementBase {
     Preconditions.checkState(getFileFormat() == THdfsFileFormat.AVRO);
     // Look for the schema in TBLPROPERTIES and in SERDEPROPERTIES, with latter
     // taking precedence.
-    List<Map<String, String>> schemaSearchLocations = Lists.newArrayList();
+    List<Map<String, String>> schemaSearchLocations = new ArrayList<>();
     schemaSearchLocations.add(getSerdeProperties());
     schemaSearchLocations.add(getTblProperties());
     String avroSchema;
diff --git a/fe/src/main/java/org/apache/impala/analysis/DeleteStmt.java b/fe/src/main/java/org/apache/impala/analysis/DeleteStmt.java
index 97d2ce2..6caa95c 100644
--- a/fe/src/main/java/org/apache/impala/analysis/DeleteStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/DeleteStmt.java
@@ -17,15 +17,14 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.impala.common.Pair;
 import org.apache.impala.planner.DataSink;
 import org.apache.impala.planner.TableSink;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
 
 /**
  * Representation of a DELETE statement.
@@ -42,13 +41,13 @@ public class DeleteStmt extends ModifyStmt {
 
   public DeleteStmt(List<String> targetTablePath, FromClause tableRefs,
       Expr wherePredicate) {
-    super(targetTablePath, tableRefs, Lists.<Pair<SlotRef, Expr>>newArrayList(),
+    super(targetTablePath, tableRefs, new ArrayList<>(),
         wherePredicate);
   }
 
   public DeleteStmt(DeleteStmt other) {
     super(other.targetTablePath_, other.fromClause_.clone(),
-        Lists.<Pair<SlotRef, Expr>>newArrayList(), other.wherePredicate_.clone());
+        new ArrayList<>(), other.wherePredicate_.clone());
   }
 
   @Override
diff --git a/fe/src/main/java/org/apache/impala/analysis/Expr.java b/fe/src/main/java/org/apache/impala/analysis/Expr.java
index 0bf1f8d..6987d66 100644
--- a/fe/src/main/java/org/apache/impala/analysis/Expr.java
+++ b/fe/src/main/java/org/apache/impala/analysis/Expr.java
@@ -17,7 +17,8 @@
 
 package org.apache.impala.analysis;
 
-import java.lang.reflect.Method;
+import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
+
 import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.HashSet;
@@ -53,8 +54,6 @@ import com.google.common.base.Predicates;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 
-import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
-
 /**
  * Root of the expr node hierarchy.
  *
@@ -619,7 +618,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
       Type targetType) throws AnalysisException {
     if (!targetType.isFloatingPointType() && !targetType.isIntegerType()) return child;
     if (targetType.isIntegerType()) targetType = Type.DOUBLE;
-    List<NumericLiteral> literals = Lists.newArrayList();
+    List<NumericLiteral> literals = new ArrayList<>();
     child.collectAll(Predicates.instanceOf(NumericLiteral.class), literals);
     ExprSubstitutionMap smap = new ExprSubstitutionMap();
     for (NumericLiteral l: literals) {
@@ -778,7 +777,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
   }
 
   public static List<TExpr> treesToThrift(List<? extends Expr> exprs) {
-    List<TExpr> result = Lists.newArrayList();
+    List<TExpr> result = new ArrayList<>();
     for (Expr expr: exprs) {
       result.add(expr.treeToThrift());
     }
@@ -794,7 +793,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
   }
 
   public List<String> childrenToSql(ToSqlOptions options) {
-    List<String> result = Lists.newArrayList();
+    List<String> result = new ArrayList<>();
     for (Expr child: children_) {
       result.add(child.toSql(options));
     }
@@ -807,7 +806,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
 
   public static String debugString(List<? extends Expr> exprs) {
     if (exprs == null || exprs.isEmpty()) return "";
-    List<String> strings = Lists.newArrayList();
+    List<String> strings = new ArrayList<>();
     for (Expr expr: exprs) {
       strings.add(expr.debugString());
     }
@@ -816,7 +815,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
 
   public static String toSql(List<? extends Expr> exprs, ToSqlOptions options) {
     if (exprs == null || exprs.isEmpty()) return "";
-    List<String> strings = Lists.newArrayList();
+    List<String> strings = new ArrayList<>();
     for (Expr expr: exprs) {
       strings.add(expr.toSql(options));
     }
@@ -921,7 +920,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
    * SlotRefs, etc. Hence, this method is placed here and not in Predicate.
    */
   public List<Expr> getConjuncts() {
-    List<Expr> list = Lists.newArrayList();
+    List<Expr> list = new ArrayList<>();
     if (this instanceof CompoundPredicate
         && ((CompoundPredicate) this).getOp() == CompoundPredicate.Operator.AND) {
       // TODO: we have to convert CompoundPredicate.AND to two expr trees for
@@ -1090,7 +1089,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
    */
   public static <C extends Expr> List<C> removeDuplicates(List<C> l,
       SlotRef.Comparator cmp) {
-    List<C> newList = Lists.newArrayList();
+    List<C> newList = new ArrayList<>();
     for (C expr: l) {
       boolean exists = false;
       for (C newExpr : newList) {
@@ -1514,7 +1513,7 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
    */
   public Subquery getSubquery() {
     if (!contains(Subquery.class)) return null;
-    List<Subquery> subqueries = Lists.newArrayList();
+    List<Subquery> subqueries = new ArrayList<>();
     collect(Subquery.class, subqueries);
     Preconditions.checkState(subqueries.size() == 1);
     return subqueries.get(0);
diff --git a/fe/src/main/java/org/apache/impala/analysis/ExprSubstitutionMap.java b/fe/src/main/java/org/apache/impala/analysis/ExprSubstitutionMap.java
index 2355294..5e9f875 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ExprSubstitutionMap.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ExprSubstitutionMap.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.List;
 
 import org.slf4j.Logger;
@@ -41,7 +42,7 @@ public final class ExprSubstitutionMap {
   private List<Expr> rhs_; // right-hand side
 
   public ExprSubstitutionMap() {
-    this(Lists.<Expr>newArrayList(), Lists.<Expr>newArrayList());
+    this(new ArrayList<>(), new ArrayList<>());
   }
 
   public ExprSubstitutionMap(List<Expr> lhs, List<Expr> rhs) {
@@ -140,7 +141,7 @@ public final class ExprSubstitutionMap {
 
   public String debugString() {
     Preconditions.checkState(lhs_.size() == rhs_.size());
-    List<String> output = Lists.newArrayList();
+    List<String> output = new ArrayList<>();
     for (int i = 0; i < lhs_.size(); ++i) {
       output.add(lhs_.get(i).toSql() + ":" + rhs_.get(i).toSql());
       output.add("(" + lhs_.get(i).debugString() + ":" + rhs_.get(i).debugString() + ")");
diff --git a/fe/src/main/java/org/apache/impala/analysis/FromClause.java b/fe/src/main/java/org/apache/impala/analysis/FromClause.java
index ab7956e..a13b2dd 100644
--- a/fe/src/main/java/org/apache/impala/analysis/FromClause.java
+++ b/fe/src/main/java/org/apache/impala/analysis/FromClause.java
@@ -17,6 +17,8 @@
 
 package org.apache.impala.analysis;
 
+import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
+
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
@@ -26,8 +28,6 @@ import org.apache.impala.common.AnalysisException;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
-import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
-
 /**
  * Wraps a list of TableRef instances that form a FROM clause, allowing them to be
  * analyzed independently of the statement using them. To increase the flexibility of
@@ -118,6 +118,7 @@ public class FromClause extends StmtNode implements Iterable<TableRef> {
     return toSql(DEFAULT);
   }
 
+  @Override
   public String toSql(ToSqlOptions options) {
     StringBuilder builder = new StringBuilder();
     if (!tableRefs_.isEmpty()) {
diff --git a/fe/src/main/java/org/apache/impala/analysis/KuduPartitionExpr.java b/fe/src/main/java/org/apache/impala/analysis/KuduPartitionExpr.java
index 4cca939..af430b3 100644
--- a/fe/src/main/java/org/apache/impala/analysis/KuduPartitionExpr.java
+++ b/fe/src/main/java/org/apache/impala/analysis/KuduPartitionExpr.java
@@ -25,8 +25,6 @@ import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TExprNode;
 import org.apache.impala.thrift.TExprNodeType;
 import org.apache.impala.thrift.TKuduPartitionExpr;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 
diff --git a/fe/src/main/java/org/apache/impala/analysis/KuduPartitionParam.java b/fe/src/main/java/org/apache/impala/analysis/KuduPartitionParam.java
index 3e84cef..a9344e5 100644
--- a/fe/src/main/java/org/apache/impala/analysis/KuduPartitionParam.java
+++ b/fe/src/main/java/org/apache/impala/analysis/KuduPartitionParam.java
@@ -17,6 +17,9 @@
 
 package org.apache.impala.analysis;
 
+import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
+
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -25,13 +28,12 @@ import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TKuduPartitionByHashParam;
 import org.apache.impala.thrift.TKuduPartitionByRangeParam;
 import org.apache.impala.thrift.TKuduPartitionParam;
+
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 
-import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
-
 /**
  * Represents the partitioning of a Kudu table as defined in the PARTITION BY
  * clause of a CREATE TABLE statement. The partitioning can be hash-based or
@@ -92,7 +94,7 @@ public class KuduPartitionParam extends StmtNode {
 
   // Columns of this partitioning. If no columns are specified, all
   // the primary key columns of the associated table are used.
-  private final List<String> colNames_ = Lists.newArrayList();
+  private final List<String> colNames_ = new ArrayList<>();
 
   // Map of primary key column names to the associated column definitions. Must be set
   // before the call to analyze().
@@ -161,7 +163,7 @@ public class KuduPartitionParam extends StmtNode {
     } else {
       builder.append(" (");
       if (rangePartitions_ != null) {
-        List<String> partsSql = Lists.newArrayList();
+        List<String> partsSql = new ArrayList<>();
         for (RangePartition rangePartition: rangePartitions_) {
           partsSql.add(rangePartition.toSql(options));
         }
diff --git a/fe/src/main/java/org/apache/impala/analysis/ModifyStmt.java b/fe/src/main/java/org/apache/impala/analysis/ModifyStmt.java
index ec7252a..5d34b97 100644
--- a/fe/src/main/java/org/apache/impala/analysis/ModifyStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/ModifyStmt.java
@@ -24,6 +24,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.impala.authorization.Privilege;
 import org.apache.impala.catalog.Column;
@@ -225,8 +226,8 @@ public abstract class ModifyStmt extends StatementBase {
       List<SelectListItem> selectList, List<Integer> referencedColumns)
       throws AnalysisException {
     // The order of the referenced columns equals the order of the result expressions
-    HashSet<SlotId> uniqueSlots = new HashSet<>();
-    HashSet<SlotId> keySlots = new HashSet<>();
+    Set<SlotId> uniqueSlots = new HashSet<>();
+    Set<SlotId> keySlots = new HashSet<>();
 
     // Mapping from column name to index
     List<Column> cols = table_.getColumnsInHiveOrder();
diff --git a/fe/src/main/java/org/apache/impala/analysis/MultiAggregateInfo.java b/fe/src/main/java/org/apache/impala/analysis/MultiAggregateInfo.java
index 4eb594c..f054e04 100644
--- a/fe/src/main/java/org/apache/impala/analysis/MultiAggregateInfo.java
+++ b/fe/src/main/java/org/apache/impala/analysis/MultiAggregateInfo.java
@@ -30,8 +30,6 @@ import org.apache.impala.catalog.Type;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.common.InternalException;
 import org.apache.kudu.shaded.com.google.common.collect.Lists;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 
diff --git a/fe/src/main/java/org/apache/impala/analysis/PartitionKeyValue.java b/fe/src/main/java/org/apache/impala/analysis/PartitionKeyValue.java
index 8b3b556..1add747 100644
--- a/fe/src/main/java/org/apache/impala/analysis/PartitionKeyValue.java
+++ b/fe/src/main/java/org/apache/impala/analysis/PartitionKeyValue.java
@@ -17,13 +17,14 @@
 
 package org.apache.impala.analysis;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-
-import org.apache.impala.common.AnalysisException;
+import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.List;
 
+import org.apache.impala.common.AnalysisException;
+
+import com.google.common.base.Preconditions;
+
 /**
  * Representation of a single column:value element in the PARTITION (...) clause of an
  * insert or alter table statement.
@@ -97,7 +98,7 @@ public class PartitionKeyValue {
    */
   public static List<String> getPartitionKeyValueStringList(
       List<LiteralExpr> literals, String nullPartitionKeyValue) {
-    List<String> partValues = Lists.newArrayList();
+    List<String> partValues = new ArrayList<>();
     for (LiteralExpr partValue : literals) {
       partValues.add(getPartitionKeyValueString(partValue, nullPartitionKeyValue));
     }
diff --git a/fe/src/main/java/org/apache/impala/analysis/PartitionSet.java b/fe/src/main/java/org/apache/impala/analysis/PartitionSet.java
index e9972bb..c14eced 100644
--- a/fe/src/main/java/org/apache/impala/analysis/PartitionSet.java
+++ b/fe/src/main/java/org/apache/impala/analysis/PartitionSet.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -34,7 +35,6 @@ import org.apache.impala.thrift.TPartitionKeyValue;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
 
 /**
  * Represents a set of partitions resulting from evaluating a list of partition conjuncts
@@ -55,7 +55,7 @@ public class PartitionSet extends PartitionSpecBase {
   @Override
   public void analyze(Analyzer analyzer) throws AnalysisException {
     super.analyze(analyzer);
-    List<Expr> conjuncts = Lists.newArrayList();
+    List<Expr> conjuncts = new ArrayList<>();
     // Do not register column-authorization requests.
     analyzer.setEnablePrivChecks(false);
     for (Expr e: partitionExprs_) {
@@ -144,7 +144,7 @@ public class PartitionSet extends PartitionSpecBase {
   // partition pruner.
   private List<Expr> transformPartitionConjuncts(Analyzer analyzer, List<Expr> conjuncts)
       throws AnalysisException {
-    List<Expr> transformedConjuncts = Lists.newArrayList();
+    List<Expr> transformedConjuncts = new ArrayList<>();
     for (Expr e : conjuncts) {
       Expr result = e;
       if (e instanceof BinaryPredicate) {
@@ -172,9 +172,9 @@ public class PartitionSet extends PartitionSpecBase {
   }
 
   public List<List<TPartitionKeyValue>> toThrift() {
-    List<List<TPartitionKeyValue>> thriftPartitionSet = Lists.newArrayList();
+    List<List<TPartitionKeyValue>> thriftPartitionSet = new ArrayList<>();
     for (FeFsPartition hdfsPartition : partitions_) {
-      List<TPartitionKeyValue> thriftPartitionSpec = Lists.newArrayList();
+      List<TPartitionKeyValue> thriftPartitionSpec = new ArrayList<>();
       for (int i = 0; i < table_.getNumClusteringCols(); ++i) {
         String key = table_.getColumns().get(i).getName();
         String value = PartitionKeyValue.getPartitionKeyValueString(
@@ -188,7 +188,7 @@ public class PartitionSet extends PartitionSpecBase {
 
   @Override
   public String toSql(ToSqlOptions options) {
-    List<String> partitionExprStr = Lists.newArrayList();
+    List<String> partitionExprStr = new ArrayList<>();
     for (Expr e : partitionExprs_) {
       partitionExprStr.add(e.toSql(options));
     }
diff --git a/fe/src/main/java/org/apache/impala/analysis/PartitionSpec.java b/fe/src/main/java/org/apache/impala/analysis/PartitionSpec.java
index d113cf9..c6434d9 100644
--- a/fe/src/main/java/org/apache/impala/analysis/PartitionSpec.java
+++ b/fe/src/main/java/org/apache/impala/analysis/PartitionSpec.java
@@ -17,9 +17,11 @@
 
 package org.apache.impala.analysis;
 
-import com.google.common.base.Joiner;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.impala.catalog.Column;
@@ -28,10 +30,9 @@ import org.apache.impala.catalog.Type;
 import org.apache.impala.common.AnalysisException;
 import org.apache.impala.thrift.TPartitionKeyValue;
 
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import com.google.common.base.Joiner;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
 
 /**
  * Represents a partition spec - a collection of partition key/values.
@@ -126,7 +127,7 @@ public class PartitionSpec extends PartitionSpecBase {
    * Returns the Thrift representation of this PartitionSpec.
    */
   public List<TPartitionKeyValue> toThrift() {
-    List<TPartitionKeyValue> thriftPartitionSpec = Lists.newArrayList();
+    List<TPartitionKeyValue> thriftPartitionSpec = new ArrayList<>();
     for (PartitionKeyValue kv: partitionSpec_) {
       String value = PartitionKeyValue.getPartitionKeyValueString(
           kv.getLiteralValue(),  getNullPartitionKeyValue());
@@ -137,7 +138,7 @@ public class PartitionSpec extends PartitionSpecBase {
 
   @Override
   public String toSql(ToSqlOptions options) {
-    List<String> partitionSpecStr = Lists.newArrayList();
+    List<String> partitionSpecStr = new ArrayList<>();
     for (PartitionKeyValue kv: partitionSpec_) {
       partitionSpecStr.add(kv.getColName() + "=" + kv.getValue().toSql(options));
     }
diff --git a/fe/src/main/java/org/apache/impala/analysis/Path.java b/fe/src/main/java/org/apache/impala/analysis/Path.java
index 3687fec..b3cd9e4 100644
--- a/fe/src/main/java/org/apache/impala/analysis/Path.java
+++ b/fe/src/main/java/org/apache/impala/analysis/Path.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.impala.catalog.ArrayType;
@@ -135,8 +136,8 @@ public class Path {
 
   // List of matched types and field positions set during resolution. The matched
   // types/positions describe the physical path through the schema tree.
-  private final List<Type> matchedTypes_ = Lists.newArrayList();
-  private final List<Integer> matchedPositions_ = Lists.newArrayList();
+  private final List<Type> matchedTypes_ = new ArrayList<>();
+  private final List<Integer> matchedPositions_ = new ArrayList<>();
 
   // Remembers the indices into rawPath_ and matchedTypes_ of the first collection
   // matched during resolution.
@@ -282,7 +283,7 @@ public class Path {
    */
   public static List<TableName> getCandidateTables(List<String> path, String sessionDb) {
     Preconditions.checkArgument(path != null && !path.isEmpty());
-    List<TableName> result = Lists.newArrayList();
+    List<TableName> result = new ArrayList<>();
     int end = Math.min(2, path.size());
     for (int tblNameIdx = 0; tblNameIdx < end; ++tblNameIdx) {
       String dbName = (tblNameIdx == 0) ? sessionDb : path.get(0);
@@ -391,7 +392,7 @@ public class Path {
    * mydb.test.a.item.f1
    */
   public List<String> getCanonicalPath() {
-    List<String> result = Lists.newArrayList();
+    List<String> result = new ArrayList<>();
     getCanonicalPath(result);
     return result;
   }
@@ -429,7 +430,7 @@ public class Path {
   public List<Integer> getAbsolutePath() {
     if (absolutePath_ != null) return absolutePath_;
     Preconditions.checkState(isResolved_);
-    absolutePath_ = Lists.newArrayList();
+    absolutePath_ = new ArrayList<>();
     if (rootDesc_ != null) absolutePath_.addAll(rootDesc_.getPath().getAbsolutePath());
     absolutePath_.addAll(matchedPositions_);
     return absolutePath_;
diff --git a/fe/src/main/java/org/apache/impala/analysis/PlanHint.java b/fe/src/main/java/org/apache/impala/analysis/PlanHint.java
index 595bc47..f549316 100644
--- a/fe/src/main/java/org/apache/impala/analysis/PlanHint.java
+++ b/fe/src/main/java/org/apache/impala/analysis/PlanHint.java
@@ -18,10 +18,10 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.List;
 
 import com.google.common.base.Joiner;
-import com.google.common.collect.Lists;
 
 /**
  * Class to parse and store query plan hints, which can occur in various places inside SQL
@@ -36,7 +36,7 @@ public class PlanHint {
 
   public PlanHint(String name) {
     name_ = name;
-    args_ = Lists.newArrayList();
+    args_ = new ArrayList<>();
   }
 
   public PlanHint(String name, List<String> args) {
diff --git a/fe/src/main/java/org/apache/impala/analysis/PrivilegeSpec.java b/fe/src/main/java/org/apache/impala/analysis/PrivilegeSpec.java
index 0837607..f6a26ca 100644
--- a/fe/src/main/java/org/apache/impala/analysis/PrivilegeSpec.java
+++ b/fe/src/main/java/org/apache/impala/analysis/PrivilegeSpec.java
@@ -17,6 +17,9 @@
 
 package org.apache.impala.analysis;
 
+import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
+
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.impala.authorization.Privilege;
@@ -32,9 +35,6 @@ import org.apache.impala.thrift.TPrivilegeScope;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
-
-import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
 
 /**
  * Represents a privilege spec from a GRANT/REVOKE statement.
@@ -106,7 +106,7 @@ public class PrivilegeSpec extends StmtNode {
   }
 
   public List<TPrivilege> toThrift() {
-    List<TPrivilege> privileges = Lists.newArrayList();
+    List<TPrivilege> privileges = new ArrayList<>();
     if (scope_ == TPrivilegeScope.COLUMN) {
       // Create a TPrivilege for every referenced column
       for (String column: columnNames_) {
diff --git a/fe/src/main/java/org/apache/impala/analysis/SelectList.java b/fe/src/main/java/org/apache/impala/analysis/SelectList.java
index a38774a..3ac3830 100644
--- a/fe/src/main/java/org/apache/impala/analysis/SelectList.java
+++ b/fe/src/main/java/org/apache/impala/analysis/SelectList.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.impala.common.AnalysisException;
@@ -29,7 +30,7 @@ import com.google.common.collect.Lists;
  * Select list items plus optional distinct clause and optional plan hints.
  */
 public class SelectList {
-  private List<PlanHint> planHints_ = Lists.newArrayList();
+  private List<PlanHint> planHints_ = new ArrayList<>();
   private boolean isDistinct_;
 
   /////////////////////////////////////////
@@ -47,7 +48,7 @@ public class SelectList {
 
   public SelectList() {
     isDistinct_ = false;
-    items_ = Lists.newArrayList();
+    items_ = new ArrayList<>();
   }
 
   public SelectList(List<SelectListItem> items, boolean isDistinct,
@@ -62,7 +63,7 @@ public class SelectList {
    */
   public SelectList(SelectList other) {
     planHints_ = Lists.newArrayList(other.planHints_);
-    items_ = Lists.newArrayList();
+    items_ = new ArrayList<>();
     for (SelectListItem item: other.items_) {
       items_.add(item.clone());
     }
diff --git a/fe/src/main/java/org/apache/impala/analysis/SlotDescriptor.java b/fe/src/main/java/org/apache/impala/analysis/SlotDescriptor.java
index aae3efa..f203d70 100644
--- a/fe/src/main/java/org/apache/impala/analysis/SlotDescriptor.java
+++ b/fe/src/main/java/org/apache/impala/analysis/SlotDescriptor.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
@@ -49,7 +50,7 @@ public class SlotDescriptor {
 
   // Expr(s) materialized into this slot; multiple exprs for unions. Should be empty if
   // path_ is set.
-  private List<Expr> sourceExprs_ = Lists.newArrayList();
+  private List<Expr> sourceExprs_ = new ArrayList<>();
 
   // if false, this slot doesn't need to be materialized in parent tuple
   // (and physical layout parameters are invalid)
diff --git a/fe/src/main/java/org/apache/impala/analysis/SortInfo.java b/fe/src/main/java/org/apache/impala/analysis/SortInfo.java
index 8472725..1def7e0 100644
--- a/fe/src/main/java/org/apache/impala/analysis/SortInfo.java
+++ b/fe/src/main/java/org/apache/impala/analysis/SortInfo.java
@@ -17,7 +17,9 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.Collection;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
 
@@ -27,7 +29,6 @@ import org.apache.impala.planner.PlanNode;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicates;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 
 /**
  * Encapsulates all the information needed to compute ORDER BY
@@ -65,7 +66,7 @@ public class SortInfo {
     sortExprs_ = sortExprs;
     isAscOrder_ = isAscOrder;
     nullsFirstParams_ = nullsFirstParams;
-    materializedExprs_ = Lists.newArrayList();
+    materializedExprs_ = new ArrayList<>();
     outputSmap_ = new ExprSubstitutionMap();
   }
 
@@ -94,7 +95,7 @@ public class SortInfo {
    */
   public List<Boolean> getNullsFirst() {
     Preconditions.checkState(sortExprs_.size() == nullsFirstParams_.size());
-    List<Boolean> nullsFirst = Lists.newArrayList();
+    List<Boolean> nullsFirst = new ArrayList<>();
     for (int i = 0; i < sortExprs_.size(); ++i) {
       nullsFirst.add(OrderByElement.nullsFirst(nullsFirstParams_.get(i),
           isAscOrder_.get(i)));
@@ -112,7 +113,7 @@ public class SortInfo {
     Preconditions.checkState(sortTupleDesc_.isMaterialized());
     analyzer.materializeSlots(sortExprs_);
     List<SlotDescriptor> sortTupleSlotDescs = sortTupleDesc_.getSlots();
-    List<Expr> materializedExprs = Lists.newArrayList();
+    List<Expr> materializedExprs = new ArrayList<>();
     for (int i = 0; i < sortTupleSlotDescs.size(); ++i) {
       if (sortTupleSlotDescs.get(i).isMaterialized()) {
         materializedExprs.add(materializedExprs_.get(i));
@@ -188,7 +189,7 @@ public class SortInfo {
 
     // Case 2: Materialize required input slots. Using a LinkedHashSet prevents the
     // slots getting reordered unnecessarily.
-    Set<SlotRef> inputSlotRefs = Sets.newLinkedHashSet();
+    Set<SlotRef> inputSlotRefs = new LinkedHashSet<>();
     IsInputSlotRefPred pred = new IsInputSlotRefPred(sortTupleDesc_.getId());
     TreeNode.collect(Expr.substituteList(resultExprs, outputSmap_, analyzer, false),
         pred, inputSlotRefs);
@@ -197,7 +198,7 @@ public class SortInfo {
     addMaterializedExprs(inputSlotRefs, analyzer);
 
     // Case 3: Materialize TupleIsNullPredicates.
-    List<Expr> tupleIsNullPreds = Lists.newArrayList();
+    List<Expr> tupleIsNullPreds = new ArrayList<>();
     TreeNode.collect(resultExprs, Predicates.instanceOf(TupleIsNullPredicate.class),
         tupleIsNullPreds);
     Expr.removeDuplicates(tupleIsNullPreds);
@@ -255,7 +256,7 @@ public class SortInfo {
    * - does not have a cost set
    */
   private List<Expr> getMaterializedSortExprs() {
-    List<Expr> result = Lists.newArrayList();
+    List<Expr> result = new ArrayList<>();
     for (Expr sortExpr : sortExprs_) {
       if (!sortExpr.hasCost()
           || sortExpr.getCost() > SORT_MATERIALIZATION_COST_THRESHOLD
diff --git a/fe/src/main/java/org/apache/impala/analysis/StmtMetadataLoader.java b/fe/src/main/java/org/apache/impala/analysis/StmtMetadataLoader.java
index b810e69..32902e3 100644
--- a/fe/src/main/java/org/apache/impala/analysis/StmtMetadataLoader.java
+++ b/fe/src/main/java/org/apache/impala/analysis/StmtMetadataLoader.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -35,7 +36,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 
 /**
  * Loads all table and view metadata relevant for a single SQL statement and returns the
@@ -272,7 +272,7 @@ public class StmtMetadataLoader {
    */
   private Set<TableName> collectTableCandidates(StatementBase stmt) {
     Preconditions.checkNotNull(stmt);
-    List<TableRef> tblRefs = Lists.newArrayList();
+    List<TableRef> tblRefs = new ArrayList<>();
     stmt.collectTableRefs(tblRefs);
     Set<TableName> tableNames = new HashSet<>();
     for (TableRef ref: tblRefs) {
diff --git a/fe/src/main/java/org/apache/impala/analysis/TableDataLayout.java b/fe/src/main/java/org/apache/impala/analysis/TableDataLayout.java
index aef5732..251cc2f 100644
--- a/fe/src/main/java/org/apache/impala/analysis/TableDataLayout.java
+++ b/fe/src/main/java/org/apache/impala/analysis/TableDataLayout.java
@@ -17,8 +17,7 @@
 
 package org.apache.impala.analysis;
 
-import com.google.common.collect.Lists;
-
+import java.util.ArrayList;
 import java.util.List;
 
 /**
@@ -37,17 +36,16 @@ class TableDataLayout {
 
   static TableDataLayout createPartitionedLayout(List<ColumnDef> partitionColumnDefs) {
     return new TableDataLayout(partitionColumnDefs,
-        Lists.<KuduPartitionParam>newArrayList());
+        new ArrayList<>());
   }
 
   static TableDataLayout createKuduPartitionedLayout(
       List<KuduPartitionParam> partitionParams) {
-    return new TableDataLayout(Lists.<ColumnDef>newArrayList(), partitionParams);
+    return new TableDataLayout(new ArrayList<>(), partitionParams);
   }
 
   static TableDataLayout createEmptyLayout() {
-    return new TableDataLayout(Lists.<ColumnDef>newArrayList(),
-        Lists.<KuduPartitionParam>newArrayList());
+    return new TableDataLayout(new ArrayList<>(), new ArrayList<>());
   }
 
   List<ColumnDef> getPartitionColumnDefs() { return partitionColDefs_; }
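
The TableDataLayout hunk shows the same cleanup where the old code needed an explicit type witness: Lists.<ColumnDef>newArrayList() spelled out the element type because the factory had no arguments to infer it from, while new ArrayList<>() lets the compiler infer it from the constructor parameter it is passed to (assuming a Java 8 or newer compiler, where the diamond picks up the target type in argument position). A sketch with hypothetical stand-in types, not the real ColumnDef/KuduPartitionParam classes:

    import java.util.ArrayList;
    import java.util.List;

    public class WitnessExample {
      // Hypothetical stand-ins for ColumnDef and KuduPartitionParam.
      static class Col {}
      static class Param {}

      private final List<Col> cols;
      private final List<Param> params;

      // Constructor shape mirroring TableDataLayout(List, List).
      WitnessExample(List<Col> cols, List<Param> params) {
        this.cols = cols;
        this.params = params;
      }

      static WitnessExample createEmpty() {
        // Old style: explicit witnesses, e.g.
        //   new WitnessExample(Lists.<Col>newArrayList(),
        //       Lists.<Param>newArrayList());
        // New style: the diamond infers <Col> and <Param> from the
        // constructor's parameter types.
        return new WitnessExample(new ArrayList<>(), new ArrayList<>());
      }

      public static void main(String[] args) {
        WitnessExample layout = createEmpty();
        System.out.println(layout.cols.size() + ", " + layout.params.size());  // 0, 0
      }
    }
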
diff --git a/fe/src/main/java/org/apache/impala/analysis/TableRef.java b/fe/src/main/java/org/apache/impala/analysis/TableRef.java
index 01d3c78..5a6e866 100644
--- a/fe/src/main/java/org/apache/impala/analysis/TableRef.java
+++ b/fe/src/main/java/org/apache/impala/analysis/TableRef.java
@@ -17,6 +17,9 @@
 
 package org.apache.impala.analysis;
 
+import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
+
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
@@ -34,8 +37,6 @@ import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
-import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
-
 /**
  * Superclass of all table references, including references to views, base tables
  * (Hdfs, HBase or DataSource tables), and nested collections. Contains the join
@@ -87,10 +88,10 @@ public class TableRef extends StmtNode {
   protected TableSampleClause sampleParams_;
 
   protected JoinOperator joinOp_;
-  protected List<PlanHint> joinHints_ = Lists.newArrayList();
+  protected List<PlanHint> joinHints_ = new ArrayList<>();
   protected List<String> usingColNames_;
 
-  protected List<PlanHint> tableHints_ = Lists.newArrayList();
+  protected List<PlanHint> tableHints_ = new ArrayList<>();
   protected TReplicaPreference replicaPreference_;
   protected boolean randomReplica_;
 
@@ -118,14 +119,14 @@ public class TableRef extends StmtNode {
   // we may alter the chain of table refs during plan generation, but we still rely
   // on the original list of ids for correct predicate assignment.
   // Populated in analyzeJoin().
-  protected List<TupleId> allTableRefIds_ = Lists.newArrayList();
-  protected List<TupleId> allMaterializedTupleIds_ = Lists.newArrayList();
+  protected List<TupleId> allTableRefIds_ = new ArrayList<>();
+  protected List<TupleId> allMaterializedTupleIds_ = new ArrayList<>();
 
   // All physical tuple ids that this table ref is correlated with:
   // Tuple ids of root descriptors from outer query blocks that this table ref
   // (if a CollectionTableRef) or contained CollectionTableRefs (if an InlineViewRef)
   // are rooted at. Populated during analysis.
-  protected List<TupleId> correlatedTupleIds_ = Lists.newArrayList();
+  protected List<TupleId> correlatedTupleIds_ = new ArrayList<>();
 
   // analysis output
   protected TupleDescriptor desc_;
@@ -554,7 +555,7 @@ public class TableRef extends StmtNode {
       // without violating outer join semantics.
       analyzer.registerOnClauseConjuncts(conjuncts, this);
       for (Expr e: conjuncts) {
-        List<TupleId> tupleIds = Lists.newArrayList();
+        List<TupleId> tupleIds = new ArrayList<>();
         e.getIds(tupleIds, null);
         onClauseTupleIds.addAll(tupleIds);
       }
diff --git a/fe/src/main/java/org/apache/impala/analysis/TupleIsNullPredicate.java b/fe/src/main/java/org/apache/impala/analysis/TupleIsNullPredicate.java
index 5932174..0605ff9 100644
--- a/fe/src/main/java/org/apache/impala/analysis/TupleIsNullPredicate.java
+++ b/fe/src/main/java/org/apache/impala/analysis/TupleIsNullPredicate.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 
@@ -25,6 +26,7 @@ import org.apache.impala.common.InternalException;
 import org.apache.impala.thrift.TExprNode;
 import org.apache.impala.thrift.TExprNodeType;
 import org.apache.impala.thrift.TTupleIsNullPredicate;
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
@@ -137,7 +139,7 @@ public class TupleIsNullPredicate extends Predicate {
   public static Expr wrapExpr(Expr expr, List<TupleId> tids, Analyzer analyzer)
       throws InternalException {
     if (!requiresNullWrapping(expr, analyzer)) return expr;
-    List<Expr> params = Lists.newArrayList();
+    List<Expr> params = new ArrayList<>();
     params.add(new TupleIsNullPredicate(tids));
     params.add(new NullLiteral());
     params.add(expr);
diff --git a/fe/src/main/java/org/apache/impala/analysis/UnionStmt.java b/fe/src/main/java/org/apache/impala/analysis/UnionStmt.java
index 1b6c387..8438c53 100644
--- a/fe/src/main/java/org/apache/impala/analysis/UnionStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/UnionStmt.java
@@ -17,6 +17,7 @@
 
 package org.apache.impala.analysis;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 
@@ -28,7 +29,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 
 /**
  * Representation of a union with its list of operands, and optional order by and limit.
@@ -135,12 +135,12 @@ public class UnionStmt extends QueryStmt {
 
   // filled during analyze(); contains all operands that need to go through
   // distinct aggregation
-  protected final List<UnionOperand> distinctOperands_ = Lists.newArrayList();
+  protected final List<UnionOperand> distinctOperands_ = new ArrayList<>();
 
   // filled during analyze(); contains all operands that can be aggregated with
   // a simple merge without duplicate elimination (also needs to merge the output
   // of the DISTINCT operands)
-  protected final List<UnionOperand> allOperands_ = Lists.newArrayList();
+  protected final List<UnionOperand> allOperands_ = new ArrayList<>();
 
   protected MultiAggregateInfo distinctAggInfo_; // only set if we have DISTINCT ops
 
@@ -155,7 +155,7 @@ public class UnionStmt extends QueryStmt {
 
   // List of output expressions produced by the union without the ORDER BY portion
   // (if any). Same as resultExprs_ if there is no ORDER BY.
-  private List<Expr> unionResultExprs_ = Lists.newArrayList();
+  private List<Expr> unionResultExprs_ = new ArrayList<>();
 
   // END: Members that need to be reset()
   /////////////////////////////////////////
@@ -174,7 +174,7 @@ public class UnionStmt extends QueryStmt {
   protected UnionStmt(UnionStmt other) {
     super(other.cloneOrderByElements(),
         (other.limitElement_ == null) ? null : other.limitElement_.clone());
-    operands_ = Lists.newArrayList();
+    operands_ = new ArrayList<>();
     if (analyzer_ != null) {
       for (UnionOperand o: other.distinctOperands_) distinctOperands_.add(o.clone());
       for (UnionOperand o: other.allOperands_) allOperands_.add(o.clone());
@@ -237,7 +237,7 @@ public class UnionStmt extends QueryStmt {
     }
 
     // Collect all result expr lists and cast the exprs as necessary.
-    List<List<Expr>> resultExprLists = Lists.newArrayList();
+    List<List<Expr>> resultExprLists = new ArrayList<>();
     for (UnionOperand op: operands_) {
       resultExprLists.add(op.getQueryStmt().getResultExprs());
     }
@@ -307,7 +307,7 @@ public class UnionStmt extends QueryStmt {
 
     // collect operands' result exprs
     List<SlotDescriptor> outputSlots = tupleDesc.getSlots();
-    List<Expr> exprs = Lists.newArrayList();
+    List<Expr> exprs = new ArrayList<>();
     for (int i = 0; i < outputSlots.size(); ++i) {
       SlotDescriptor slotDesc = outputSlots.get(i);
       if (!slotDesc.isMaterialized()) continue;
@@ -465,7 +465,7 @@ public class UnionStmt extends QueryStmt {
     List<Expr> firstSelectExprs = operands_.get(0).getQueryStmt().getResultExprs();
 
     // Compute column stats for the materialized slots from the source exprs.
-    List<ColumnStats> columnStats = Lists.newArrayList();
+    List<ColumnStats> columnStats = new ArrayList<>();
     for (int i = 0; i < operands_.size(); ++i) {
       List<Expr> selectExprs = operands_.get(i).getQueryStmt().getResultExprs();
       for (int j = 0; j < selectExprs.size(); ++j) {
diff --git a/fe/src/main/java/org/apache/impala/analysis/UpdateStmt.java b/fe/src/main/java/org/apache/impala/analysis/UpdateStmt.java
index dfee72f..28af740 100644
--- a/fe/src/main/java/org/apache/impala/analysis/UpdateStmt.java
+++ b/fe/src/main/java/org/apache/impala/analysis/UpdateStmt.java
@@ -19,6 +19,7 @@ package org.apache.impala.analysis;
 
 import static java.lang.String.format;
 
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.impala.common.Pair;
@@ -27,7 +28,6 @@ import org.apache.impala.planner.TableSink;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
 
 /**
  * Representation of an Update statement.
@@ -54,7 +54,7 @@ public class UpdateStmt extends ModifyStmt {
 
   public UpdateStmt(UpdateStmt other) {
     super(other.targetTablePath_, other.fromClause_.clone(),
-        Lists.<Pair<SlotRef, Expr>>newArrayList(), other.wherePredicate_);
+        new ArrayList<>(), other.wherePredicate_);
   }
 
   /**
diff --git a/fe/src/main/java/org/apache/impala/analysis/WithClause.java b/fe/src/main/java/org/apache/impala/analysis/WithClause.java
index 90a13d4..9459ab3 100644
--- a/fe/src/main/java/org/apache/impala/analysis/WithClause.java
+++ b/fe/src/main/java/org/apache/impala/analysis/WithClause.java
@@ -17,6 +17,9 @@
 
 package org.apache.impala.analysis;
 
+import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
+
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.impala.authorization.PrivilegeRequest;
@@ -26,9 +29,6 @@ import org.apache.impala.common.AnalysisException;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-
-import static org.apache.impala.analysis.ToSqlOptions.DEFAULT;
 
 /**
  * Representation of the WITH clause that may appear before a query statement or insert
@@ -102,7 +102,7 @@ public class WithClause extends StmtNode {
    */
   private WithClause(WithClause other) {
     Preconditions.checkNotNull(other);
-    views_ = Lists.newArrayList();
+    views_ = new ArrayList<>();
     for (View view: other.views_) {
       views_.add(new View(view.getName(), view.getQueryStmt().clone(),
           view.getOriginalColLabels()));
@@ -123,7 +123,7 @@ public class WithClause extends StmtNode {
 
   @Override
   public String toSql(ToSqlOptions options) {
-    List<String> viewStrings = Lists.newArrayList();
+    List<String> viewStrings = new ArrayList<>();
     for (View view: views_) {
       // Enclose the view alias and explicit labels in quotes if Hive cannot parse it
       // without quotes. This is needed for view compatibility between Impala and Hive.
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeAuthStmtsTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeAuthStmtsTest.java
index 9b059bb..18c663d 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeAuthStmtsTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeAuthStmtsTest.java
@@ -32,9 +32,9 @@ import org.junit.Test;
 public class AnalyzeAuthStmtsTest extends AnalyzerTest {
   public AnalyzeAuthStmtsTest() {
     catalog_.getAuthPolicy().addPrincipal(
-        new Role("myRole", new HashSet<String>()));
+        new Role("myRole", new HashSet<>()));
     catalog_.getAuthPolicy().addPrincipal(
-        new User("myUser", new HashSet<String>()));
+        new User("myUser", new HashSet<>()));
   }
 
   @Override
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
index 7ffa719..365e3e7 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeDDLTest.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.UUID;
@@ -916,7 +917,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
   public void TestAlterTableSetColumnStats() {
     // Contains entries of the form 'statsKey'='statsValue' for every
     // stats key. A dummy value is used for 'statsValue'.
-    List<String> testKeyValues = Lists.newArrayList();
+    List<String> testKeyValues = new ArrayList<>();
     for (ColumnStats.StatsKey statsKey: ColumnStats.StatsKey.values()) {
       testKeyValues.add(String.format("'%s'='10'", statsKey));
     }
@@ -1462,7 +1463,7 @@ public class AnalyzeDDLTest extends FrontendTestBase {
     Set<Column> actCols = parsedStmt.getValidatedColumnWhitelist();
     if (expColNames == null) assertTrue("Expected no whitelist.", actCols == null);
     assertTrue("Expected whitelist.", actCols != null);
-    Set<String> actColSet = Sets.newHashSet();
+    Set<String> actColSet = new HashSet<>();
     for (Column col: actCols) actColSet.add(col.getName());
     Set<String> expColSet = Sets.newHashSet(expColNames);
     assertEquals(actColSet, expColSet);
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeExprsTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeExprsTest.java
index 40c2521..a0d34f8 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeExprsTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeExprsTest.java
@@ -217,7 +217,7 @@ public class AnalyzeExprsTest extends AnalyzerTest {
         "IS NOT DISTINCT FROM", "<", ">", ">=", "<=", "!=", "=", "<>"}) {
       // Operator can compare numeric values (literals, casts, and columns), even ones of
       // different types.
-      ArrayList<String> numericValues =
+      List<String> numericValues =
           new ArrayList<String>(Arrays.asList("0", "1", "1.1", "-7", "-7.7", "1.2e99",
               "false", "1234567890123456789012345678901234567890", "tinyint_col",
               "smallint_col", "int_col", "bigint_col", "float_col", "double_col"));
@@ -2058,8 +2058,8 @@ public class AnalyzeExprsTest extends AnalyzerTest {
    * as materialized and computes their mem layout.
    */
   private void makeExprExecutable(Expr e, Analyzer analyzer) {
-    List<TupleId> tids = Lists.newArrayList();
-    List<SlotId> sids = Lists.newArrayList();
+    List<TupleId> tids = new ArrayList<>();
+    List<SlotId> sids = new ArrayList<>();
     e.getIds(tids, sids);
     for (SlotId sid: sids) {
       SlotDescriptor slotDesc = analyzer.getDescTbl().getSlotDesc(sid);
@@ -2756,9 +2756,9 @@ public class AnalyzeExprsTest extends AnalyzerTest {
     queryOptions.setAppx_count_distinct(true);
 
     // Accumulates count(distinct) for all columns of alltypesTbl or decimalTbl.
-    List<String> countDistinctFns = Lists.newArrayList();
+    List<String> countDistinctFns = new ArrayList<>();
     // Accumulates count(distinct) for all columns of both alltypesTbl and decimalTbl.
-    List<String> allCountDistinctFns = Lists.newArrayList();
+    List<String> allCountDistinctFns = new ArrayList<>();
 
     Table alltypesTbl = catalog_.getOrLoadTable("functional", "alltypes");
     for (Column col: alltypesTbl.getColumns()) {
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzeStmtsTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzeStmtsTest.java
index 3b87c89..0dae1e6 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzeStmtsTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzeStmtsTest.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.lang.reflect.Field;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 
@@ -405,7 +406,7 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
    */
   private void testStarPath(String sql, List<Integer>... expectedAbsPaths) {
     SelectStmt stmt = (SelectStmt) AnalyzesOk(sql);
-    List<List<Integer>> actualAbsPaths = Lists.newArrayList();
+    List<List<Integer>> actualAbsPaths = new ArrayList<>();
     for (int i = 0; i < stmt.getResultExprs().size(); ++i) {
       Expr e = stmt.getResultExprs().get(i);
       Preconditions.checkState(e instanceof SlotRef);
@@ -2209,7 +2210,7 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
 
     // Test multiple distinct aggregations.
     Table alltypesTbl = catalog_.getOrLoadTable("functional", "alltypes");
-    List<String> distinctFns = Lists.newArrayList();
+    List<String> distinctFns = new ArrayList<>();
     for (Column col : alltypesTbl.getColumns()) {
       distinctFns.add(String.format("count(distinct %s)", col.getName()));
     }
@@ -2257,7 +2258,7 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
     // Positive tests: Test all scalar types and valid sampling percents.
     double validSamplePercs[] = new double[] { 0.0, 0.1, 0.2, 0.5, 0.8, 1.0 };
     for (double perc: validSamplePercs) {
-      List<String> allAggFnCalls = Lists.newArrayList();
+      List<String> allAggFnCalls = new ArrayList<>();
       for (Column col: allScalarTypes.getColumns()) {
         String aggFnCall = String.format("sampled_ndv(%s, %s)", col.getName(), perc);
         allAggFnCalls.add(aggFnCall);
diff --git a/fe/src/test/java/org/apache/impala/analysis/AnalyzerTest.java b/fe/src/test/java/org/apache/impala/analysis/AnalyzerTest.java
index 8d2a4f2..4b78d76 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AnalyzerTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AnalyzerTest.java
@@ -41,7 +41,7 @@ public class AnalyzerTest extends FrontendTestBase {
 
   // maps from type to string that will result in literal of that type
   protected static Map<ScalarType, String> typeToLiteralValue_ =
-      new HashMap<ScalarType, String>();
+      new HashMap<>();
   static {
     typeToLiteralValue_.put(Type.BOOLEAN, "true");
     typeToLiteralValue_.put(Type.TINYINT, "1");
diff --git a/fe/src/test/java/org/apache/impala/analysis/AuthorizationStmtTest.java b/fe/src/test/java/org/apache/impala/analysis/AuthorizationStmtTest.java
index 24086cc..541310f 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AuthorizationStmtTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AuthorizationStmtTest.java
@@ -17,8 +17,16 @@
 
 package org.apache.impala.analysis;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Sets;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.impala.analysis.AnalysisContext.AnalysisResult;
 import org.apache.impala.authorization.AuthorizationConfig;
@@ -52,15 +60,8 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Sets;
 
 /**
  * This class contains authorization tests for SQL statements.
@@ -2599,7 +2600,7 @@ public class AuthorizationStmtTest extends FrontendTestBase {
     // IMPALA-6086: Make sure use of a permanent function requires SELECT (or higher)
     // privilege on the database, and expr rewrite/constant-folding preserves
     // privilege requests for functions.
-    ArrayList<Type> argTypes = new ArrayList<Type>();
+    List<Type> argTypes = new ArrayList<Type>();
     argTypes.add(Type.STRING);
     fn = addFunction("functional", "to_lower", argTypes, Type.STRING,
         "/test-warehouse/libTestUdf.so",
@@ -2700,7 +2701,7 @@ public class AuthorizationStmtTest extends FrontendTestBase {
     return "User '%s' does not have privileges to DROP functions in: " + object;
   }
 
-  private ScalarFunction addFunction(String db, String fnName, ArrayList<Type> argTypes,
+  private ScalarFunction addFunction(String db, String fnName, List<Type> argTypes,
       Type retType, String uriPath, String symbolName) {
     ScalarFunction fn = ScalarFunction.createForTesting(db, fnName, argTypes, retType,
         uriPath, symbolName, null, null, TFunctionBinaryType.NATIVE);
@@ -2709,7 +2710,7 @@ public class AuthorizationStmtTest extends FrontendTestBase {
   }
 
   private ScalarFunction addFunction(String db, String fnName) {
-    return addFunction(db, fnName, new ArrayList<Type>(), Type.INT, "/dummy",
+    return addFunction(db, fnName, new ArrayList<>(), Type.INT, "/dummy",
         "dummy.class");
   }
 
@@ -2735,7 +2736,7 @@ public class AuthorizationStmtTest extends FrontendTestBase {
   }
 
   private static TPrivilegeLevel[] allExcept(TPrivilegeLevel... excludedPrivLevels) {
-    HashSet<TPrivilegeLevel> excludedSet = Sets.newHashSet(excludedPrivLevels);
+    Set<TPrivilegeLevel> excludedSet = Sets.newHashSet(excludedPrivLevels);
     List<TPrivilegeLevel> privLevels = new ArrayList<>();
     for (TPrivilegeLevel level: TPrivilegeLevel.values()) {
       if (!excludedSet.contains(level)) {
@@ -3165,7 +3166,7 @@ public class AuthorizationStmtTest extends FrontendTestBase {
   private void verifyPrivilegeReqs(AnalysisContext ctx, String stmt,
       Set<String> expectedPrivilegeNames) throws ImpalaException {
     AnalysisResult analysisResult = parseAndAnalyze(stmt, ctx, frontend_);
-    Set<String> actualPrivilegeNames = Sets.newHashSet();
+    Set<String> actualPrivilegeNames = new HashSet<>();
     for (PrivilegeRequest privReq: analysisResult.getAnalyzer().getPrivilegeReqs()) {
       actualPrivilegeNames.add(privReq.getName());
     }
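
Besides swapping factories, the test changes above also retype locals and parameters against the collection interfaces (ArrayList<Type> becomes List<Type>, HashSet becomes Set), so call sites are free to supply any implementation. A small sketch of the idea, using hypothetical names rather than the real addFunction() signature:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class InterfaceTyping {
      // Parameter typed as the List interface rather than ArrayList, so any
      // implementation can be passed in. Names are illustrative only.
      static int countArgs(List<String> argTypes) {
        return argTypes.size();
      }

      public static void main(String[] args) {
        // Locals declared against the interface; the concrete class only
        // appears on the right-hand side.
        List<String> fromArrayList = new ArrayList<>();
        fromArrayList.add("STRING");
        List<String> fromAsList = Arrays.asList("INT", "STRING");

        System.out.println(countArgs(fromArrayList));  // 1
        System.out.println(countArgs(fromAsList));     // 2
      }
    }
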
diff --git a/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java b/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java
index 9de90d6..ccdc8c4 100644
--- a/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/AuthorizationTest.java
@@ -21,6 +21,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -128,7 +129,7 @@ public class AuthorizationTest extends FrontendTestBase {
   private static final List<TestContext> testCtxs_;
 
   @Parameters
-  public static Collection testVectors() { return testCtxs_; }
+  public static Collection<TestContext> testVectors() { return testCtxs_; }
 
   /**
    * Create test contexts used for parameterizing this test. We create these statically
@@ -137,7 +138,7 @@ public class AuthorizationTest extends FrontendTestBase {
    * relative to the work done in the tests.
    */
   static {
-    testCtxs_ = Lists.newArrayList();
+    testCtxs_ = new ArrayList<>();
     // Create and init file based auth config.
     AuthorizationConfig filePolicyAuthzConfig = createPolicyFileAuthzConfig();
     filePolicyAuthzConfig.validateConfig();
@@ -455,7 +456,7 @@ public class AuthorizationTest extends FrontendTestBase {
     sentryService.grantRoleToGroup(USER, roleName, USER.getName());
 
     // select (id, int_col, year) on functional.alltypessmall
-    List<TPrivilege> privileges = Lists.newArrayList();
+    List<TPrivilege> privileges = new ArrayList<>();
     for (String columnName: Arrays.asList("id", "int_col", "year")) {
       TPrivilege priv = new TPrivilege(TPrivilegeLevel.SELECT, TPrivilegeScope.COLUMN,
           false);
@@ -543,7 +544,7 @@ public class AuthorizationTest extends FrontendTestBase {
       sentryService.grantRolePrivilege(USER, roleName, privilege);
     }
 
-    List<TPrivilege> privileges = Lists.newArrayList();
+    List<TPrivilege> privileges = new ArrayList<>();
     for (int i = 0; i < 10; ++i) {
       TPrivilege priv = new TPrivilege(TPrivilegeLevel.SELECT, TPrivilegeScope.COLUMN,
           false);
diff --git a/fe/src/test/java/org/apache/impala/analysis/ExprRewriteRulesTest.java b/fe/src/test/java/org/apache/impala/analysis/ExprRewriteRulesTest.java
index 011a558..b91c526 100644
--- a/fe/src/test/java/org/apache/impala/analysis/ExprRewriteRulesTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/ExprRewriteRulesTest.java
@@ -393,7 +393,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   @Test
   public void testCaseWithExpr() throws ImpalaException {
     ExprRewriteRule rule = SimplifyConditionalsRule.INSTANCE;
-    List<ExprRewriteRule> rules = Lists.newArrayList();
+    List<ExprRewriteRule> rules = new ArrayList<>();
     rules.add(FoldConstantsRule.INSTANCE);
     rules.add(rule);
 
@@ -422,7 +422,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   @Test
   public void testCaseWithoutExpr() throws ImpalaException {
     ExprRewriteRule rule = SimplifyConditionalsRule.INSTANCE;
-    List<ExprRewriteRule> rules = Lists.newArrayList();
+    List<ExprRewriteRule> rules = new ArrayList<>();
     rules.add(FoldConstantsRule.INSTANCE);
     rules.add(rule);
 
@@ -455,7 +455,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   @Test
   public void testDecode() throws ImpalaException {
     ExprRewriteRule rule = SimplifyConditionalsRule.INSTANCE;
-    List<ExprRewriteRule> rules = Lists.newArrayList();
+    List<ExprRewriteRule> rules = new ArrayList<>();
     rules.add(FoldConstantsRule.INSTANCE);
     rules.add(rule);
 
@@ -505,7 +505,7 @@ public class ExprRewriteRulesTest extends FrontendTestBase {
   @Test
   public void testCoalesce() throws ImpalaException {
     ExprRewriteRule rule = SimplifyConditionalsRule.INSTANCE;
-    List<ExprRewriteRule> rules = Lists.newArrayList();
+    List<ExprRewriteRule> rules = new ArrayList<>();
     rules.add(FoldConstantsRule.INSTANCE);
     rules.add(rule);
 
diff --git a/fe/src/test/java/org/apache/impala/analysis/ParserTest.java b/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
index 2810890..9d4e977 100644
--- a/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/ParserTest.java
@@ -249,7 +249,7 @@ public class ParserTest extends FrontendTestBase {
   private void TestJoinHints(String stmt, String... expectedHints) {
     SelectStmt selectStmt = (SelectStmt) ParsesOk(stmt);
     Preconditions.checkState(selectStmt.getTableRefs().size() > 1);
-    List<String> actualHints = Lists.newArrayList();
+    List<String> actualHints = new ArrayList<>();
     assertTrue(selectStmt.getTableRefs().get(0).getJoinHints().isEmpty());
     for (int i = 1; i < selectStmt.getTableRefs().size(); ++i) {
       List<PlanHint> hints = selectStmt.getTableRefs().get(i).getJoinHints();
@@ -262,7 +262,7 @@ public class ParserTest extends FrontendTestBase {
   private void TestTableHints(String stmt, String... expectedHints) {
     SelectStmt selectStmt = (SelectStmt) ParsesOk(stmt);
     Preconditions.checkState(selectStmt.getTableRefs().size() > 0);
-    List<String> actualHints = Lists.newArrayList();
+    List<String> actualHints = new ArrayList<>();
     for (int i = 0; i < selectStmt.getTableRefs().size(); ++i) {
       List<PlanHint> hints = selectStmt.getTableRefs().get(i).getTableHints();
       for (PlanHint hint: hints) actualHints.add(hint.toString());
@@ -274,7 +274,7 @@ public class ParserTest extends FrontendTestBase {
   private void TestTableAndJoinHints(String stmt, String... expectedHints) {
     SelectStmt selectStmt = (SelectStmt) ParsesOk(stmt);
     Preconditions.checkState(selectStmt.getTableRefs().size() > 0);
-    List<String> actualHints = Lists.newArrayList();
+    List<String> actualHints = new ArrayList<>();
     for (int i = 0; i < selectStmt.getTableRefs().size(); ++i) {
       List<PlanHint> joinHints = selectStmt.getTableRefs().get(i).getJoinHints();
       for (PlanHint hint: joinHints) actualHints.add(hint.toString());
@@ -291,7 +291,7 @@ public class ParserTest extends FrontendTestBase {
    */
   private void TestSelectListHints(String stmt, String... expectedHints) {
     SelectStmt selectStmt = (SelectStmt) ParsesOk(stmt);
-    List<String> actualHints = Lists.newArrayList();
+    List<String> actualHints = new ArrayList<>();
     List<PlanHint> hints = selectStmt.getSelectList().getPlanHints();
     for (PlanHint hint: hints) actualHints.add(hint.toString());
     if (actualHints.isEmpty()) actualHints = Lists.newArrayList((String) null);
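
One line in TestSelectListHints deliberately keeps Guava: Lists.newArrayList((String) null) builds a mutable one-element list holding null, which has no equally terse constructor in the JDK. The closest plain-JDK spelling, shown below as a hedged sketch, is new ArrayList<>(Arrays.asList(...)), and the cast on the lone null is still required so the varargs call does not receive a null array:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class SeededList {
      public static void main(String[] args) {
        // Plain-JDK equivalent of Lists.newArrayList((String) null): copy a
        // one-element fixed-size list into a mutable ArrayList.
        List<String> seeded = new ArrayList<>(Arrays.asList((String) null));
        System.out.println(seeded);         // prints [null]
        System.out.println(seeded.size());  // prints 1

        // Without the (String) cast, the bare null would be taken as the
        // whole varargs array and Arrays.asList would fail with a
        // NullPointerException.
      }
    }
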
diff --git a/fe/src/test/java/org/apache/impala/analysis/StmtMetadataLoaderTest.java b/fe/src/test/java/org/apache/impala/analysis/StmtMetadataLoaderTest.java
index 5f33683..1b8556e 100644
--- a/fe/src/test/java/org/apache/impala/analysis/StmtMetadataLoaderTest.java
+++ b/fe/src/test/java/org/apache/impala/analysis/StmtMetadataLoaderTest.java
@@ -23,7 +23,6 @@ import org.apache.impala.analysis.StmtMetadataLoader.StmtTableCache;
 import org.apache.impala.authorization.AuthorizationConfig;
 import org.apache.impala.catalog.Catalog;
 import org.apache.impala.catalog.FeTable;
-import org.apache.impala.catalog.Table;
 import org.apache.impala.common.ImpalaException;
 import org.apache.impala.common.InternalException;
 import org.apache.impala.service.Frontend;