Posted to commits@hive.apache.org by zs...@apache.org on 2010/01/21 11:38:15 UTC

svn commit: r901644 [18/37] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ ql/src/java/org/apache/hadoop/hive/ql/history/ ql/src/jav...

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java Thu Jan 21 10:37:58 2010
@@ -18,119 +18,113 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-import org.antlr.runtime.tree.*;
-
-import org.apache.hadoop.hive.ql.metadata.HiveUtils;
-
-import java.util.Map;
 import java.util.HashMap;
-import java.util.regex.Pattern;
+import java.util.Map;
 import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 
 /**
  * List of error messages thrown by the parser
  **/
 
 public enum ErrorMsg {
-  //SQLStates are taken from Section 12.5 of ISO-9075.
-  //See http://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt
-  //Most will just rollup to the generic syntax error state of 42000, but
-  //specific errors can override the that state.
-  //See this page for how MySQL uses SQLState codes:
-  //http://dev.mysql.com/doc/refman/5.0/en/connector-j-reference-error-sqlstates.html
-
-  GENERIC_ERROR("Exception while processing"),
-  INVALID_TABLE("Table not found", "42S02"),
-  INVALID_COLUMN("Invalid Column Reference"),
-  INVALID_TABLE_OR_COLUMN("Invalid Table Alias or Column Reference"),
-  AMBIGUOUS_TABLE_OR_COLUMN("Ambiguous Table Alias or Column Reference"),
-  INVALID_PARTITION("Partition not found"),
-  AMBIGUOUS_COLUMN("Ambiguous Column Reference"),
-  AMBIGUOUS_TABLE_ALIAS("Ambiguous Table Alias"),
-  INVALID_TABLE_ALIAS("Invalid Table Alias"),
-  NO_TABLE_ALIAS("No Table Alias"),
-  INVALID_FUNCTION("Invalid Function"),
-  INVALID_FUNCTION_SIGNATURE("Function Argument Type Mismatch"),
-  INVALID_OPERATOR_SIGNATURE("Operator Argument Type Mismatch"),
-  INVALID_ARGUMENT("Wrong Arguments"),
-  INVALID_ARGUMENT_LENGTH("Arguments Length Mismatch", "21000"),
-  INVALID_ARGUMENT_TYPE("Argument Type Mismatch"),
-  INVALID_JOIN_CONDITION_1("Both Left and Right Aliases Encountered in Join"),
-  INVALID_JOIN_CONDITION_2("Neither Left nor Right Aliases Encountered in Join"),
-  INVALID_JOIN_CONDITION_3("OR not supported in Join currently"),
-  INVALID_TRANSFORM("TRANSFORM with Other Select Columns not Supported"),
-  DUPLICATE_GROUPBY_KEY("Repeated Key in Group By"),
-  UNSUPPORTED_MULTIPLE_DISTINCTS("DISTINCT on Different Columns not Supported"),
-  NO_SUBQUERY_ALIAS("No Alias For Subquery"),
-  NO_INSERT_INSUBQUERY("Cannot insert in a Subquery. Inserting to table "),
-  NON_KEY_EXPR_IN_GROUPBY("Expression Not In Group By Key"),
-  INVALID_XPATH("General . and [] Operators are Not Supported"),
-  INVALID_PATH("Invalid Path"),
-  ILLEGAL_PATH("Path is not legal"),
-  INVALID_NUMERICAL_CONSTANT("Invalid Numerical Constant"),
-  INVALID_ARRAYINDEX_CONSTANT("Non Constant Expressions for Array Indexes not Supported"),
-  INVALID_MAPINDEX_CONSTANT("Non Constant Expression for Map Indexes not Supported"),
-  INVALID_MAPINDEX_TYPE("Map Key Type does not Match Index Expression Type"),
-  NON_COLLECTION_TYPE("[] not Valid on Non Collection Types"),
-  SELECT_DISTINCT_WITH_GROUPBY("SELECT DISTINCT and GROUP BY can not be in the same query"),
-  COLUMN_REPEATED_IN_PARTITIONING_COLS("Column repeated in partitioning columns"),
-  DUPLICATE_COLUMN_NAMES("Duplicate column name:"),
-  INVALID_BUCKET_NUMBER("Bucket number should be bigger than zero"),
-  COLUMN_REPEATED_IN_CLUSTER_SORT("Same column cannot appear in cluster and sort by"),
-  SAMPLE_RESTRICTION("Cannot Sample on More Than Two Columns"),
-  SAMPLE_COLUMN_NOT_FOUND("Sample Column Not Found"),
-  NO_PARTITION_PREDICATE("No Partition Predicate Found"),
-  INVALID_DOT(". operator is only supported on struct or list of struct types"),
-  INVALID_TBL_DDL_SERDE("Either list of columns or a custom serializer should be specified"),
-  TARGET_TABLE_COLUMN_MISMATCH("Cannot insert into target table because column number/types are different"),
-  TABLE_ALIAS_NOT_ALLOWED("Table Alias not Allowed in Sampling Clause"),
-  CLUSTERBY_DISTRIBUTEBY_CONFLICT("Cannot have both Cluster By and Distribute By Clauses"),
-  ORDERBY_DISTRIBUTEBY_CONFLICT("Cannot have both Order By and Distribute By Clauses"),
-  CLUSTERBY_SORTBY_CONFLICT("Cannot have both Cluster By and Sort By Clauses"),
-  ORDERBY_SORTBY_CONFLICT("Cannot have both Order By and Sort By Clauses"),
-  CLUSTERBY_ORDERBY_CONFLICT("Cannot have both Cluster By and Order By Clauses"),
-  NO_LIMIT_WITH_ORDERBY("In strict mode, limit must be specified if ORDER BY is present"),
-  NO_CARTESIAN_PRODUCT("In strict mode, cartesian product is not allowed. If you really want to perform the operation, set hive.mapred.mode=nonstrict"),
-  UNION_NOTIN_SUBQ("Top level Union is not supported currently; use a subquery for the union"),
-  INVALID_INPUT_FORMAT_TYPE("Input Format must implement InputFormat"),
-  INVALID_OUTPUT_FORMAT_TYPE("Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat"),
-  NO_VALID_PARTN("The query does not reference any valid partition. To run this query, set hive.mapred.mode=nonstrict"),
-  NO_OUTER_MAPJOIN("Map Join cannot be performed with Outer join"),
-  INVALID_MAPJOIN_HINT("neither table specified as map-table"),
-  INVALID_MAPJOIN_TABLE("result of a union cannot be a map table"),
-  NON_BUCKETED_TABLE("Sampling Expression Needed for Non-Bucketed Table"),
-  BUCKETED_NUMBERATOR_BIGGER_DENOMINATOR("Numberator should not be bigger than denaminator in sample clause for Table"),
-  NEED_PARTITION_ERROR("need to specify partition columns because the destination table is partitioned."),
-  CTAS_CTLT_COEXISTENCE("Create table command does not allow LIKE and AS-SELECT in the same command"),
-  LINES_TERMINATED_BY_NON_NEWLINE("LINES TERMINATED BY only supports newline '\\n' right now"),
-  CTAS_COLLST_COEXISTENCE("Create table as select command cannot specify the list of columns for the target table."),
-  CTLT_COLLST_COEXISTENCE("Create table like command cannot specify the list of columns for the target table."),
-  INVALID_SELECT_SCHEMA("Cannot derive schema from the select-clause."),
-  CTAS_PARCOL_COEXISTENCE("CREATE-TABLE-AS-SELECT does not support partitioning in the target table."),
-  CTAS_MULTI_LOADFILE("CREATE-TABLE-AS-SELECT results in multiple file load."),
-  CTAS_EXTTBL_COEXISTENCE("CREATE-TABLE-AS-SELECT cannot create external table."),
-  TABLE_ALREADY_EXISTS("Table already exists:", "42S02"),
-  COLUMN_ALIAS_ALREADY_EXISTS("Column alias already exists:", "42S02"),
-  UDTF_MULTIPLE_EXPR("Only a single expression in the SELECT clause is supported with UDTF's"),
-  UDTF_REQUIRE_AS("UDTF's require an AS clause"),
-  UDTF_NO_GROUP_BY("GROUP BY is not supported with a UDTF in the SELECT clause"),
-  UDTF_NO_SORT_BY("SORT BY is not supported with a UDTF in the SELECT clause"),
-  UDTF_NO_CLUSTER_BY("CLUSTER BY is not supported with a UDTF in the SELECT clause"),
-  UDTF_NO_DISTRIBUTE_BY("DISTRUBTE BY is not supported with a UDTF in the SELECT clause"),
-  UDTF_INVALID_LOCATION("UDTF's are not supported outside the SELECT clause, nor nested in expressions"),
-  UDTF_LATERAL_VIEW("UDTF's cannot be in a select expression when there is a lateral view"),
-  UDTF_ALIAS_MISMATCH("The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF"),
-  LATERAL_VIEW_WITH_JOIN("Join with a lateral view is not supported"),
-  LATERAL_VIEW_INVALID_CHILD("Lateral view AST with invalid child"),
-  OUTPUT_SPECIFIED_MULTIPLE_TIMES("The same output cannot be present multiple times: "),
-  INVALID_AS("AS clause has an invalid number of aliases"),
-  VIEW_COL_MISMATCH("The number of columns produced by the SELECT clause does not match the number of column names specified by CREATE VIEW"),
-  DML_AGAINST_VIEW("A view cannot be used as target table for LOAD or INSERT");
+  // SQLStates are taken from Section 12.5 of ISO-9075.
+  // See http://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt
+  // Most will just rollup to the generic syntax error state of 42000, but
+  // specific errors can override the that state.
+  // See this page for how MySQL uses SQLState codes:
+  // http://dev.mysql.com/doc/refman/5.0/en/connector-j-reference-error-sqlstates.html
+
+  GENERIC_ERROR("Exception while processing"), INVALID_TABLE("Table not found",
+      "42S02"), INVALID_COLUMN("Invalid Column Reference"), INVALID_TABLE_OR_COLUMN(
+      "Invalid Table Alias or Column Reference"), AMBIGUOUS_TABLE_OR_COLUMN(
+      "Ambiguous Table Alias or Column Reference"), INVALID_PARTITION(
+      "Partition not found"), AMBIGUOUS_COLUMN("Ambiguous Column Reference"), AMBIGUOUS_TABLE_ALIAS(
+      "Ambiguous Table Alias"), INVALID_TABLE_ALIAS("Invalid Table Alias"), NO_TABLE_ALIAS(
+      "No Table Alias"), INVALID_FUNCTION("Invalid Function"), INVALID_FUNCTION_SIGNATURE(
+      "Function Argument Type Mismatch"), INVALID_OPERATOR_SIGNATURE(
+      "Operator Argument Type Mismatch"), INVALID_ARGUMENT("Wrong Arguments"), INVALID_ARGUMENT_LENGTH(
+      "Arguments Length Mismatch", "21000"), INVALID_ARGUMENT_TYPE(
+      "Argument Type Mismatch"), INVALID_JOIN_CONDITION_1(
+      "Both Left and Right Aliases Encountered in Join"), INVALID_JOIN_CONDITION_2(
+      "Neither Left nor Right Aliases Encountered in Join"), INVALID_JOIN_CONDITION_3(
+      "OR not supported in Join currently"), INVALID_TRANSFORM(
+      "TRANSFORM with Other Select Columns not Supported"), DUPLICATE_GROUPBY_KEY(
+      "Repeated Key in Group By"), UNSUPPORTED_MULTIPLE_DISTINCTS(
+      "DISTINCT on Different Columns not Supported"), NO_SUBQUERY_ALIAS(
+      "No Alias For Subquery"), NO_INSERT_INSUBQUERY(
+      "Cannot insert in a Subquery. Inserting to table "), NON_KEY_EXPR_IN_GROUPBY(
+      "Expression Not In Group By Key"), INVALID_XPATH(
+      "General . and [] Operators are Not Supported"), INVALID_PATH(
+      "Invalid Path"), ILLEGAL_PATH("Path is not legal"), INVALID_NUMERICAL_CONSTANT(
+      "Invalid Numerical Constant"), INVALID_ARRAYINDEX_CONSTANT(
+      "Non Constant Expressions for Array Indexes not Supported"), INVALID_MAPINDEX_CONSTANT(
+      "Non Constant Expression for Map Indexes not Supported"), INVALID_MAPINDEX_TYPE(
+      "Map Key Type does not Match Index Expression Type"), NON_COLLECTION_TYPE(
+      "[] not Valid on Non Collection Types"), SELECT_DISTINCT_WITH_GROUPBY(
+      "SELECT DISTINCT and GROUP BY can not be in the same query"), COLUMN_REPEATED_IN_PARTITIONING_COLS(
+      "Column repeated in partitioning columns"), DUPLICATE_COLUMN_NAMES(
+      "Duplicate column name:"), INVALID_BUCKET_NUMBER(
+      "Bucket number should be bigger than zero"), COLUMN_REPEATED_IN_CLUSTER_SORT(
+      "Same column cannot appear in cluster and sort by"), SAMPLE_RESTRICTION(
+      "Cannot Sample on More Than Two Columns"), SAMPLE_COLUMN_NOT_FOUND(
+      "Sample Column Not Found"), NO_PARTITION_PREDICATE(
+      "No Partition Predicate Found"), INVALID_DOT(
+      ". operator is only supported on struct or list of struct types"), INVALID_TBL_DDL_SERDE(
+      "Either list of columns or a custom serializer should be specified"), TARGET_TABLE_COLUMN_MISMATCH(
+      "Cannot insert into target table because column number/types are different"), TABLE_ALIAS_NOT_ALLOWED(
+      "Table Alias not Allowed in Sampling Clause"), CLUSTERBY_DISTRIBUTEBY_CONFLICT(
+      "Cannot have both Cluster By and Distribute By Clauses"), ORDERBY_DISTRIBUTEBY_CONFLICT(
+      "Cannot have both Order By and Distribute By Clauses"), CLUSTERBY_SORTBY_CONFLICT(
+      "Cannot have both Cluster By and Sort By Clauses"), ORDERBY_SORTBY_CONFLICT(
+      "Cannot have both Order By and Sort By Clauses"), CLUSTERBY_ORDERBY_CONFLICT(
+      "Cannot have both Cluster By and Order By Clauses"), NO_LIMIT_WITH_ORDERBY(
+      "In strict mode, limit must be specified if ORDER BY is present"), NO_CARTESIAN_PRODUCT(
+      "In strict mode, cartesian product is not allowed. If you really want to perform the operation, set hive.mapred.mode=nonstrict"), UNION_NOTIN_SUBQ(
+      "Top level Union is not supported currently; use a subquery for the union"), INVALID_INPUT_FORMAT_TYPE(
+      "Input Format must implement InputFormat"), INVALID_OUTPUT_FORMAT_TYPE(
+      "Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat"), NO_VALID_PARTN(
+      "The query does not reference any valid partition. To run this query, set hive.mapred.mode=nonstrict"), NO_OUTER_MAPJOIN(
+      "Map Join cannot be performed with Outer join"), INVALID_MAPJOIN_HINT(
+      "neither table specified as map-table"), INVALID_MAPJOIN_TABLE(
+      "result of a union cannot be a map table"), NON_BUCKETED_TABLE(
+      "Sampling Expression Needed for Non-Bucketed Table"), BUCKETED_NUMBERATOR_BIGGER_DENOMINATOR(
+      "Numberator should not be bigger than denaminator in sample clause for Table"), NEED_PARTITION_ERROR(
+      "need to specify partition columns because the destination table is partitioned."), CTAS_CTLT_COEXISTENCE(
+      "Create table command does not allow LIKE and AS-SELECT in the same command"), LINES_TERMINATED_BY_NON_NEWLINE(
+      "LINES TERMINATED BY only supports newline '\\n' right now"), CTAS_COLLST_COEXISTENCE(
+      "Create table as select command cannot specify the list of columns for the target table."), CTLT_COLLST_COEXISTENCE(
+      "Create table like command cannot specify the list of columns for the target table."), INVALID_SELECT_SCHEMA(
+      "Cannot derive schema from the select-clause."), CTAS_PARCOL_COEXISTENCE(
+      "CREATE-TABLE-AS-SELECT does not support partitioning in the target table."), CTAS_MULTI_LOADFILE(
+      "CREATE-TABLE-AS-SELECT results in multiple file load."), CTAS_EXTTBL_COEXISTENCE(
+      "CREATE-TABLE-AS-SELECT cannot create external table."), TABLE_ALREADY_EXISTS(
+      "Table already exists:", "42S02"), COLUMN_ALIAS_ALREADY_EXISTS(
+      "Column alias already exists:", "42S02"), UDTF_MULTIPLE_EXPR(
+      "Only a single expression in the SELECT clause is supported with UDTF's"), UDTF_REQUIRE_AS(
+      "UDTF's require an AS clause"), UDTF_NO_GROUP_BY(
+      "GROUP BY is not supported with a UDTF in the SELECT clause"), UDTF_NO_SORT_BY(
+      "SORT BY is not supported with a UDTF in the SELECT clause"), UDTF_NO_CLUSTER_BY(
+      "CLUSTER BY is not supported with a UDTF in the SELECT clause"), UDTF_NO_DISTRIBUTE_BY(
+      "DISTRUBTE BY is not supported with a UDTF in the SELECT clause"), UDTF_INVALID_LOCATION(
+      "UDTF's are not supported outside the SELECT clause, nor nested in expressions"), UDTF_LATERAL_VIEW(
+      "UDTF's cannot be in a select expression when there is a lateral view"), UDTF_ALIAS_MISMATCH(
+      "The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF"), LATERAL_VIEW_WITH_JOIN(
+      "Join with a lateral view is not supported"), LATERAL_VIEW_INVALID_CHILD(
+      "Lateral view AST with invalid child"), OUTPUT_SPECIFIED_MULTIPLE_TIMES(
+      "The same output cannot be present multiple times: "), INVALID_AS(
+      "AS clause has an invalid number of aliases"), VIEW_COL_MISMATCH(
+      "The number of columns produced by the SELECT clause does not match the number of column names specified by CREATE VIEW"), DML_AGAINST_VIEW(
+      "A view cannot be used as target table for LOAD or INSERT");
   private String mesg;
   private String SQLState;
 
   private static char SPACE = ' ';
-  private static Pattern ERROR_MESSAGE_PATTERN = Pattern.compile(".*line [0-9]+:[0-9]+ (.*)");
+  private static Pattern ERROR_MESSAGE_PATTERN = Pattern
+      .compile(".*line [0-9]+:[0-9]+ (.*)");
   private static Map<String, ErrorMsg> mesgToErrorMsgMap = new HashMap<String, ErrorMsg>();
   private static int minMesgLength = -1;
 
@@ -139,20 +133,22 @@
       mesgToErrorMsgMap.put(errorMsg.getMsg().trim(), errorMsg);
 
       int length = errorMsg.getMsg().trim().length();
-      if (minMesgLength == -1 || length < minMesgLength)
+      if (minMesgLength == -1 || length < minMesgLength) {
         minMesgLength = length;
+      }
     }
   }
 
   /**
-   * For a given error message string, searches for a <code>ErrorMsg</code>
-   * enum that appears to be a match. If an match is found, returns the
+   * For a given error message string, searches for a <code>ErrorMsg</code> enum
+   * that appears to be a match. If an match is found, returns the
    * <code>SQLState</code> associated with the <code>ErrorMsg</code>. If a match
    * is not found or <code>ErrorMsg</code> has no <code>SQLState</code>, returns
    * the <code>SQLState</code> bound to the <code>GENERIC_ERROR</code>
    * <code>ErrorMsg</code>.
-   *
-   * @param mesg An error message string
+   * 
+   * @param mesg
+   *          An error message string
    * @return SQLState
    */
   public static String findSQLState(String mesg) {
@@ -161,33 +157,41 @@
       return GENERIC_ERROR.getSQLState();
     }
 
-    //first see if there is a direct match
+    // first see if there is a direct match
     ErrorMsg errorMsg = mesgToErrorMsgMap.get(mesg);
     if (errorMsg != null) {
-      if (errorMsg.getSQLState() != null)
+      if (errorMsg.getSQLState() != null) {
         return errorMsg.getSQLState();
-      else
+      } else {
         return GENERIC_ERROR.getSQLState();
+      }
     }
 
-    //if not see if the mesg follows type of format, which is typically the case:
-    //line 1:14 Table not found table_name
+    // if not see if the mesg follows type of format, which is typically the
+    // case:
+    // line 1:14 Table not found table_name
     String truncatedMesg = mesg.trim();
     Matcher match = ERROR_MESSAGE_PATTERN.matcher(mesg);
-    if (match.matches()) truncatedMesg = match.group(1);
+    if (match.matches()) {
+      truncatedMesg = match.group(1);
+    }
 
-    //appends might exist after the root message, so strip tokens off until we match
+    // appends might exist after the root message, so strip tokens off until we
+    // match
     while (truncatedMesg.length() > minMesgLength) {
       errorMsg = mesgToErrorMsgMap.get(truncatedMesg.trim());
       if (errorMsg != null) {
-        if (errorMsg.getSQLState() != null)
+        if (errorMsg.getSQLState() != null) {
           return errorMsg.getSQLState();
-        else
+        } else {
           return GENERIC_ERROR.getSQLState();
+        }
       }
 
       int lastSpace = truncatedMesg.lastIndexOf(SPACE);
-      if (lastSpace == -1) break;
+      if (lastSpace == -1) {
+        break;
+      }
 
       // hack off the last word and try again
       truncatedMesg = truncatedMesg.substring(0, lastSpace).trim();
@@ -197,7 +201,7 @@
   }
 
   ErrorMsg(String mesg) {
-    //42000 is the generic SQLState for syntax error.
+    // 42000 is the generic SQLState for syntax error.
     this(mesg, "42000");
   }
 
@@ -211,7 +215,7 @@
       return tree.getToken().getLine();
     }
 
-    return getLine((ASTNode)tree.getChild(0));
+    return getLine((ASTNode) tree.getChild(0));
   }
 
   private static int getCharPositionInLine(ASTNode tree) {
@@ -219,16 +223,17 @@
       return tree.getToken().getCharPositionInLine();
     }
 
-    return getCharPositionInLine((ASTNode)tree.getChild(0));
+    return getCharPositionInLine((ASTNode) tree.getChild(0));
   }
 
-  // Dirty hack as this will throw away spaces and other things - find a better way!
+  // Dirty hack as this will throw away spaces and other things - find a better
+  // way!
   private String getText(ASTNode tree) {
     if (tree.getChildCount() == 0) {
       return tree.getText();
     }
 
-    return getText((ASTNode)tree.getChild(tree.getChildCount() - 1));
+    return getText((ASTNode) tree.getChild(tree.getChildCount() - 1));
   }
 
   public String getMsg(ASTNode tree) {
@@ -269,7 +274,7 @@
   }
 
   String getMsg(Tree tree) {
-    return getMsg((ASTNode)tree);
+    return getMsg((ASTNode) tree);
   }
 
   String getMsg(ASTNode tree, String reason) {
@@ -277,7 +282,7 @@
   }
 
   String getMsg(Tree tree, String reason) {
-    return getMsg((ASTNode)tree, reason);
+    return getMsg((ASTNode) tree, reason);
   }
 
   public String getMsg(String reason) {

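For reference, a minimal usage sketch of the findSQLState() matching shown above (a hypothetical demo class, not part of this commit; it assumes the Hive ql classes are on the classpath and that the method falls back to GENERIC_ERROR's state when nothing matches, as the surrounding code implies):

import org.apache.hadoop.hive.ql.parse.ErrorMsg;

public class ErrorMsgDemo {
  public static void main(String[] args) {
    // A direct match against a registered message returns its specific SQLState.
    System.out.println(ErrorMsg.findSQLState("Table not found"));               // expected: 42S02

    // "line <n>:<m> <message> <appended tokens>" is first reduced by the regex,
    // then trailing words are stripped one at a time until a message matches.
    System.out.println(ErrorMsg.findSQLState("line 1:14 Table not found t1"));  // expected: 42S02

    // Anything unrecognized is expected to fall back to the generic 42000 state.
    System.out.println(ErrorMsg.findSQLState("something else went wrong"));     // expected: 42000
  }
}
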
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Thu Jan 21 10:37:58 2010
@@ -24,44 +24,43 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.explainWork;
 
 public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
 
-  
   public ExplainSemanticAnalyzer(HiveConf conf) throws SemanticException {
     super(conf);
   }
 
+  @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
     ctx.setExplain(true);
 
     // Create a semantic analyzer for the query
-    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, (ASTNode)ast.getChild(0));
-    sem.analyze((ASTNode)ast.getChild(0), ctx);
-    
+    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, (ASTNode) ast
+        .getChild(0));
+    sem.analyze((ASTNode) ast.getChild(0), ctx);
+
     boolean extended = false;
     if (ast.getChildCount() > 1) {
       extended = true;
     }
-    
+
     ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
     List<Task<? extends Serializable>> tasks = sem.getRootTasks();
     Task<? extends Serializable> fetchTask = sem.getFetchTask();
     if (tasks == null) {
-    	if (fetchTask != null) {
-    		tasks = new ArrayList<Task<? extends Serializable>>();
-    		tasks.add(fetchTask);
-    	}
+      if (fetchTask != null) {
+        tasks = new ArrayList<Task<? extends Serializable>>();
+        tasks.add(fetchTask);
+      }
+    } else if (fetchTask != null) {
+      tasks.add(fetchTask);
     }
-    else if (fetchTask != null)
-    	tasks.add(fetchTask); 
-    		
+
     rootTasks.add(TaskFactory.get(new explainWork(ctx.getResFile(), tasks,
-                                                  ((ASTNode)ast.getChild(0)).toStringTree(),
-                                                  extended), this.conf));
+        ((ASTNode) ast.getChild(0)).toStringTree(), extended), conf));
   }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java Thu Jan 21 10:37:58 2010
@@ -21,39 +21,39 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.ql.plan.createFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.dropFunctionDesc;
 
 public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer {
-  private static final Log LOG =
-    LogFactory.getLog("hive.ql.parse.FunctionSemanticAnalyzer");
-  
+  private static final Log LOG = LogFactory
+      .getLog("hive.ql.parse.FunctionSemanticAnalyzer");
+
   public FunctionSemanticAnalyzer(HiveConf conf) throws SemanticException {
     super(conf);
   }
-  
+
+  @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
-    if (ast.getToken().getType() == HiveParser.TOK_CREATEFUNCTION)
+    if (ast.getToken().getType() == HiveParser.TOK_CREATEFUNCTION) {
       analyzeCreateFunction(ast);
-    if (ast.getToken().getType() == HiveParser.TOK_DROPFUNCTION)
+    }
+    if (ast.getToken().getType() == HiveParser.TOK_DROPFUNCTION) {
       analyzeDropFunction(ast);
+    }
 
     LOG.info("analyze done");
   }
-  
-  private void analyzeCreateFunction(ASTNode ast) 
-      throws SemanticException {
+
+  private void analyzeCreateFunction(ASTNode ast) throws SemanticException {
     String functionName = ast.getChild(0).getText();
     String className = unescapeSQLString(ast.getChild(1).getText());
     createFunctionDesc desc = new createFunctionDesc(functionName, className);
     rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
   }
-  
-  private void analyzeDropFunction(ASTNode ast) 
-      throws SemanticException {
+
+  private void analyzeDropFunction(ASTNode ast) throws SemanticException {
     String functionName = ast.getChild(0).getText();
     dropFunctionDesc desc = new dropFunctionDesc(functionName);
     rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java Thu Jan 21 10:37:58 2010
@@ -32,34 +32,40 @@
 
   /**
    * constructor of the walker - the dispatcher is passed
-   * @param disp the dispatcher to be called for each node visited
+   * 
+   * @param disp
+   *          the dispatcher to be called for each node visited
    */
   public GenMapRedWalker(Dispatcher disp) {
     super(disp);
   }
-  
+
   /**
    * Walk the given operator
-   * @param nd operator being walked
+   * 
+   * @param nd
+   *          operator being walked
    */
   @Override
   public void walk(Node nd) throws SemanticException {
     List<? extends Node> children = nd.getChildren();
-    
+
     // maintain the stack of operators encountered
     opStack.push(nd);
     dispatch(nd, opStack);
 
     // kids of reduce sink operator need not be traversed again
-    if ((children == null) ||
-        ((nd instanceof ReduceSinkOperator) && (getDispatchedList().containsAll(children)))) {
+    if ((children == null)
+        || ((nd instanceof ReduceSinkOperator) && (getDispatchedList()
+            .containsAll(children)))) {
       opStack.pop();
       return;
     }
 
     // move all the children to the front of queue
-    for (Node ch : children)
+    for (Node ch : children) {
       walk(ch);
+    }
 
     // done with this operator
     opStack.pop();

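A simplified, self-contained re-creation of the traversal rule in GenMapRedWalker.walk() above may help: pre-order dispatch with an operator stack, re-descending into children except when a "boundary" node (ReduceSinkOperator in the real walker) finds all of its children already dispatched. The Node type and class name here are hypothetical stand-ins, not Hive classes:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class WalkSketch {
  static class Node {
    final String name;
    final boolean boundary;                       // stands in for ReduceSinkOperator
    final List<Node> children = new ArrayList<>();
    Node(String name, boolean boundary) { this.name = name; this.boundary = boundary; }
  }

  private final Set<Node> dispatched = new HashSet<>();
  private final Deque<Node> opStack = new ArrayDeque<>();

  void walk(Node nd) {
    // maintain the stack of operators encountered, dispatch on the way down
    opStack.push(nd);
    dispatch(nd);

    // children of a boundary node need not be traversed again once dispatched
    boolean skipChildren = nd.children.isEmpty()
        || (nd.boundary && dispatched.containsAll(nd.children));
    if (!skipChildren) {
      for (Node ch : nd.children) {
        walk(ch);
      }
    }
    opStack.pop();
  }

  void dispatch(Node nd) {
    dispatched.add(nd);
    System.out.println("visit " + nd.name + " at depth " + opStack.size());
  }

  public static void main(String[] args) {
    Node child = new Node("FS", false);
    Node sink = new Node("RS", true);
    Node scan = new Node("TS", false);
    sink.children.add(child);
    scan.children.add(sink);
    new WalkSketch().walk(scan);
  }
}
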
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/InputSignature.java Thu Jan 21 10:37:58 2010
@@ -19,47 +19,48 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import java.util.ArrayList;
-import java.lang.Class;
-import java.lang.Object;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
- * The input signature of a function or operator. The signature basically consists
- * of name, list of parameter types.
- *
+ * The input signature of a function or operator. The signature basically
+ * consists of name, list of parameter types.
+ * 
  **/
 
 public class InputSignature {
-  private String name;
-  private ArrayList<TypeInfo> typeArray;
+  private final String name;
+  private final ArrayList<TypeInfo> typeArray;
 
   @SuppressWarnings("unused")
-  private static final Log LOG = LogFactory.getLog(InputSignature.class.getName());
+  private static final Log LOG = LogFactory.getLog(InputSignature.class
+      .getName());
 
   public InputSignature(String name) {
     this.name = name;
     typeArray = new ArrayList<TypeInfo>();
   }
 
-  public InputSignature(String name, TypeInfo ... classList) {
+  public InputSignature(String name, TypeInfo... classList) {
     this(name);
-    
+
     if (classList.length != 0) {
-      for(TypeInfo cl: classList) {
+      for (TypeInfo cl : classList) {
         typeArray.add(cl);
       }
     }
   }
 
-  public InputSignature(String name, Class<?> ... classList) {
+  public InputSignature(String name, Class<?>... classList) {
     this(name);
-    
+
     if (classList.length != 0) {
-      for(Class<?> cl: classList) {
-        typeArray.add(TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(cl));
+      for (Class<?> cl : classList) {
+        typeArray.add(TypeInfoFactory
+            .getPrimitiveTypeInfoFromPrimitiveWritable(cl));
       }
     }
   }
@@ -76,6 +77,7 @@
     return typeArray;
   }
 
+  @Override
   public boolean equals(Object obj) {
     if (obj == null) {
       return false;
@@ -83,9 +85,8 @@
 
     InputSignature other = null;
     try {
-      other = (InputSignature)obj;
-    }
-    catch (ClassCastException cce) {
+      other = (InputSignature) obj;
+    } catch (ClassCastException cce) {
       return false;
     }
 
@@ -93,16 +94,18 @@
         && (other.typeArray.equals(typeArray));
   }
 
+  @Override
   public int hashCode() {
     return toString().hashCode();
   }
 
+  @Override
   public String toString() {
     StringBuffer sb = new StringBuffer();
     sb.append(getName());
     sb.append("(");
     boolean isfirst = true;
-    for(TypeInfo cls: getTypeArray()) {
+    for (TypeInfo cls : getTypeArray()) {
       if (!isfirst) {
         sb.append(",");
       }

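A short sketch of how InputSignature equality behaves with the varargs constructor added above (hypothetical demo class; "concat" is only an illustrative function name, and Hive's ql and serde2 classes are assumed to be on the classpath):

import org.apache.hadoop.hive.ql.parse.InputSignature;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class InputSignatureDemo {
  public static void main(String[] args) {
    InputSignature a = new InputSignature("concat",
        TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
    InputSignature b = new InputSignature("concat",
        TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);

    // Two signatures compare equal when the name and the ordered parameter types match.
    System.out.println(a.equals(b));                   // expected: true
    // hashCode() delegates to toString(), so equal signatures hash equally.
    System.out.println(a.hashCode() == b.hashCode());  // expected: true
  }
}
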
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java Thu Jan 21 10:37:58 2010
@@ -48,35 +48,39 @@
     super(conf);
   }
 
-  public static FileStatus [] matchFilesOrDir(FileSystem fs, Path path) throws IOException {
-    FileStatus [] srcs = fs.globStatus(path);
-    if((srcs != null) && srcs.length == 1) {
-      if(srcs[0].isDir()) {
+  public static FileStatus[] matchFilesOrDir(FileSystem fs, Path path)
+      throws IOException {
+    FileStatus[] srcs = fs.globStatus(path);
+    if ((srcs != null) && srcs.length == 1) {
+      if (srcs[0].isDir()) {
         srcs = fs.listStatus(srcs[0].getPath());
       }
     }
     return (srcs);
   }
 
-  private URI initializeFromURI(String fromPath) throws IOException, URISyntaxException {
+  private URI initializeFromURI(String fromPath) throws IOException,
+      URISyntaxException {
     URI fromURI = new Path(fromPath).toUri();
 
     String fromScheme = fromURI.getScheme();
     String fromAuthority = fromURI.getAuthority();
     String path = fromURI.getPath();
 
-    // generate absolute path relative to current directory or hdfs home directory
-    if(!path.startsWith("/")) {
-      if(isLocal) {
+    // generate absolute path relative to current directory or hdfs home
+    // directory
+    if (!path.startsWith("/")) {
+      if (isLocal) {
         path = new Path(System.getProperty("user.dir"), path).toString();
       } else {
-        path = new Path(new Path("/user/"+System.getProperty("user.name")), path).toString();
+        path = new Path(new Path("/user/" + System.getProperty("user.name")),
+            path).toString();
       }
     }
 
     // set correct scheme and authority
-    if(StringUtils.isEmpty(fromScheme)) {
-      if(isLocal) {
+    if (StringUtils.isEmpty(fromScheme)) {
+      if (isLocal) {
         // file for local
         fromScheme = "file";
       } else {
@@ -88,7 +92,7 @@
     }
 
     // if scheme is specified but not authority then use the default authority
-    if(fromScheme.equals("hdfs") && StringUtils.isEmpty(fromAuthority)) {
+    if (fromScheme.equals("hdfs") && StringUtils.isEmpty(fromAuthority)) {
       URI defaultURI = FileSystem.get(conf).getUri();
       fromAuthority = defaultURI.getAuthority();
     }
@@ -97,51 +101,53 @@
     return new URI(fromScheme, fromAuthority, path, null, null);
   }
 
-
-  private void applyConstraints(URI fromURI, URI toURI, Tree ast, boolean isLocal) throws SemanticException {
-    if(!fromURI.getScheme().equals("file") &&
-       !fromURI.getScheme().equals("hdfs")) {
-      throw new SemanticException (ErrorMsg.INVALID_PATH.getMsg(ast, "only \"file\" or \"hdfs\" file systems accepted"));
+  private void applyConstraints(URI fromURI, URI toURI, Tree ast,
+      boolean isLocal) throws SemanticException {
+    if (!fromURI.getScheme().equals("file")
+        && !fromURI.getScheme().equals("hdfs")) {
+      throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast,
+          "only \"file\" or \"hdfs\" file systems accepted"));
     }
 
     // local mode implies that scheme should be "file"
     // we can change this going forward
-    if(isLocal && !fromURI.getScheme().equals("file")) {
-      throw new SemanticException (ErrorMsg.ILLEGAL_PATH.getMsg(ast, "Source file system should be \"file\" if \"local\" is specified"));
+    if (isLocal && !fromURI.getScheme().equals("file")) {
+      throw new SemanticException(ErrorMsg.ILLEGAL_PATH.getMsg(ast,
+          "Source file system should be \"file\" if \"local\" is specified"));
     }
 
     try {
-      FileStatus [] srcs = matchFilesOrDir(FileSystem.get(fromURI, conf),
-                                           new Path(fromURI.getScheme(),
-                                                    fromURI.getAuthority(),
-                                                    fromURI.getPath()));
-
-      if(srcs == null || srcs.length == 0) {
-        throw new SemanticException (ErrorMsg.INVALID_PATH.getMsg(ast, "No files matching path " + fromURI));
+      FileStatus[] srcs = matchFilesOrDir(FileSystem.get(fromURI, conf),
+          new Path(fromURI.getScheme(), fromURI.getAuthority(), fromURI
+              .getPath()));
+
+      if (srcs == null || srcs.length == 0) {
+        throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast,
+            "No files matching path " + fromURI));
       }
 
-
-      for(FileStatus oneSrc: srcs) {
-        if(oneSrc.isDir()) {
-          throw new SemanticException
-            (ErrorMsg.INVALID_PATH.getMsg(ast,
-                                          "source contains directory: " + oneSrc.getPath().toString()));
+      for (FileStatus oneSrc : srcs) {
+        if (oneSrc.isDir()) {
+          throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast,
+              "source contains directory: " + oneSrc.getPath().toString()));
         }
       }
     } catch (IOException e) {
-      // Has to use full name to make sure it does not conflict with org.apache.commons.lang.StringUtils
-      throw new SemanticException (ErrorMsg.INVALID_PATH.getMsg(ast), e);
+      // Has to use full name to make sure it does not conflict with
+      // org.apache.commons.lang.StringUtils
+      throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast), e);
     }
 
-
     // only in 'local' mode do we copy stuff from one place to another.
     // reject different scheme/authority in other cases.
-    if(!isLocal && (!StringUtils.equals(fromURI.getScheme(), toURI.getScheme()) ||
-                    !StringUtils.equals(fromURI.getAuthority(), toURI.getAuthority()))) {
-      String reason = "Move from: " + fromURI.toString() + " to: " + toURI.toString() + " is not valid. " +
-      		"Please check that values for params \"default.fs.name\" and " +
-      		"\"hive.metastore.warehouse.dir\" do not conflict.";
-      throw new SemanticException(ErrorMsg.ILLEGAL_PATH.getMsg(ast, reason)) ;
+    if (!isLocal
+        && (!StringUtils.equals(fromURI.getScheme(), toURI.getScheme()) || !StringUtils
+            .equals(fromURI.getAuthority(), toURI.getAuthority()))) {
+      String reason = "Move from: " + fromURI.toString() + " to: "
+          + toURI.toString() + " is not valid. "
+          + "Please check that values for params \"default.fs.name\" and "
+          + "\"hive.metastore.warehouse.dir\" do not conflict.";
+      throw new SemanticException(ErrorMsg.ILLEGAL_PATH.getMsg(ast, reason));
     }
   }
 
@@ -151,12 +157,12 @@
     Tree from_t = ast.getChild(0);
     Tree table_t = ast.getChild(1);
 
-    if(ast.getChildCount() == 4) {
+    if (ast.getChildCount() == 4) {
       isOverWrite = isLocal = true;
     }
 
-    if(ast.getChildCount() == 3) {
-      if(ast.getChild(2).getText().toLowerCase().equals("local")) {
+    if (ast.getChildCount() == 3) {
+      if (ast.getChild(2).getText().toLowerCase().equals("local")) {
         isLocal = true;
       } else {
         isOverWrite = true;
@@ -169,9 +175,11 @@
       String fromPath = stripQuotes(from_t.getText());
       fromURI = initializeFromURI(fromPath);
     } catch (IOException e) {
-      throw new SemanticException (ErrorMsg.INVALID_PATH.getMsg(from_t, e.getMessage()), e);
+      throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(from_t, e
+          .getMessage()), e);
     } catch (URISyntaxException e) {
-      throw new SemanticException (ErrorMsg.INVALID_PATH.getMsg(from_t, e.getMessage()), e);
+      throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(from_t, e
+          .getMessage()), e);
     }
 
     // initialize destination table/partition
@@ -180,7 +188,8 @@
     if (ts.tableHandle.isView()) {
       throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg());
     }
-    URI toURI = (ts.partHandle != null) ? ts.partHandle.getDataLocation() : ts.tableHandle.getDataLocation();
+    URI toURI = (ts.partHandle != null) ? ts.partHandle.getDataLocation()
+        : ts.tableHandle.getDataLocation();
 
     List<FieldSchema> parts = ts.tableHandle.getTTable().getPartitionKeys();
     if (isOverWrite && (parts != null && parts.size() > 0)
@@ -194,28 +203,32 @@
     Task<? extends Serializable> rTask = null;
 
     // create copy work
-    if(isLocal) {
-      // if the local keyword is specified - we will always make a copy. this might seem redundant in the case
-      // that the hive warehouse is also located in the local file system - but that's just a test case.
+    if (isLocal) {
+      // if the local keyword is specified - we will always make a copy. this
+      // might seem redundant in the case
+      // that the hive warehouse is also located in the local file system - but
+      // that's just a test case.
       String copyURIStr = ctx.getExternalTmpFileURI(toURI);
       URI copyURI = URI.create(copyURIStr);
-      rTask = TaskFactory.get(new copyWork(fromURI.toString(), copyURIStr), this.conf);
+      rTask = TaskFactory.get(new copyWork(fromURI.toString(), copyURIStr),
+          conf);
       fromURI = copyURI;
     }
 
     // create final load/move work
 
     String loadTmpPath = ctx.getExternalTmpFileURI(toURI);
-    loadTableDesc loadTableWork = new loadTableDesc(fromURI.toString(), loadTmpPath,
-                                        Utilities.getTableDesc(ts.tableHandle),
-                                        (ts.partSpec != null) ? ts.partSpec :
-                                        new HashMap<String, String> (),
-                                        isOverWrite);
-
-    if(rTask != null) {
-      rTask.addDependentTask(TaskFactory.get(new moveWork(getInputs(), getOutputs(), loadTableWork, null, true), this.conf));
+    loadTableDesc loadTableWork = new loadTableDesc(fromURI.toString(),
+        loadTmpPath, Utilities.getTableDesc(ts.tableHandle),
+        (ts.partSpec != null) ? ts.partSpec : new HashMap<String, String>(),
+        isOverWrite);
+
+    if (rTask != null) {
+      rTask.addDependentTask(TaskFactory.get(new moveWork(getInputs(),
+          getOutputs(), loadTableWork, null, true), conf));
     } else {
-      rTask = TaskFactory.get(new moveWork(getInputs(), getOutputs(), loadTableWork, null, true), this.conf);
+      rTask = TaskFactory.get(new moveWork(getInputs(), getOutputs(),
+          loadTableWork, null, true), conf);
     }
 
     rootTasks.add(rTask);

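The path handling in initializeFromURI() above can be summarized with a standalone sketch (a simplified stand-in using java.net.URI instead of Hadoop's Path, with hypothetical names; the default HDFS authority is passed in rather than read from the configuration): relative paths are anchored at the working directory for LOCAL loads or at /user/<name> otherwise, a missing scheme defaults to "file" or "hdfs", and an hdfs URI without an authority picks up the default filesystem's.

import java.net.URI;
import java.net.URISyntaxException;

public class LoadPathDemo {
  static URI normalize(String fromPath, boolean isLocal, String defaultHdfsAuthority)
      throws URISyntaxException {
    URI u = URI.create(fromPath);
    String scheme = u.getScheme();
    String authority = u.getAuthority();
    String path = u.getPath();

    // generate an absolute path relative to the current directory (LOCAL)
    // or the user's HDFS home directory
    if (!path.startsWith("/")) {
      path = isLocal
          ? System.getProperty("user.dir") + "/" + path
          : "/user/" + System.getProperty("user.name") + "/" + path;
    }

    // set the correct scheme and authority when they are missing
    if (scheme == null) {
      scheme = isLocal ? "file" : "hdfs";
    }
    if ("hdfs".equals(scheme) && (authority == null || authority.isEmpty())) {
      authority = defaultHdfsAuthority;
    }
    return new URI(scheme, authority, path, null, null);
  }

  public static void main(String[] args) throws URISyntaxException {
    System.out.println(normalize("data/kv1.txt", true, null));
    System.out.println(normalize("/user/hive/kv1.txt", false, "namenode:8020"));
  }
}
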
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/OpParseContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/OpParseContext.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/OpParseContext.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/OpParseContext.java Thu Jan 21 10:37:58 2010
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-
 /**
  * Implementation of the Operator Parse Context. It maintains the parse context
  * that may be needed by an operator. Currently, it only maintains the row
@@ -26,13 +25,14 @@
  **/
 
 public class OpParseContext {
-  private RowResolver rr;  // row resolver for the operator
+  private RowResolver rr; // row resolver for the operator
 
-  public OpParseContext() {  
+  public OpParseContext() {
   }
-  
+
   /**
-   * @param rr row resolver
+   * @param rr
+   *          row resolver
    */
   public OpParseContext(RowResolver rr) {
     this.rr = rr;
@@ -46,7 +46,8 @@
   }
 
   /**
-   * @param rr the row resolver to set
+   * @param rr
+   *          the row resolver to set
    */
   public void setRR(RowResolver rr) {
     this.rr = rr;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Thu Jan 21 10:37:58 2010
@@ -25,18 +25,18 @@
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.GroupByOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
 import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.loadFileDesc;
 import org.apache.hadoop.hive.ql.plan.loadTableDesc;
-import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
 import org.apache.hadoop.hive.ql.plan.filterDesc.sampleDesc;
 
 /**
@@ -46,7 +46,7 @@
  * populated. Note that since the parse context contains the operator tree, it
  * can be easily retrieved by the next optimization step or finally for task
  * generation after the plan has been completely optimized.
- *
+ * 
  **/
 
 public class ParseContext {
@@ -66,13 +66,18 @@
   private HashMap<String, String> idToTableNameMap;
   private int destTableId;
   private UnionProcContext uCtx;
-  private List<MapJoinOperator> listMapJoinOpsNoReducer;  // list of map join operators with no reducer
+  private List<MapJoinOperator> listMapJoinOpsNoReducer; // list of map join
+                                                         // operators with no
+                                                         // reducer
   private Map<GroupByOperator, Set<String>> groupOpToInputTables;
   private Map<String, PrunedPartitionList> prunedPartitions;
 
-  // is set to true if the expression only contains partitioning columns and not any other column reference.
-  // This is used to optimize select * from table where ... scenario, when the where condition only references
-  // partitioning columns - the partitions are identified and streamed directly to the client without requiring
+  // is set to true if the expression only contains partitioning columns and not
+  // any other column reference.
+  // This is used to optimize select * from table where ... scenario, when the
+  // where condition only references
+  // partitioning columns - the partitions are identified and streamed directly
+  // to the client without requiring
   // a map-reduce job
   private boolean hasNonPartCols;
 
@@ -93,21 +98,28 @@
    * @param opParseCtx
    *          operator parse context - contains a mapping from operator to
    *          operator parse state (row resolver etc.)
-   * @param joinContext context needed join processing (map join specifically)
-   * @param topToTable the top tables being processed
+   * @param joinContext
+   *          context needed join processing (map join specifically)
+   * @param topToTable
+   *          the top tables being processed
    * @param loadTableWork
    *          list of destination tables being loaded
    * @param loadFileWork
    *          list of destination files being loaded
-   * @param ctx parse context
+   * @param ctx
+   *          parse context
    * @param idToTableNameMap
    * @param destTableId
    * @param uCtx
    * @param listMapJoinOpsNoReducer
    *          list of map join operators with no reducer
-   * @param opToSamplePruner operator to sample pruner map
+   * @param opToSamplePruner
+   *          operator to sample pruner map
    */
-  public ParseContext(HiveConf conf, QB qb, ASTNode ast,
+  public ParseContext(
+      HiveConf conf,
+      QB qb,
+      ASTNode ast,
       HashMap<TableScanOperator, exprNodeDesc> opToPartPruner,
       HashMap<String, Operator<? extends Serializable>> topOps,
       HashMap<String, Operator<? extends Serializable>> topSelOps,
@@ -115,8 +127,8 @@
       Map<JoinOperator, QBJoinTree> joinContext,
       HashMap<TableScanOperator, Table> topToTable,
       List<loadTableDesc> loadTableWork, List<loadFileDesc> loadFileWork,
-      Context ctx, HashMap<String, String> idToTableNameMap, int destTableId, UnionProcContext uCtx,
-      List<MapJoinOperator> listMapJoinOpsNoReducer,
+      Context ctx, HashMap<String, String> idToTableNameMap, int destTableId,
+      UnionProcContext uCtx, List<MapJoinOperator> listMapJoinOpsNoReducer,
       Map<GroupByOperator, Set<String>> groupOpToInputTables,
       Map<String, PrunedPartitionList> prunedPartitions,
       HashMap<TableScanOperator, sampleDesc> opToSamplePruner) {
@@ -136,7 +148,7 @@
     this.destTableId = destTableId;
     this.uCtx = uCtx;
     this.listMapJoinOpsNoReducer = listMapJoinOpsNoReducer;
-    this.hasNonPartCols = false;
+    hasNonPartCols = false;
     this.groupOpToInputTables = new HashMap<GroupByOperator, Set<String>>();
     this.groupOpToInputTables = groupOpToInputTables;
     this.prunedPartitions = prunedPartitions;
@@ -214,7 +226,8 @@
    * @param opToPartPruner
    *          the opToPartPruner to set
    */
-  public void setOpToPartPruner(HashMap<TableScanOperator, exprNodeDesc> opToPartPruner) {
+  public void setOpToPartPruner(
+      HashMap<TableScanOperator, exprNodeDesc> opToPartPruner) {
     this.opToPartPruner = opToPartPruner;
   }
 
@@ -342,7 +355,8 @@
   }
 
   /**
-   * @param joinContext the joinContext to set
+   * @param joinContext
+   *          the joinContext to set
    */
   public void setJoinContext(Map<JoinOperator, QBJoinTree> joinContext) {
     this.joinContext = joinContext;
@@ -356,7 +370,8 @@
   }
 
   /**
-   * @param listMapJoinOpsNoReducer the listMapJoinOpsNoReducer to set
+   * @param listMapJoinOpsNoReducer
+   *          the listMapJoinOpsNoReducer to set
    */
   public void setListMapJoinOpsNoReducer(
       List<MapJoinOperator> listMapJoinOpsNoReducer) {
@@ -365,17 +380,18 @@
 
   /**
    * Sets the hasNonPartCols flag
+   * 
    * @param val
    */
   public void setHasNonPartCols(boolean val) {
-    this.hasNonPartCols = val;
+    hasNonPartCols = val;
   }
 
   /**
    * Gets the value of the hasNonPartCols flag
    */
   public boolean getHasNonPartCols() {
-    return this.hasNonPartCols;
+    return hasNonPartCols;
   }
 
   /**
@@ -389,7 +405,8 @@
    * @param opToSamplePruner
    *          the opToSamplePruner to set
    */
-  public void setOpToSamplePruner(HashMap<TableScanOperator, sampleDesc> opToSamplePruner) {
+  public void setOpToSamplePruner(
+      HashMap<TableScanOperator, sampleDesc> opToSamplePruner) {
     this.opToSamplePruner = opToSamplePruner;
   }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java Thu Jan 21 10:37:58 2010
@@ -21,16 +21,24 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 
-import org.antlr.runtime.*;
-import org.antlr.runtime.tree.*;
-
+import org.antlr.runtime.ANTLRStringStream;
+import org.antlr.runtime.BitSet;
+import org.antlr.runtime.CharStream;
+import org.antlr.runtime.IntStream;
+import org.antlr.runtime.MismatchedTokenException;
+import org.antlr.runtime.NoViableAltException;
+import org.antlr.runtime.RecognitionException;
+import org.antlr.runtime.Token;
+import org.antlr.runtime.TokenRewriteStream;
+import org.antlr.runtime.TokenStream;
+import org.antlr.runtime.tree.CommonTreeAdaptor;
+import org.antlr.runtime.tree.TreeAdaptor;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.hive.ql.Context;
 
 public class ParseDriver {
-    
+
   static final private Log LOG = LogFactory.getLog("hive.ql.parse.ParseDriver");
 
   private static HashMap<String, String> xlateMap;
@@ -148,25 +156,25 @@
     xlateMap.put("KW_PROPERTIES", "TBLPROPERTIES");
     xlateMap.put("KW_VALUE_TYPE", "$VALUE$");
     xlateMap.put("KW_ELEM_TYPE", "$ELEM$");
-    
+
     // Operators
     xlateMap.put("DOT", ".");
     xlateMap.put("COLON", ":");
     xlateMap.put("COMMA", ",");
     xlateMap.put("SEMICOLON", ");");
-    
+
     xlateMap.put("LPAREN", "(");
     xlateMap.put("RPAREN", ")");
     xlateMap.put("LSQUARE", "[");
     xlateMap.put("RSQUARE", "]");
-    
+
     xlateMap.put("EQUAL", "=");
     xlateMap.put("NOTEQUAL", "<>");
     xlateMap.put("LESSTHANOREQUALTO", "<=");
     xlateMap.put("LESSTHAN", "<");
     xlateMap.put("GREATERTHANOREQUALTO", ">=");
     xlateMap.put("GREATERTHAN", ">");
-    
+
     xlateMap.put("DIVIDE", "/");
     xlateMap.put("PLUS", "+");
     xlateMap.put("MINUS", "-");
@@ -180,49 +188,58 @@
   }
 
   private static String xlate(String name) {
-    
+
     String ret = xlateMap.get(name);
     if (ret == null) {
       ret = name;
     }
-    
+
     return ret;
   }
 
-  // This class provides and implementation for a case insensitive token checker for
-  // the lexical analysis part of antlr. By converting the token stream into upper case
-  // at the time when lexical rules are checked, this class ensures that the lexical rules
-  // need to just match the token with upper case letters as opposed to combination of upper
-  // case and lower case characteres. This is purely used for matching lexical rules. The 
-  // actual token text is stored in the same way as the user input without actually converting
-  // it into an upper case. The token values are generated by the consume() function of the
-  // super class ANTLRStringStream. The LA() function is the lookahead funtion and is purely
-  // used for matching lexical rules. This also means that the grammar will only accept
-  // capitalized tokens in case it is run from other tools like antlrworks which do not
+  // This class provides and implementation for a case insensitive token checker
+  // for
+  // the lexical analysis part of antlr. By converting the token stream into
+  // upper case
+  // at the time when lexical rules are checked, this class ensures that the
+  // lexical rules
+  // need to just match the token with upper case letters as opposed to
+  // combination of upper
+  // case and lower case characteres. This is purely used for matching lexical
+  // rules. The
+  // actual token text is stored in the same way as the user input without
+  // actually converting
+  // it into an upper case. The token values are generated by the consume()
+  // function of the
+  // super class ANTLRStringStream. The LA() function is the lookahead funtion
+  // and is purely
+  // used for matching lexical rules. This also means that the grammar will only
+  // accept
+  // capitalized tokens in case it is run from other tools like antlrworks which
+  // do not
   // have the ANTLRNoCaseStringStream implementation.
-  public class ANTLRNoCaseStringStream  extends ANTLRStringStream {
+  public class ANTLRNoCaseStringStream extends ANTLRStringStream {
 
     public ANTLRNoCaseStringStream(String input) {
       super(input);
     }
-    
+
     public int LA(int i) {
 
       int returnChar = super.LA(i);
-      if(returnChar == CharStream.EOF) {
-        return returnChar; 
-      }
-      else if(returnChar == 0) {
+      if (returnChar == CharStream.EOF) {
+        return returnChar;
+      } else if (returnChar == 0) {
         return returnChar;
       }
-    
-      return Character.toUpperCase((char)returnChar);
+
+      return Character.toUpperCase((char) returnChar);
     }
   }
 
   public class HiveLexerX extends HiveLexer {
 
-    private ArrayList<ParseError> errors;
+    private final ArrayList<ParseError> errors;
 
     public HiveLexerX() {
       super();
@@ -235,7 +252,7 @@
     }
 
     public void displayRecognitionError(String[] tokenNames,
-                                        RecognitionException e) {
+        RecognitionException e) {
 
       errors.add(new ParseError(this, e, tokenNames));
     }
@@ -245,13 +262,13 @@
 
       if (e instanceof NoViableAltException) {
         @SuppressWarnings("unused")
-    NoViableAltException nvae = (NoViableAltException)e;
-        // for development, can add "decision=<<"+nvae.grammarDecisionDescription+">>"
+        NoViableAltException nvae = (NoViableAltException) e;
+        // for development, can add
+        // "decision=<<"+nvae.grammarDecisionDescription+">>"
         // and "(decision="+nvae.decisionNumber+") and
         // "state "+nvae.stateNumber
         msg = "character " + getCharErrorDisplay(e.c) + " not supported here";
-      }
-      else {
+      } else {
         msg = super.getErrorMessage(e, tokenNames);
       }
 
@@ -266,29 +283,26 @@
 
   public class HiveParserX extends HiveParser {
 
-    private ArrayList<ParseError> errors;
+    private final ArrayList<ParseError> errors;
 
     public HiveParserX(TokenStream input) {
       super(input);
       errors = new ArrayList<ParseError>();
     }
 
-    protected void mismatch(IntStream input, int ttype, BitSet follow) 
-      throws RecognitionException {
+    protected void mismatch(IntStream input, int ttype, BitSet follow)
+        throws RecognitionException {
 
       throw new MismatchedTokenException(ttype, input);
     }
 
     public void recoverFromMismatchedSet(IntStream input,
-                                         RecognitionException re,
-                                         BitSet follow)
-      throws RecognitionException
-    {
+        RecognitionException re, BitSet follow) throws RecognitionException {
       throw re;
     }
 
     public void displayRecognitionError(String[] tokenNames,
-                                        RecognitionException e) {
+        RecognitionException e) {
 
       errors.add(new ParseError(this, e, tokenNames));
     }
@@ -298,25 +312,25 @@
 
       // Translate the token names to something that the user can understand
       String[] xlateNames = new String[tokenNames.length];
-      for(int i=0; i<tokenNames.length; ++i) {
+      for (int i = 0; i < tokenNames.length; ++i) {
         xlateNames[i] = ParseDriver.xlate(tokenNames[i]);
       }
 
       if (e instanceof NoViableAltException) {
         @SuppressWarnings("unused")
-    NoViableAltException nvae = (NoViableAltException)e;
-        // for development, can add "decision=<<"+nvae.grammarDecisionDescription+">>"
+        NoViableAltException nvae = (NoViableAltException) e;
+        // for development, can add
+        // "decision=<<"+nvae.grammarDecisionDescription+">>"
         // and "(decision="+nvae.decisionNumber+") and
         // "state "+nvae.stateNumber
         msg = "cannot recognize input " + getTokenErrorDisplay(e.token);
-      }
-      else {
+      } else {
         msg = super.getErrorMessage(e, xlateNames);
       }
 
-        if(msgs.size() > 0) {
-          msg = msg + " in " + msgs.peek();
-        }
+      if (msgs.size() > 0) {
+        msg = msg + " in " + msgs.peek();
+      }
       return msg;
     }
 
@@ -333,10 +347,11 @@
    */
   static final TreeAdaptor adaptor = new CommonTreeAdaptor() {
     /**
-     * Creates an ASTNode for the given token. The ASTNode is a wrapper around antlr's
-     * CommonTree class that implements the Node interface.
+     * Creates an ASTNode for the given token. The ASTNode is a wrapper around
+     * antlr's CommonTree class that implements the Node interface.
      * 
-     * @param payload The token.
+     * @param payload
+     *          The token.
      * @return Object (which is actually an ASTNode) for the token.
      */
     @Override
@@ -344,26 +359,28 @@
       return new ASTNode(payload);
     }
   };
-  
+
   public ASTNode parse(String command) throws ParseException {
     return parse(command, null);
   }
 
   /**
-   * Parses a command, optionally assigning the parser's token stream to
-   * the given context.
-   *
-   * @param command command to parse
-   *
-   * @param ctx context with which to associate this parser's
-   * token stream, or null if either no context is available
-   * or the context already has an existing stream
-   *
+   * Parses a command, optionally assigning the parser's token stream to the
+   * given context.
+   * 
+   * @param command
+   *          command to parse
+   * 
+   * @param ctx
+   *          context with which to associate this parser's token stream, or
+   *          null if either no context is available or the context already has
+   *          an existing stream
+   * 
    * @return parsed AST
    */
   public ASTNode parse(String command, Context ctx) throws ParseException {
     LOG.info("Parsing command: " + command);
-      
+
     HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
     TokenRewriteStream tokens = new TokenRewriteStream(lexer);
     if (ctx != null) {
@@ -378,17 +395,14 @@
       throw new ParseException(parser.getErrors());
     }
 
-    if (lexer.getErrors().size() == 0 &&
-        parser.getErrors().size() == 0) {
+    if (lexer.getErrors().size() == 0 && parser.getErrors().size() == 0) {
       LOG.info("Parse Completed");
-    }
-    else if (lexer.getErrors().size() != 0) {
+    } else if (lexer.getErrors().size() != 0) {
       throw new ParseException(lexer.getErrors());
     } else {
       throw new ParseException(parser.getErrors());
     }
-      
-    return (ASTNode)r.getTree();
+
+    return (ASTNode) r.getTree();
   }
 }
-

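For reference, a minimal sketch of how the reformatted ParseDriver entry points are typically exercised; the query string, the wrapper class, and the use of findRootNonNullToken here are illustrative assumptions, not part of this change:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.ParseUtils;

public class ParseDriverSketch {
  public static void main(String[] args) throws ParseException {
    ParseDriver pd = new ParseDriver();
    // Keywords may be written in any case thanks to ANTLRNoCaseStringStream;
    // the token text itself keeps the user's original casing.
    ASTNode tree = pd.parse("select key, value from src where key > 10");
    // Skip leading nil nodes, as the compiler does before semantic analysis.
    tree = ParseUtils.findRootNonNullToken(tree);
    System.out.println(tree.toStringTree());
  }
}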
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseError.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseError.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseError.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseError.java Thu Jan 21 10:37:58 2010
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-import org.antlr.runtime.*;
+import org.antlr.runtime.BaseRecognizer;
+import org.antlr.runtime.RecognitionException;
 
 /*
  * ParseError.java
@@ -33,16 +34,16 @@
  *
  */
 public class ParseError {
-  private BaseRecognizer br;
-  private RecognitionException re;
-  private String[] tokenNames;
-  
+  private final BaseRecognizer br;
+  private final RecognitionException re;
+  private final String[] tokenNames;
+
   ParseError(BaseRecognizer br, RecognitionException re, String[] tokenNames) {
     this.br = br;
     this.re = re;
     this.tokenNames = tokenNames;
-    }
-  
+  }
+
   BaseRecognizer getBaseRecognizer() {
     return br;
   }
@@ -50,7 +51,7 @@
   RecognitionException getRecognitionException() {
     return re;
   }
-  
+
   String[] getTokenNames() {
     return tokenNames;
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java Thu Jan 21 10:37:58 2010
@@ -30,15 +30,16 @@
     this.errors = errors;
   }
 
+  @Override
   public String getMessage() {
 
     StringBuilder sb = new StringBuilder();
-    for(ParseError err: errors) {
+    for (ParseError err : errors) {
       sb.append(err.getMessage());
       sb.append("\n");
     }
 
     return sb.toString();
   }
-    
+
 }

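A small sketch of how the error plumbing above surfaces to callers; the misspelled query and the surrounding class are assumptions for illustration only:

import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;

public class ParseErrorSketch {
  public static void main(String[] args) {
    try {
      // Deliberately misspelled keyword to force a parse error.
      new ParseDriver().parse("selec key from src");
    } catch (ParseException pe) {
      // getMessage() concatenates one line per accumulated ParseError.
      System.err.println(pe.getMessage());
    }
  }
}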
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java Thu Jan 21 10:37:58 2010
@@ -20,39 +20,41 @@
 
 /**
  * Library of utility functions used in the parse code
- *
+ * 
  */
 public class ParseUtils {
-	
+
   /**
    * Tests whether the parse tree node is a join token
    * 
-   * @param node The parse tree node
+   * @param node
+   *          The parse tree node
    * @return boolean
    */
   public static boolean isJoinToken(ASTNode node) {
     if ((node.getToken().getType() == HiveParser.TOK_JOIN)
-      || (node.getToken().getType() == HiveParser.TOK_LEFTOUTERJOIN)
-      || (node.getToken().getType() == HiveParser.TOK_RIGHTOUTERJOIN)
-      || (node.getToken().getType() == HiveParser.TOK_FULLOUTERJOIN))
+        || (node.getToken().getType() == HiveParser.TOK_LEFTOUTERJOIN)
+        || (node.getToken().getType() == HiveParser.TOK_RIGHTOUTERJOIN)
+        || (node.getToken().getType() == HiveParser.TOK_FULLOUTERJOIN)) {
       return true;
+    }
 
     return false;
   }
 
   /**
-   * Performs a descent of the leftmost branch of a tree, stopping
-   * when either a node with a non-null token is found or the leaf
-   * level is encountered.
-   *
-   * @param tree candidate node from which to start searching
-   *
+   * Performs a descent of the leftmost branch of a tree, stopping when either a
+   * node with a non-null token is found or the leaf level is encountered.
+   * 
+   * @param tree
+   *          candidate node from which to start searching
+   * 
    * @return node at which descent stopped
    */
   public static ASTNode findRootNonNullToken(ASTNode tree) {
-      while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
-        tree = (ASTNode) tree.getChild(0);
-      }
-      return tree;
+    while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
+      tree = (ASTNode) tree.getChild(0);
+    }
+    return tree;
   }
 }

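A rough sketch of how the two helpers above compose; the recursive walk and the wrapper class are illustrative, not code from this commit:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseUtils;

public class JoinTokenSketch {
  // Returns true if any node in the (sub)tree carries one of the join tokens
  // recognized by ParseUtils.isJoinToken().
  public static boolean containsJoin(ASTNode node) {
    if (node.getToken() != null && ParseUtils.isJoinToken(node)) {
      return true;
    }
    for (int i = 0; i < node.getChildCount(); i++) {
      if (containsJoin((ASTNode) node.getChild(i))) {
        return true;
      }
    }
    return false;
  }
}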
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java Thu Jan 21 10:37:58 2010
@@ -29,26 +29,27 @@
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 
 public class PrintOpTreeProcessor implements NodeProcessor {
-  
-  private PrintStream out;
-  private HashMap<Operator<? extends Serializable>, Integer> opMap = new HashMap<Operator<? extends Serializable>, Integer>();
+
+  private final PrintStream out;
+  private final HashMap<Operator<? extends Serializable>, Integer> opMap = new HashMap<Operator<? extends Serializable>, Integer>();
   private Integer curNum = 0;
 
   public PrintOpTreeProcessor() {
     out = System.out;
   }
-  
+
   public PrintOpTreeProcessor(PrintStream o) {
     out = o;
   }
-  
+
   private String getParents(Operator<? extends Serializable> op) {
     StringBuilder ret = new StringBuilder("[");
     boolean first = true;
-    if(op.getParentOperators() != null) {
-      for(Operator<? extends Serializable> parent :  op.getParentOperators()) {
-        if(!first)
+    if (op.getParentOperators() != null) {
+      for (Operator<? extends Serializable> parent : op.getParentOperators()) {
+        if (!first) {
           ret.append(",");
+        }
         ret.append(opMap.get(parent));
         first = false;
       }
@@ -56,14 +57,15 @@
     ret.append("]");
     return ret.toString();
   }
-  
+
   private String getChildren(Operator<? extends Serializable> op) {
     StringBuilder ret = new StringBuilder("[");
     boolean first = true;
-    if(op.getChildOperators() != null) {
-      for(Operator<? extends Serializable> child :  op.getChildOperators()) {
-        if(!first)
+    if (op.getChildOperators() != null) {
+      for (Operator<? extends Serializable> child : op.getChildOperators()) {
+        if (!first) {
           ret.append(",");
+        }
         ret.append(opMap.get(child));
         first = false;
       }
@@ -71,14 +73,16 @@
     ret.append("]");
     return ret.toString();
   }
-  
-  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs) throws SemanticException {
-    Operator<? extends Serializable> op = (Operator<? extends Serializable>)nd;
+
+  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+      Object... nodeOutputs) throws SemanticException {
+    Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
     if (opMap.get(op) == null) {
       opMap.put(op, curNum++);
     }
-    out.println("[" + opMap.get(op) + "] " + op.getClass().getName() + " =p=> " + getParents(op) + " =c=> " + getChildren(op));
-    if(op.getConf() == null) {
+    out.println("[" + opMap.get(op) + "] " + op.getClass().getName() + " =p=> "
+        + getParents(op) + " =c=> " + getChildren(op));
+    if (op.getConf() == null) {
       return null;
     }
     return null;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java Thu Jan 21 10:37:58 2010
@@ -27,51 +27,59 @@
  */
 public class PrunedPartitionList {
   // confirmed partitions - satisfy the partition criteria
-  private Set<Partition>  confirmedPartns;
+  private Set<Partition> confirmedPartns;
 
   // unknown partitions - may/may not satisfy the partition criteria
-  private Set<Partition>  unknownPartns;
+  private Set<Partition> unknownPartns;
 
   // denied partitions - do not satisfy the partition criteria
-  private Set<Partition> deniedPartns;
+  private final Set<Partition> deniedPartns;
 
   /**
-   * @param confirmedPartns  confirmed paritions
-   * @param unknownPartns    unknown partitions
+   * @param confirmedPartns
+   *          confirmed partitions
+   * @param unknownPartns
+   *          unknown partitions
    */
-  public PrunedPartitionList(Set<Partition> confirmedPartns, Set<Partition> unknownPartns, Set<Partition> deniedPartns) {
-    this.confirmedPartns  = confirmedPartns;
-    this.unknownPartns    = unknownPartns;
-    this.deniedPartns     = deniedPartns;
+  public PrunedPartitionList(Set<Partition> confirmedPartns,
+      Set<Partition> unknownPartns, Set<Partition> deniedPartns) {
+    this.confirmedPartns = confirmedPartns;
+    this.unknownPartns = unknownPartns;
+    this.deniedPartns = deniedPartns;
   }
 
   /**
    * get confirmed partitions
-   * @return confirmedPartns  confirmed paritions
+   * 
+   * @return confirmedPartns confirmed partitions
    */
-  public Set<Partition>  getConfirmedPartns() {
+  public Set<Partition> getConfirmedPartns() {
     return confirmedPartns;
   }
 
   /**
    * get unknown partitions
-   * @return unknownPartns  unknown paritions
+   * 
+   * @return unknownPartns unknown partitions
    */
-  public Set<Partition>  getUnknownPartns() {
+  public Set<Partition> getUnknownPartns() {
     return unknownPartns;
   }
 
   /**
    * get denied partitions
-   * @return deniedPartns  denied paritions
+   * 
+   * @return deniedPartns denied partitions
    */
-  public Set<Partition>  getDeniedPartns() {
+  public Set<Partition> getDeniedPartns() {
     return deniedPartns;
   }
 
   /**
    * set confirmed partitions
-   * @param confirmedPartns  confirmed paritions
+   * 
+   * @param confirmedPartns
+   *          confirmed partitions
    */
   public void setConfirmedPartns(Set<Partition> confirmedPartns) {
     this.confirmedPartns = confirmedPartns;
@@ -79,9 +87,11 @@
 
   /**
    * set unknown partitions
-   * @param unknownPartns    unknown partitions
+   * 
+   * @param unknownPartns
+   *          unknown partitions
    */
   public void setUnknownPartns(Set<Partition> unknownPartns) {
-    this.unknownPartns   = unknownPartns;
+    this.unknownPartns = unknownPartns;
   }
 }

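As a hedged illustration of the wiring above (the helper method is an assumption; in practice the three sets come from the partition pruner):

import java.util.Set;

import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;

public class PartitionPruningSketch {
  // Confirmed partitions definitely satisfy the predicate, unknown ones may
  // satisfy it (the filter must still run at execution time), and denied
  // ones are skipped entirely.
  public static int partitionsToScan(Set<Partition> confirmed,
      Set<Partition> unknown, Set<Partition> denied) {
    PrunedPartitionList parts =
        new PrunedPartitionList(confirmed, unknown, denied);
    return parts.getConfirmedPartns().size()
        + parts.getUnknownPartns().size();
  }
}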
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Thu Jan 21 10:37:58 2010
@@ -18,26 +18,24 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-import java.util.*;
-
-import org.apache.hadoop.hive.ql.parse.QBParseInfo;
-import org.apache.hadoop.hive.ql.parse.QBMetaData;
-import org.apache.hadoop.hive.ql.plan.createTableDesc;
+import java.util.HashMap;
+import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.plan.createTableDesc;
 
 /**
  * Implementation of the query block
- *
+ * 
  **/
 
 public class QB {
 
   private static final Log LOG = LogFactory.getLog("hive.ql.parse.QB");
 
-  private int numJoins = 0;
-  private int numGbys = 0;
+  private final int numJoins = 0;
+  private final int numGbys = 0;
   private int numSels = 0;
   private int numSelDi = 0;
   private HashMap<String, String> aliasToTabs;
@@ -47,21 +45,22 @@
   private QBJoinTree qbjoin;
   private String id;
   private boolean isQuery;
-  private createTableDesc tblDesc = null;   // table descriptor of the final results
+  private createTableDesc tblDesc = null; // table descriptor of the final
+                                          // results
 
   public void print(String msg) {
     LOG.info(msg + "alias=" + qbp.getAlias());
-    for(String alias: getSubqAliases()) {
+    for (String alias : getSubqAliases()) {
       QBExpr qbexpr = getSubqForAlias(alias);
-      LOG.info(msg+"start subquery " + alias);
-      qbexpr.print(msg+" ");
-      LOG.info(msg+"end subquery " + alias);
+      LOG.info(msg + "start subquery " + alias);
+      qbexpr.print(msg + " ");
+      LOG.info(msg + "end subquery " + alias);
     }
   }
 
-  public QB() {  
+  public QB() {
   }
-  
+
   public QB(String outer_id, String alias, boolean isSubQ) {
     aliasToTabs = new HashMap<String, String>();
     aliasToSubq = new HashMap<String, QBExpr>();
@@ -70,7 +69,7 @@
     }
     qbp = new QBParseInfo(alias, isSubQ);
     qbm = new QBMetaData();
-    this.id = (outer_id == null ? alias : outer_id + ":" + alias);
+    id = (outer_id == null ? alias : outer_id + ":" + alias);
   }
 
   public QBParseInfo getParseInfo() {
@@ -95,8 +94,9 @@
 
   public boolean exists(String alias) {
     alias = alias.toLowerCase();
-    if (aliasToTabs.get(alias) != null || aliasToSubq.get(alias) != null)
+    if (aliasToTabs.get(alias) != null || aliasToSubq.get(alias) != null) {
       return true;
+    }
 
     return false;
   }
@@ -171,7 +171,7 @@
   public boolean isSelectStarQuery() {
     return qbp.isSelectStarQuery() && aliasToSubq.isEmpty() && !isCTAS();
   }
-  
+
   public createTableDesc getTableDesc() {
     return tblDesc;
   }
@@ -179,7 +179,7 @@
   public void setTableDesc(createTableDesc desc) {
     tblDesc = desc;
   }
-  
+
   /**
    * Whether this QB is for a CREATE-TABLE-AS-SELECT.
    */

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java Thu Jan 21 10:37:58 2010
@@ -23,13 +23,17 @@
 
 /**
  * Implementation of the query block expression
- *
+ * 
  **/
 
 public class QBExpr {
 
   private static final Log LOG = LogFactory.getLog("hive.ql.parse.QBExpr");
-  public static enum Opcode { NULLOP, UNION, INTERSECT, DIFF };
+
+  public static enum Opcode {
+    NULLOP, UNION, INTERSECT, DIFF
+  };
+
   private Opcode opcode;
   private QBExpr qbexpr1;
   private QBExpr qbexpr2;
@@ -49,7 +53,7 @@
   }
 
   public QBExpr(QB qb) {
-    this.opcode = Opcode.NULLOP;
+    opcode = Opcode.NULLOP;
     this.qb = qb;
   }
 
@@ -68,15 +72,15 @@
   }
 
   public void setQBExpr1(QBExpr qbexpr) {
-    this.qbexpr1 = qbexpr;
+    qbexpr1 = qbexpr;
   }
 
   public void setQBExpr2(QBExpr qbexpr) {
-    this.qbexpr2 = qbexpr;
+    qbexpr2 = qbexpr;
   }
 
   public QB getQB() {
-    return this.qb;
+    return qb;
   }
 
   public Opcode getOpcode() {
@@ -94,15 +98,14 @@
   public void print(String msg) {
     if (opcode == Opcode.NULLOP) {
       LOG.info(msg + "start qb = " + qb);
-      qb.print(msg+" ");
+      qb.print(msg + " ");
       LOG.info(msg + "end qb = " + qb);
-    }
-    else {
+    } else {
       LOG.info(msg + "start qbexpr1 = " + qbexpr1);
-      qbexpr1.print(msg+" ");
+      qbexpr1.print(msg + " ");
       LOG.info(msg + "end qbexpr1 = " + qbexpr1);
       LOG.info(msg + "start qbexpr2 = " + qbexpr2);
-      qbexpr2.print(msg+" ");
+      qbexpr2.print(msg + " ");
       LOG.info(msg + "end qbexpr2 = " + qbexpr2);
     }
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java Thu Jan 21 10:37:58 2010
@@ -18,31 +18,31 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.Vector;
 import java.util.List;
-import java.util.ArrayList;
+import java.util.Vector;
 import java.util.Map.Entry;
 
 /**
  * Internal representation of the join tree
- *
+ * 
  */
-public class QBJoinTree 
-{
-  private String        leftAlias;
-  private String[]      rightAliases;
-  private String[]      leftAliases;
-  private QBJoinTree    joinSrc;
-  private String[]      baseSrc;
-  private int           nextTag;
-  private joinCond[]    joinCond;
-  private boolean       noOuterJoin;
-  private boolean       noSemiJoin;
-  
-  // keeps track of the right-hand-side table name of the left-semi-join, and its list of join keys
-  private HashMap<String, ArrayList<ASTNode>> rhsSemijoin;
-  
+public class QBJoinTree {
+  private String leftAlias;
+  private String[] rightAliases;
+  private String[] leftAliases;
+  private QBJoinTree joinSrc;
+  private String[] baseSrc;
+  private int nextTag;
+  private joinCond[] joinCond;
+  private boolean noOuterJoin;
+  private boolean noSemiJoin;
+
+  // keeps track of the right-hand-side table name of the left-semi-join, and
+  // its list of join keys
+  private final HashMap<String, ArrayList<ASTNode>> rhsSemijoin;
+
   // join conditions
   private Vector<Vector<ASTNode>> expressions;
 
@@ -50,24 +50,25 @@
   private Vector<Vector<ASTNode>> filters;
 
   // user asked for map-side join
-  private  boolean        mapSideJoin;
-  private  List<String>   mapAliases;
-  
+  private boolean mapSideJoin;
+  private List<String> mapAliases;
+
   // big tables that should be streamed
-  private  List<String>   streamAliases;
+  private List<String> streamAliases;
 
   /**
-   * constructor 
+   * constructor
    */
-  public QBJoinTree() { 
+  public QBJoinTree() {
     nextTag = 0;
     noOuterJoin = true;
-    noSemiJoin  = true;
+    noSemiJoin = true;
     rhsSemijoin = new HashMap<String, ArrayList<ASTNode>>();
   }
 
   /**
    * returns left alias if any - this is used for merging later on
+   * 
    * @return left alias if any
    */
   public String getLeftAlias() {
@@ -76,7 +77,9 @@
 
   /**
    * set left alias for the join expression
-   * @param leftAlias String
+   * 
+   * @param leftAlias
+   *          String
    */
   public void setLeftAlias(String leftAlias) {
     this.leftAlias = leftAlias;
@@ -145,13 +148,13 @@
   public void setNoOuterJoin(boolean noOuterJoin) {
     this.noOuterJoin = noOuterJoin;
   }
-  
+
   public boolean getNoSemiJoin() {
     return noSemiJoin;
   }
 
   public void setNoSemiJoin(boolean semi) {
-    this.noSemiJoin = semi;
+    noSemiJoin = semi;
   }
 
   /**
@@ -162,7 +165,8 @@
   }
 
   /**
-   * @param filters the filters to set
+   * @param filters
+   *          the filters to set
    */
   public void setFilters(Vector<Vector<ASTNode>> filters) {
     this.filters = filters;
@@ -176,7 +180,8 @@
   }
 
   /**
-   * @param mapSideJoin the mapSidejoin to set
+   * @param mapSideJoin
+   *          the mapSidejoin to set
    */
   public void setMapSideJoin(boolean mapSideJoin) {
     this.mapSideJoin = mapSideJoin;
@@ -190,12 +195,13 @@
   }
 
   /**
-   * @param mapAliases the mapAliases to set
+   * @param mapAliases
+   *          the mapAliases to set
    */
   public void setMapAliases(List<String> mapAliases) {
     this.mapAliases = mapAliases;
   }
-  
+
   public List<String> getStreamAliases() {
     return streamAliases;
   }
@@ -203,39 +209,43 @@
   public void setStreamAliases(List<String> streamAliases) {
     this.streamAliases = streamAliases;
   }
-  
+
   /**
-   * Insert only a key to the semijoin table name to column names map. 
-   * @param alias table name alias.
+   * Insert only a key into the semijoin table-name-to-column-names map.
+   * 
+   * @param alias
+   *          table name alias.
    */
   public void addRHSSemijoin(String alias) {
-    if ( ! rhsSemijoin.containsKey(alias) ) {
+    if (!rhsSemijoin.containsKey(alias)) {
       rhsSemijoin.put(alias, null);
     }
   }
-  
+
   /**
   * Remember the mapping of table alias to set of columns.
+   * 
    * @param alias
    * @param columns
    */
   public void addRHSSemijoinColumns(String alias, ArrayList<ASTNode> columns) {
     ArrayList<ASTNode> cols = rhsSemijoin.get(alias);
-    if ( cols == null ) {
+    if (cols == null) {
       rhsSemijoin.put(alias, columns);
     } else {
       cols.addAll(columns);
     }
   }
-  
+
   /**
   * Remember the mapping of table alias to set of columns.
+   * 
    * @param alias
    * @param columns
    */
   public void addRHSSemijoinColumns(String alias, ASTNode column) {
     ArrayList<ASTNode> cols = rhsSemijoin.get(alias);
-    if ( cols == null ) {
+    if (cols == null) {
       cols = new ArrayList<ASTNode>();
       cols.add(column);
       rhsSemijoin.put(alias, cols);
@@ -243,26 +253,26 @@
       cols.add(column);
     }
   }
-  
+
   public ArrayList<ASTNode> getRHSSemijoinColumns(String alias) {
     return rhsSemijoin.get(alias);
   }
-  
+
   /**
    * Merge the rhs tables from another join tree.
-   * @param src the source join tree
+   * 
+   * @param src
+   *          the source join tree
    */
   public void mergeRHSSemijoin(QBJoinTree src) {
-    for (Entry<String, ArrayList<ASTNode>> e: src.rhsSemijoin.entrySet()) {
+    for (Entry<String, ArrayList<ASTNode>> e : src.rhsSemijoin.entrySet()) {
       String key = e.getKey();
-      ArrayList<ASTNode> value = this.rhsSemijoin.get(key);
-      if ( value == null ) {
-        this.rhsSemijoin.put(key, e.getValue());
+      ArrayList<ASTNode> value = rhsSemijoin.get(key);
+      if (value == null) {
+        rhsSemijoin.put(key, e.getValue());
       } else {
         value.addAll(e.getValue());
       }
     }
   }
 }
-
-

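A speculative sketch of the semijoin bookkeeping shown above; the fabricated ASTNode, the HiveParser.Identifier token type, and the aliases are assumptions for illustration only:

import org.antlr.runtime.CommonToken;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.QBJoinTree;

public class SemijoinTreeSketch {
  public static QBJoinTree buildExample() {
    QBJoinTree joinTree = new QBJoinTree();
    joinTree.setNoSemiJoin(false);

    // Register the right-hand-side alias of a LEFT SEMI JOIN and one join-key
    // column. The ASTNode is fabricated here; in real use it comes from the
    // parsed join condition.
    joinTree.addRHSSemijoin("b");
    ASTNode keyCol = new ASTNode(new CommonToken(HiveParser.Identifier, "key"));
    joinTree.addRHSSemijoinColumns("b", keyCol);

    // Merging pulls in the RHS semijoin aliases tracked by another tree.
    QBJoinTree other = new QBJoinTree();
    other.addRHSSemijoin("c");
    joinTree.mergeRHSSemijoin(other);
    return joinTree;
  }
}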
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java Thu Jan 21 10:37:58 2010
@@ -18,14 +18,16 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-import java.util.*;
-import org.apache.hadoop.hive.ql.metadata.*;
+import java.util.HashMap;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
 
 /**
  * Implementation of the metadata information related to a query block
- *
+ * 
  **/
 
 public class QBMetaData {
@@ -37,25 +39,27 @@
   public static final int DEST_REDUCE = 4;
   public static final int DEST_LOCAL_FILE = 5;
 
-  private HashMap<String, Table> aliasToTable;
-  private HashMap<String, Table> nameToDestTable;
-  private HashMap<String, Partition> nameToDestPartition;
-  private HashMap<String, String> nameToDestFile;
-  private HashMap<String, Integer> nameToDestType;
+  private final HashMap<String, Table> aliasToTable;
+  private final HashMap<String, Table> nameToDestTable;
+  private final HashMap<String, Partition> nameToDestPartition;
+  private final HashMap<String, String> nameToDestFile;
+  private final HashMap<String, Integer> nameToDestType;
 
   @SuppressWarnings("unused")
   private static final Log LOG = LogFactory.getLog(QBMetaData.class.getName());
-  
+
   public QBMetaData() {
-    this.aliasToTable = new HashMap<String, Table>();
-    this.nameToDestTable = new HashMap<String, Table>();
-    this.nameToDestPartition = new HashMap<String, Partition>();
-    this.nameToDestFile = new HashMap<String, String>();
-    this.nameToDestType = new HashMap<String, Integer>();
+    aliasToTable = new HashMap<String, Table>();
+    nameToDestTable = new HashMap<String, Table>();
+    nameToDestPartition = new HashMap<String, Partition>();
+    nameToDestFile = new HashMap<String, String>();
+    nameToDestType = new HashMap<String, Integer>();
   }
 
-  // All getXXX needs toLowerCase() because they are directly called from SemanticAnalyzer
-  // All setXXX does not need it because they are called from QB which already lowercases
+  // All getXXX methods need toLowerCase() because they are called directly
+  // from SemanticAnalyzer.
+  // All setXXX methods do not need it because they are called from QB, which
+  // already lowercases
   // the aliases.
 
   public HashMap<String, Table> getAliasToTable() {
@@ -63,46 +67,46 @@
   }
 
   public Table getTableForAlias(String alias) {
-    return this.aliasToTable.get(alias.toLowerCase());
+    return aliasToTable.get(alias.toLowerCase());
   }
 
   public void setSrcForAlias(String alias, Table tab) {
-    this.aliasToTable.put(alias, tab);
+    aliasToTable.put(alias, tab);
   }
 
   public void setDestForAlias(String alias, Table tab) {
-    this.nameToDestType.put(alias, Integer.valueOf(DEST_TABLE));
-    this.nameToDestTable.put(alias, tab);
+    nameToDestType.put(alias, Integer.valueOf(DEST_TABLE));
+    nameToDestTable.put(alias, tab);
   }
 
   public void setDestForAlias(String alias, Partition part) {
-    this.nameToDestType.put(alias, Integer.valueOf(DEST_PARTITION));
-    this.nameToDestPartition.put(alias, part);
+    nameToDestType.put(alias, Integer.valueOf(DEST_PARTITION));
+    nameToDestPartition.put(alias, part);
   }
 
   public void setDestForAlias(String alias, String fname, boolean isDfsFile) {
-    this.nameToDestType.put(alias, 
-                       isDfsFile ? Integer.valueOf(DEST_DFS_FILE) : Integer.valueOf(DEST_LOCAL_FILE));
-    this.nameToDestFile.put(alias, fname);
+    nameToDestType.put(alias, isDfsFile ? Integer.valueOf(DEST_DFS_FILE)
+        : Integer.valueOf(DEST_LOCAL_FILE));
+    nameToDestFile.put(alias, fname);
   }
 
   public Integer getDestTypeForAlias(String alias) {
-    return this.nameToDestType.get(alias.toLowerCase());
+    return nameToDestType.get(alias.toLowerCase());
   }
 
   public Table getDestTableForAlias(String alias) {
-    return this.nameToDestTable.get(alias.toLowerCase());
+    return nameToDestTable.get(alias.toLowerCase());
   }
 
   public Partition getDestPartitionForAlias(String alias) {
-    return this.nameToDestPartition.get(alias.toLowerCase());
+    return nameToDestPartition.get(alias.toLowerCase());
   }
 
   public String getDestFileForAlias(String alias) {
-    return this.nameToDestFile.get(alias.toLowerCase());
+    return nameToDestFile.get(alias.toLowerCase());
   }
 
   public Table getSrcForAlias(String alias) {
-    return this.aliasToTable.get(alias.toLowerCase());
+    return aliasToTable.get(alias.toLowerCase());
   }
 }
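
A brief sketch of the alias handling described in the comment above; the Table argument, the alias names, and the wrapper class are assumed for illustration:

import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.QBMetaData;

public class QBMetaDataSketch {
  // srcTable stands in for a Table resolved from the metastore.
  public static void recordAliases(QBMetaData qbm, Table srcTable) {
    // Setters are fed aliases that QB has already lowercased ...
    qbm.setSrcForAlias("src", srcTable);
    qbm.setDestForAlias("ins_dir", "/tmp/hive_out", true);
    // ... while getters lowercase the alias themselves before the lookup.
    Table t = qbm.getSrcForAlias("SRC");
    Integer destType = qbm.getDestTypeForAlias("INS_DIR"); // DEST_DFS_FILE
    System.out.println(t + " -> " + destType);
  }
}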