Posted to commits@hive.apache.org by zs...@apache.org on 2010/02/09 08:55:50 UTC

svn commit: r907950 [6/15] - in /hadoop/hive/trunk: ./ checkstyle/ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/java/org/apache/hadoop/hive/contrib/file...

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java Tue Feb  9 07:55:30 2010
@@ -29,14 +29,14 @@
 import org.apache.hadoop.hive.ql.ppd.PredicatePushDown;
 
 /**
- * Implementation of the optimizer
+ * Implementation of the optimizer.
  */
 public class Optimizer {
   private ParseContext pctx;
   private List<Transform> transformations;
 
   /**
-   * create the list of transformations
+   * Create the list of transformations.
    * 
    * @param hiveConf
    */
@@ -61,7 +61,7 @@
   }
 
   /**
-   * invoke all the transformations one-by-one, and alter the query plan
+   * Invoke all the transformations one-by-one, and alter the query plan.
    * 
    * @return ParseContext
    * @throws SemanticException

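For reference, the transform loop described by the Javadoc above is a plain pipeline: each Transform consumes the current ParseContext and returns the rewritten one. A minimal sketch of that method, assuming the pctx and transformations fields shown in the diff (illustrative, not the committed body):

    public ParseContext optimize() throws SemanticException {
      // Apply each registered transformation in order; every Transform
      // rewrites the query plan held in the ParseContext.
      for (Transform t : transformations) {
        pctx = t.transform(pctx);
      }
      return pctx;
    }
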
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java Tue Feb  9 07:55:30 2010
@@ -50,6 +50,10 @@
  */
 public class SamplePruner implements Transform {
 
+  /**
+   * SamplePrunerCtx.
+   *
+   */
   public static class SamplePrunerCtx implements NodeProcessorCtx {
     HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
 
@@ -109,7 +113,10 @@
     return pctx;
   }
 
-  // Filter processor
+  /**
+   * FilterPPR filter processor.
+   *
+   */
   public static class FilterPPR implements NodeProcessor {
 
     @Override
@@ -134,7 +141,10 @@
     return new FilterPPR();
   }
 
-  // Default processor which does nothing
+  /**
+   * DefaultPPR default processor which does nothing.
+   *
+   */
   public static class DefaultPPR implements NodeProcessor {
 
     @Override
@@ -151,7 +161,7 @@
 
   /**
    * Prunes to get all the files in the partition that satisfy the TABLESAMPLE
-   * clause
+   * clause.
    * 
    * @param part
    *          The partition to prune

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Transform.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Transform.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Transform.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Transform.java Tue Feb  9 07:55:30 2010
@@ -29,12 +29,12 @@
  */
 public interface Transform {
   /**
-   * All transformation steps implement this interface
+   * All transformation steps implement this interface.
    * 
    * @param pctx
    *          input parse context
    * @return ParseContext
    * @throws SemanticException
    */
-  public ParseContext transform(ParseContext pctx) throws SemanticException;
+  ParseContext transform(ParseContext pctx) throws SemanticException;
 }

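The Transform change above also drops a redundant modifier: members of a Java interface are implicitly public, and methods implicitly abstract, so checkstyle's RedundantModifier check flags the explicit keyword. Both declarations in this hypothetical interface compile to the same kind of signature:

    public interface Example {
      public int verbose(int x);  // explicit 'public' is redundant here
      int concise(int x);         // identical to the compiler
    }
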
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java Tue Feb  9 07:55:30 2010
@@ -45,7 +45,6 @@
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ConditionalResolverSkewJoin;
 import org.apache.hadoop.hive.ql.plan.ConditionalWork;
-import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
@@ -54,14 +53,20 @@
 import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
-public class GenMRSkewJoinProcessor {
+/**
+ * GenMRSkewJoinProcessor.
+ *
+ */
+public final class GenMRSkewJoinProcessor {
 
-  public GenMRSkewJoinProcessor() {
+  private GenMRSkewJoinProcessor() {
+    // prevent instantiation
   }
 
   /**
@@ -139,7 +144,8 @@
     joinDescriptor.setSkewKeyDefinition(HiveConf.getIntVar(parseCtx.getConf(),
         HiveConf.ConfVars.HIVESKEWJOINKEY));
 
-    Map<String, Task<? extends Serializable>> bigKeysDirToTaskMap = new HashMap<String, Task<? extends Serializable>>();
+    Map<String, Task<? extends Serializable>> bigKeysDirToTaskMap =
+      new HashMap<String, Task<? extends Serializable>>();
     List<Serializable> listWorks = new ArrayList<Serializable>();
     List<Task<? extends Serializable>> listTasks = new ArrayList<Task<? extends Serializable>>();
     MapredWork currPlan = (MapredWork) currTask.getWork();
@@ -156,7 +162,7 @@
     // used for create mapJoinDesc, should be in order
     List<TableDesc> newJoinValueTblDesc = new ArrayList<TableDesc>();
 
-    for (int k = 0; k < tags.length; k++) {
+    for (Byte tag : tags) {
       newJoinValueTblDesc.add(null);
     }
 
@@ -261,7 +267,7 @@
 
       MapJoinDesc mapJoinDescriptor = new MapJoinDesc(newJoinKeys, keyTblDesc,
           newJoinValues, newJoinValueTblDesc, joinDescriptor
-              .getOutputColumnNames(), i, joinDescriptor.getConds());
+          .getOutputColumnNames(), i, joinDescriptor.getConds());
       mapJoinDescriptor.setNoOuterJoin(joinDescriptor.isNoOuterJoin());
       mapJoinDescriptor.setTagOrder(tags);
       mapJoinDescriptor.setHandleSkewJoin(false);
@@ -300,8 +306,10 @@
       HiveConf jc = new HiveConf(parseCtx.getConf(),
           GenMRSkewJoinProcessor.class);
 
-      newPlan.setNumMapTasks(HiveConf.getIntVar(jc, HiveConf.ConfVars.HIVESKEWJOINMAPJOINNUMMAPTASK));
-      newPlan.setMinSplitSize(HiveConf.getIntVar(jc, HiveConf.ConfVars.HIVESKEWJOINMAPJOINMINSPLIT));
+      newPlan.setNumMapTasks(HiveConf
+          .getIntVar(jc, HiveConf.ConfVars.HIVESKEWJOINMAPJOINNUMMAPTASK));
+      newPlan
+          .setMinSplitSize(HiveConf.getIntVar(jc, HiveConf.ConfVars.HIVESKEWJOINMAPJOINMINSPLIT));
       newPlan.setInputformat(HiveInputFormat.class.getName());
       Task<? extends Serializable> skewJoinMapJoinTask = TaskFactory.get(
           newPlan, jc);
@@ -317,7 +325,7 @@
     cndTsk.setResolver(new ConditionalResolverSkewJoin());
     cndTsk
         .setResolverCtx(new ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx(
-            bigKeysDirToTaskMap));
+        bigKeysDirToTaskMap));
     List<Task<? extends Serializable>> oldChildTasks = currTask.getChildTasks();
     currTask.setChildTasks(new ArrayList<Task<? extends Serializable>>());
     currTask.addDependentTask(cndTsk);

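Making GenMRSkewJoinProcessor final with a private constructor is the usual remedy for checkstyle's FinalClass and HideUtilityClassConstructor checks on static-only classes: the class can be neither subclassed nor instantiated. The shape of the pattern, using a hypothetical utility class:

    public final class PathUtils {
      private PathUtils() {
        // prevent instantiation: every member is static
      }

      public static String bigKeysDir(String baseDir, int tag) {
        // Illustrative helper only; the real skew-join directory layout
        // lives in GenMRSkewJoinProcessor.
        return baseDir + "/bigkeys-" + tag;
      }
    }
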
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java Tue Feb  9 07:55:30 2010
@@ -40,7 +40,7 @@
   }
 
   /**
-   * create the list of physical plan resolvers
+   * create the list of physical plan resolvers.
    * 
    * @param hiveConf
    */
@@ -52,7 +52,7 @@
   }
 
   /**
-   * invoke all the resolvers one-by-one, and alter the physical plan
+   * invoke all the resolvers one-by-one, and alter the physical plan.
    * 
    * @return PhysicalContext
    * @throws HiveException

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalPlanResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalPlanResolver.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalPlanResolver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalPlanResolver.java Tue Feb  9 07:55:30 2010
@@ -32,6 +32,6 @@
    * @param pctx
    * @return
    */
-  public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException;
+  PhysicalContext resolve(PhysicalContext pctx) throws SemanticException;
 
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinProcFactory.java Tue Feb  9 07:55:30 2010
@@ -33,7 +33,7 @@
 /**
  * Node processor factory for skew join resolver.
  */
-public class SkewJoinProcFactory {
+public final class SkewJoinProcFactory {
 
   public static NodeProcessor getDefaultProc() {
     return new SkewJoinDefaultProcessor();
@@ -43,6 +43,10 @@
     return new SkewJoinJoinProcessor();
   }
 
+  /**
+   * SkewJoinJoinProcessor.
+   *
+   */
   public static class SkewJoinJoinProcessor implements NodeProcessor {
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
@@ -55,10 +59,18 @@
     }
   }
 
+  /**
+   * SkewJoinDefaultProcessor.
+   *
+   */
   public static class SkewJoinDefaultProcessor implements NodeProcessor {
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
         Object... nodeOutputs) throws SemanticException {
       return null;
     }
   }
+
+  private SkewJoinProcFactory() {
+    // prevent instantiation
+  }
 }

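SkewJoinProcFactory follows the factory convention used throughout these files: static getXxxProc() methods hand out stateless NodeProcessor instances that the rule dispatcher invokes while walking the operator tree. A minimal processor under that contract would nest inside the factory like the classes above (the logging body is illustrative only):

    public static class TracingProcessor implements NodeProcessor {
      public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
          Object... nodeOutputs) throws SemanticException {
        // A processor may rewrite the plan through ctx; returning null
        // signals that this node needs no changes.
        System.out.println("visited node: " + nd.getName());
        return null;
      }
    }
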
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SkewJoinResolver.java Tue Feb  9 07:55:30 2010
@@ -56,7 +56,7 @@
   }
 
   /**
-   * Iterator a task with a rule dispatcher for its reducer operator tree,
+   * Iterate over a task with a rule dispatcher for its reducer operator tree.
    */
   class SkewJoinTaskDispatcher implements Dispatcher {
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java Tue Feb  9 07:55:30 2010
@@ -49,7 +49,11 @@
  * expression is then used to figure out whether a particular partition should
  * be scanned or not.
  */
-public class ExprProcFactory {
+public final class ExprProcFactory {
+
+  private ExprProcFactory() {
+    // prevent instantiation
+  }
 
   /**
    * Processor for column expressions.
@@ -79,7 +83,7 @@
   /**
    * If all children are candidates and refer only to one table alias then this
    * expr is a candidate else it is not a candidate but its children could be
-   * final candidates
+   * final candidates.
    */
   public static class GenericFuncExprProcessor implements NodeProcessor {
 
@@ -132,6 +136,10 @@
 
   }
 
+  /**
+   * FieldExprProcessor.
+   *
+   */
   public static class FieldExprProcessor implements NodeProcessor {
 
     @Override
@@ -202,7 +210,7 @@
   }
 
   /**
-   * Generates the partition pruner for the expression tree
+   * Generates the partition pruner for the expression tree.
    * 
    * @param tabAlias
    *          The table alias of the partition table that is being considered

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java Tue Feb  9 07:55:30 2010
@@ -37,7 +37,7 @@
  * that table alias and store a mapping from the table scan operator to that
  * pruner. We call that pruner later during plan generation.
  */
-public class OpProcFactory {
+public final class OpProcFactory {
 
   /**
    * Determines the partition pruner for the filter. This is called only when
@@ -116,7 +116,7 @@
   }
 
   /**
-   * Default processor which just merges its children
+   * Default processor which just merges its children.
    */
   public static class DefaultPPR implements NodeProcessor {
 
@@ -136,4 +136,7 @@
     return new DefaultPPR();
   }
 
+  private OpProcFactory() {
+    // prevent instantiation
+  }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java Tue Feb  9 07:55:30 2010
@@ -33,12 +33,12 @@
 
   /**
    * Map from tablescan operator to partition pruning predicate that is
-   * initialized from the ParseContext
+   * initialized from the ParseContext.
    */
   private final HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner;
 
   /**
-   * Constructor
+   * Constructor.
    */
   public OpWalkerCtx(HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner) {
     this.opToPartPruner = opToPartPruner;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java Tue Feb  9 07:55:30 2010
@@ -122,7 +122,7 @@
     // It cannot contain a non-deterministic function
     if ((expr instanceof ExprNodeGenericFuncDesc)
         && !FunctionRegistry.isDeterministic(((ExprNodeGenericFuncDesc) expr)
-            .getGenericUDF())) {
+        .getGenericUDF())) {
       return false;
     }
 
@@ -157,8 +157,7 @@
    */
   public static PrunedPartitionList prune(Table tab, ExprNodeDesc prunerExpr,
       HiveConf conf, String alias,
-      Map<String, PrunedPartitionList> prunedPartitionsMap)
-      throws HiveException {
+      Map<String, PrunedPartitionList> prunedPartitionsMap) throws HiveException {
     LOG.trace("Started pruning partiton");
     LOG.trace("tabname = " + tab.getName());
     LOG.trace("prune Expression = " + prunerExpr);
@@ -215,7 +214,7 @@
             if (!hasColumnExpr(prunerExpr)) {
               throw new SemanticException(ErrorMsg.NO_PARTITION_PREDICATE
                   .getMsg("for Alias \"" + alias + "\" Table \""
-                      + tab.getName() + "\""));
+                  + tab.getName() + "\""));
             }
           }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcContext.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcContext.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcContext.java Tue Feb  9 07:55:30 2010
@@ -24,15 +24,23 @@
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 
+/**
+ * UnionProcContext.
+ *
+ */
 public class UnionProcContext implements NodeProcessorCtx {
 
+  /**
+   * UnionParseContext.
+   *
+   */
   public static class UnionParseContext {
-    transient private final boolean[] mapOnlySubq;
-    transient private final boolean[] rootTask;
-    transient private final boolean[] mapJoinSubq;
+    private final transient boolean[] mapOnlySubq;
+    private final transient boolean[] rootTask;
+    private final transient boolean[] mapJoinSubq;
 
-    transient private int numInputs;
-    transient private boolean mapJoinQuery;
+    private transient int numInputs;
+    private transient boolean mapJoinQuery;
 
     public UnionParseContext(int numInputs) {
       this.numInputs = numInputs;

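The UnionParseContext hunk only reorders modifiers: the order recommended by the JLS, and enforced by checkstyle's ModifierOrder check, is access modifier first, then static, final, transient, volatile. A hypothetical class showing the corrected order:

    public class Flags {
      // was 'transient private final'; JLS order is 'private final transient'
      private final transient boolean[] mapOnly = new boolean[3];
      private transient int numInputs;
    }
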
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcFactory.java Tue Feb  9 07:55:30 2010
@@ -30,9 +30,13 @@
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 /**
- * Operator factory for union processing
+ * Operator factory for union processing.
  */
-public class UnionProcFactory {
+public final class UnionProcFactory {
+
+  private UnionProcFactory() {
+    // prevent instantiation
+  }
 
   public static int getPositionParent(UnionOperator union, Stack<Node> stack) {
     int pos = 0;
@@ -48,7 +52,7 @@
   }
 
   /**
-   * MapRed subquery followed by Union
+   * MapRed subquery followed by Union.
    */
   public static class MapRedUnion implements NodeProcessor {
 
@@ -74,7 +78,7 @@
   }
 
   /**
-   * Map-only subquery followed by Union
+   * Map-only subquery followed by Union.
    */
   public static class MapUnion implements NodeProcessor {
 
@@ -99,7 +103,7 @@
   }
 
   /**
-   * Map-join subquery followed by Union
+   * Map-join subquery followed by Union.
    */
   public static class MapJoinUnion implements NodeProcessor {
 
@@ -123,7 +127,7 @@
   }
 
   /**
-   * Union subquery followed by Union
+   * Union subquery followed by Union.
    */
   public static class UnknownUnion implements NodeProcessor {
 
@@ -148,7 +152,7 @@
   }
 
   /**
-   * Default processor
+   * Default processor.
    */
   public static class NoUnion implements NodeProcessor {
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcessor.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/unionproc/UnionProcessor.java Tue Feb  9 07:55:30 2010
@@ -47,7 +47,7 @@
 public class UnionProcessor implements Transform {
 
   /**
-   * empty constructor
+   * empty constructor.
    */
   public UnionProcessor() {
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java Tue Feb  9 07:55:30 2010
@@ -36,7 +36,7 @@
   }
 
   /**
-   * Constructor
+   * Constructor.
    * 
    * @param t
    *          Token for the CommonTree Node

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Tue Feb  9 07:55:30 2010
@@ -26,7 +26,6 @@
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
-import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -54,6 +53,10 @@
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 
+/**
+ * BaseSemanticAnalyzer.
+ *
+ */
 public abstract class BaseSemanticAnalyzer {
   protected final Hive db;
   protected final HiveConf conf;
@@ -391,6 +402,10 @@
     return typeStr;
   }
 
+  /**
+   * tableSpec.
+   *
+   */
   public static class tableSpec {
     public String tableName;
     public Table tableHandle;
@@ -436,7 +451,7 @@
           // In case the partition already exists, we need to get the partition
           // data from the metastore
           partHandle = db.getPartition(tableHandle, partSpec, false);
-          if(partHandle == null) {
+          if (partHandle == null) {
             // this doesn't create partition. partition is created in MoveTask
             partHandle = new Partition(tableHandle, partSpec, null);
           }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Tue Feb  9 07:55:30 2010
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -37,30 +36,32 @@
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
-import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
-import org.apache.hadoop.hive.ql.plan.DDLWork;
-import org.apache.hadoop.hive.ql.plan.MsckDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
+import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DescTableDesc;
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
+import org.apache.hadoop.hive.ql.plan.MsckDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.ql.plan.AlterTableDesc.alterTableTypes;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.mapred.TextInputFormat;
 
+/**
+ * DDLSemanticAnalyzer.
+ *
+ */
 public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
-  private static final Log LOG = LogFactory
-      .getLog("hive.ql.parse.DDLSemanticAnalyzer");
+  private static final Log LOG = LogFactory.getLog("hive.ql.parse.DDLSemanticAnalyzer");
   public static final Map<Integer, String> TokenToTypeName = new HashMap<Integer, String>();
   static {
     TokenToTypeName.put(HiveParser.TOK_BOOLEAN, Constants.BOOLEAN_TYPE_NAME);
@@ -73,14 +74,13 @@
     TokenToTypeName.put(HiveParser.TOK_STRING, Constants.STRING_TYPE_NAME);
     TokenToTypeName.put(HiveParser.TOK_DATE, Constants.DATE_TYPE_NAME);
     TokenToTypeName.put(HiveParser.TOK_DATETIME, Constants.DATETIME_TYPE_NAME);
-    TokenToTypeName
-        .put(HiveParser.TOK_TIMESTAMP, Constants.TIMESTAMP_TYPE_NAME);
+    TokenToTypeName.put(HiveParser.TOK_TIMESTAMP, Constants.TIMESTAMP_TYPE_NAME);
   }
 
   public static String getTypeName(int token) throws SemanticException {
     // date, datetime, and timestamp types aren't currently supported
-    if (token == HiveParser.TOK_DATE || token == HiveParser.TOK_DATETIME || 
-        token == HiveParser.TOK_TIMESTAMP ) {
+    if (token == HiveParser.TOK_DATE || token == HiveParser.TOK_DATETIME ||
+        token == HiveParser.TOK_TIMESTAMP) {
       throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
     }
     return TokenToTypeName.get(token);
@@ -117,9 +117,9 @@
     } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAME) {
       analyzeAlterTableRename(ast);
     } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) {
-      analyzeAlterTableModifyCols(ast, alterTableTypes.ADDCOLS);
+      analyzeAlterTableModifyCols(ast, AlterTableTypes.ADDCOLS);
     } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) {
-      analyzeAlterTableModifyCols(ast, alterTableTypes.REPLACECOLS);
+      analyzeAlterTableModifyCols(ast, AlterTableTypes.REPLACECOLS);
     } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) {
       analyzeAlterTableRenameCol(ast);
     } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) {
@@ -156,7 +156,7 @@
     String tableName = unescapeIdentifier(ast.getChild(0).getText());
     HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(1))
         .getChild(0));
-    AlterTableDesc alterTblDesc = new AlterTableDesc(alterTableTypes.ADDPROPS);
+    AlterTableDesc alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDPROPS);
     alterTblDesc.setProps(mapProp);
     alterTblDesc.setOldName(tableName);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
@@ -169,7 +169,7 @@
     HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(1))
         .getChild(0));
     AlterTableDesc alterTblDesc = new AlterTableDesc(
-        alterTableTypes.ADDSERDEPROPS);
+        AlterTableTypes.ADDSERDEPROPS);
     alterTblDesc.setProps(mapProp);
     alterTblDesc.setOldName(tableName);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
@@ -179,7 +179,7 @@
   private void analyzeAlterTableSerde(ASTNode ast) throws SemanticException {
     String tableName = unescapeIdentifier(ast.getChild(0).getText());
     String serdeName = unescapeSQLString(ast.getChild(1).getText());
-    AlterTableDesc alterTblDesc = new AlterTableDesc(alterTableTypes.ADDSERDE);
+    AlterTableDesc alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDSERDE);
     if (ast.getChildCount() > 2) {
       HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(2))
           .getChild(0));
@@ -284,7 +284,7 @@
   }
 
   /**
-   * Create a FetchTask for a given table and thrift ddl schema
+   * Create a FetchTask for a given table and thrift ddl schema.
    * 
    * @param tablename
    *          tablename
@@ -304,7 +304,7 @@
         LazySimpleSerDe.class, TextInputFormat.class,
         IgnoreKeyTextOutputFormat.class, prop), -1);
     fetch.setSerializationNullFormat(" ");
-    return (FetchTask)TaskFactory.get(fetch, conf);
+    return (FetchTask) TaskFactory.get(fetch, conf);
   }
 
   private void analyzeDescribeTable(ASTNode ast) throws SemanticException {
@@ -477,13 +477,13 @@
 
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName,
         unescapeIdentifier(ast.getChild(1).getText()), unescapeIdentifier(ast
-            .getChild(2).getText()), newType, newComment, first, flagCol);
+        .getChild(2).getText()), newType, newComment, first, flagCol);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
 
   private void analyzeAlterTableModifyCols(ASTNode ast,
-      alterTableTypes alterType) throws SemanticException {
+      AlterTableTypes alterType) throws SemanticException {
     String tblName = unescapeIdentifier(ast.getChild(0).getText());
     List<FieldSchema> newCols = getColumns((ASTNode) ast.getChild(1));
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, newCols,
@@ -514,7 +514,6 @@
       throws SemanticException {
 
     String tblName = unescapeIdentifier(ast.getChild(0).getText());
-    ;
     // partition name to value
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java Tue Feb  9 07:55:30 2010
@@ -27,7 +27,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 
 /**
- * List of error messages thrown by the parser
+ * List of error messages thrown by the parser.
  **/
 
 public enum ErrorMsg {
@@ -38,94 +38,113 @@
   // See this page for how MySQL uses SQLState codes:
   // http://dev.mysql.com/doc/refman/5.0/en/connector-j-reference-error-sqlstates.html
 
-  GENERIC_ERROR("Exception while processing"), INVALID_TABLE("Table not found",
-      "42S02"), INVALID_COLUMN("Invalid Column Reference"), INVALID_TABLE_OR_COLUMN(
-      "Invalid Table Alias or Column Reference"), AMBIGUOUS_TABLE_OR_COLUMN(
-      "Ambiguous Table Alias or Column Reference"), INVALID_PARTITION(
-      "Partition not found"), AMBIGUOUS_COLUMN("Ambiguous Column Reference"), AMBIGUOUS_TABLE_ALIAS(
-      "Ambiguous Table Alias"), INVALID_TABLE_ALIAS("Invalid Table Alias"), NO_TABLE_ALIAS(
-      "No Table Alias"), INVALID_FUNCTION("Invalid Function"), INVALID_FUNCTION_SIGNATURE(
-      "Function Argument Type Mismatch"), INVALID_OPERATOR_SIGNATURE(
-      "Operator Argument Type Mismatch"), INVALID_ARGUMENT("Wrong Arguments"), INVALID_ARGUMENT_LENGTH(
-      "Arguments Length Mismatch", "21000"), INVALID_ARGUMENT_TYPE(
-      "Argument Type Mismatch"), INVALID_JOIN_CONDITION_1(
-      "Both Left and Right Aliases Encountered in Join"), INVALID_JOIN_CONDITION_2(
-      "Neither Left nor Right Aliases Encountered in Join"), INVALID_JOIN_CONDITION_3(
-      "OR not supported in Join currently"), INVALID_TRANSFORM(
-      "TRANSFORM with Other Select Columns not Supported"), DUPLICATE_GROUPBY_KEY(
-      "Repeated Key in Group By"), UNSUPPORTED_MULTIPLE_DISTINCTS(
-      "DISTINCT on Different Columns not Supported"), NO_SUBQUERY_ALIAS(
-      "No Alias For Subquery"), NO_INSERT_INSUBQUERY(
-      "Cannot insert in a Subquery. Inserting to table "), NON_KEY_EXPR_IN_GROUPBY(
-      "Expression Not In Group By Key"), INVALID_XPATH(
-      "General . and [] Operators are Not Supported"), INVALID_PATH(
-      "Invalid Path"), ILLEGAL_PATH("Path is not legal"), INVALID_NUMERICAL_CONSTANT(
-      "Invalid Numerical Constant"), INVALID_ARRAYINDEX_CONSTANT(
-      "Non Constant Expressions for Array Indexes not Supported"), INVALID_MAPINDEX_CONSTANT(
-      "Non Constant Expression for Map Indexes not Supported"), INVALID_MAPINDEX_TYPE(
-      "Map Key Type does not Match Index Expression Type"), NON_COLLECTION_TYPE(
-      "[] not Valid on Non Collection Types"), SELECT_DISTINCT_WITH_GROUPBY(
-      "SELECT DISTINCT and GROUP BY can not be in the same query"), COLUMN_REPEATED_IN_PARTITIONING_COLS(
-      "Column repeated in partitioning columns"), DUPLICATE_COLUMN_NAMES(
-      "Duplicate column name:"), INVALID_BUCKET_NUMBER(
-      "Bucket number should be bigger than zero"), COLUMN_REPEATED_IN_CLUSTER_SORT(
-      "Same column cannot appear in cluster and sort by"), SAMPLE_RESTRICTION(
-      "Cannot Sample on More Than Two Columns"), SAMPLE_COLUMN_NOT_FOUND(
-      "Sample Column Not Found"), NO_PARTITION_PREDICATE(
-      "No Partition Predicate Found"), INVALID_DOT(
-      ". operator is only supported on struct or list of struct types"), INVALID_TBL_DDL_SERDE(
-      "Either list of columns or a custom serializer should be specified"), TARGET_TABLE_COLUMN_MISMATCH(
-      "Cannot insert into target table because column number/types are different"), TABLE_ALIAS_NOT_ALLOWED(
-      "Table Alias not Allowed in Sampling Clause"), CLUSTERBY_DISTRIBUTEBY_CONFLICT(
-      "Cannot have both Cluster By and Distribute By Clauses"), ORDERBY_DISTRIBUTEBY_CONFLICT(
-      "Cannot have both Order By and Distribute By Clauses"), CLUSTERBY_SORTBY_CONFLICT(
-      "Cannot have both Cluster By and Sort By Clauses"), ORDERBY_SORTBY_CONFLICT(
-      "Cannot have both Order By and Sort By Clauses"), CLUSTERBY_ORDERBY_CONFLICT(
-      "Cannot have both Cluster By and Order By Clauses"), NO_LIMIT_WITH_ORDERBY(
-      "In strict mode, limit must be specified if ORDER BY is present"), NO_CARTESIAN_PRODUCT(
-      "In strict mode, cartesian product is not allowed. If you really want to perform the operation, set hive.mapred.mode=nonstrict"), UNION_NOTIN_SUBQ(
-      "Top level Union is not supported currently; use a subquery for the union"), INVALID_INPUT_FORMAT_TYPE(
-      "Input Format must implement InputFormat"), INVALID_OUTPUT_FORMAT_TYPE(
-      "Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat"), NO_VALID_PARTN(
-      "The query does not reference any valid partition. To run this query, set hive.mapred.mode=nonstrict"), NO_OUTER_MAPJOIN(
-      "Map Join cannot be performed with Outer join"), INVALID_MAPJOIN_HINT(
-      "neither table specified as map-table"), INVALID_MAPJOIN_TABLE(
-      "result of a union cannot be a map table"), NON_BUCKETED_TABLE(
-      "Sampling Expression Needed for Non-Bucketed Table"), BUCKETED_NUMBERATOR_BIGGER_DENOMINATOR(
-      "Numberator should not be bigger than denaminator in sample clause for Table"), NEED_PARTITION_ERROR(
-      "need to specify partition columns because the destination table is partitioned."), CTAS_CTLT_COEXISTENCE(
-      "Create table command does not allow LIKE and AS-SELECT in the same command"), LINES_TERMINATED_BY_NON_NEWLINE(
-      "LINES TERMINATED BY only supports newline '\\n' right now"), CTAS_COLLST_COEXISTENCE(
-      "Create table as select command cannot specify the list of columns for the target table."), CTLT_COLLST_COEXISTENCE(
-      "Create table like command cannot specify the list of columns for the target table."), INVALID_SELECT_SCHEMA(
-      "Cannot derive schema from the select-clause."), CTAS_PARCOL_COEXISTENCE(
-      "CREATE-TABLE-AS-SELECT does not support partitioning in the target table."), CTAS_MULTI_LOADFILE(
-      "CREATE-TABLE-AS-SELECT results in multiple file load."), CTAS_EXTTBL_COEXISTENCE(
-      "CREATE-TABLE-AS-SELECT cannot create external table."), TABLE_ALREADY_EXISTS(
-      "Table already exists:", "42S02"), COLUMN_ALIAS_ALREADY_EXISTS(
-      "Column alias already exists:", "42S02"), UDTF_MULTIPLE_EXPR(
-      "Only a single expression in the SELECT clause is supported with UDTF's"), UDTF_REQUIRE_AS(
-      "UDTF's require an AS clause"), UDTF_NO_GROUP_BY(
-      "GROUP BY is not supported with a UDTF in the SELECT clause"), UDTF_NO_SORT_BY(
-      "SORT BY is not supported with a UDTF in the SELECT clause"), UDTF_NO_CLUSTER_BY(
-      "CLUSTER BY is not supported with a UDTF in the SELECT clause"), UDTF_NO_DISTRIBUTE_BY(
-      "DISTRUBTE BY is not supported with a UDTF in the SELECT clause"), UDTF_INVALID_LOCATION(
-      "UDTF's are not supported outside the SELECT clause, nor nested in expressions"), UDTF_LATERAL_VIEW(
-      "UDTF's cannot be in a select expression when there is a lateral view"), UDTF_ALIAS_MISMATCH(
-      "The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF"), LATERAL_VIEW_WITH_JOIN(
-      "Join with a lateral view is not supported"), LATERAL_VIEW_INVALID_CHILD(
-      "Lateral view AST with invalid child"), OUTPUT_SPECIFIED_MULTIPLE_TIMES(
-      "The same output cannot be present multiple times: "), INVALID_AS(
-      "AS clause has an invalid number of aliases"), VIEW_COL_MISMATCH(
-      "The number of columns produced by the SELECT clause does not match the number of column names specified by CREATE VIEW"), DML_AGAINST_VIEW(
-      "A view cannot be used as target table for LOAD or INSERT"), UNSUPPORTED_TYPE(
-      "DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use STRING instead.");
+  GENERIC_ERROR("Exception while processing"),
+  INVALID_TABLE("Table not found", "42S02"),
+  INVALID_COLUMN("Invalid Column Reference"),
+  INVALID_TABLE_OR_COLUMN("Invalid Table Alias or Column Reference"),
+  AMBIGUOUS_TABLE_OR_COLUMN("Ambiguous Table Alias or Column Reference"),
+  INVALID_PARTITION("Partition not found"),
+  AMBIGUOUS_COLUMN("Ambiguous Column Reference"),
+  AMBIGUOUS_TABLE_ALIAS("Ambiguous Table Alias"),
+  INVALID_TABLE_ALIAS("Invalid Table Alias"),
+  NO_TABLE_ALIAS("No Table Alias"),
+  INVALID_FUNCTION("Invalid Function"),
+  INVALID_FUNCTION_SIGNATURE("Function Argument Type Mismatch"),
+  INVALID_OPERATOR_SIGNATURE("Operator Argument Type Mismatch"),
+  INVALID_ARGUMENT("Wrong Arguments"),
+  INVALID_ARGUMENT_LENGTH("Arguments Length Mismatch", "21000"),
+  INVALID_ARGUMENT_TYPE("Argument Type Mismatch"),
+  INVALID_JOIN_CONDITION_1("Both Left and Right Aliases Encountered in Join"),
+  INVALID_JOIN_CONDITION_2("Neither Left nor Right Aliases Encountered in Join"),
+  INVALID_JOIN_CONDITION_3("OR not supported in Join currently"),
+  INVALID_TRANSFORM("TRANSFORM with Other Select Columns not Supported"),
+  DUPLICATE_GROUPBY_KEY("Repeated Key in Group By"),
+  UNSUPPORTED_MULTIPLE_DISTINCTS("DISTINCT on Different Columns not Supported"),
+  NO_SUBQUERY_ALIAS("No Alias For Subquery"),
+  NO_INSERT_INSUBQUERY("Cannot insert in a Subquery. Inserting to table "),
+  NON_KEY_EXPR_IN_GROUPBY("Expression Not In Group By Key"),
+  INVALID_XPATH("General . and [] Operators are Not Supported"),
+  INVALID_PATH("Invalid Path"), ILLEGAL_PATH("Path is not legal"),
+  INVALID_NUMERICAL_CONSTANT("Invalid Numerical Constant"),
+  INVALID_ARRAYINDEX_CONSTANT("Non Constant Expressions for Array Indexes not Supported"),
+  INVALID_MAPINDEX_CONSTANT("Non Constant Expression for Map Indexes not Supported"),
+  INVALID_MAPINDEX_TYPE("Map Key Type does not Match Index Expression Type"),
+  NON_COLLECTION_TYPE("[] not Valid on Non Collection Types"),
+  SELECT_DISTINCT_WITH_GROUPBY("SELECT DISTINCT and GROUP BY can not be in the same query"),
+  COLUMN_REPEATED_IN_PARTITIONING_COLS("Column repeated in partitioning columns"),
+  DUPLICATE_COLUMN_NAMES("Duplicate column name:"),
+  INVALID_BUCKET_NUMBER("Bucket number should be bigger than zero"),
+  COLUMN_REPEATED_IN_CLUSTER_SORT("Same column cannot appear in cluster and sort by"),
+  SAMPLE_RESTRICTION("Cannot Sample on More Than Two Columns"),
+  SAMPLE_COLUMN_NOT_FOUND("Sample Column Not Found"),
+  NO_PARTITION_PREDICATE("No Partition Predicate Found"),
+  INVALID_DOT(". operator is only supported on struct or list of struct types"),
+  INVALID_TBL_DDL_SERDE("Either list of columns or a custom serializer should be specified"),
+  TARGET_TABLE_COLUMN_MISMATCH(
+      "Cannot insert into target table because column number/types are different"),
+  TABLE_ALIAS_NOT_ALLOWED("Table Alias not Allowed in Sampling Clause"),
+  CLUSTERBY_DISTRIBUTEBY_CONFLICT("Cannot have both Cluster By and Distribute By Clauses"),
+  ORDERBY_DISTRIBUTEBY_CONFLICT("Cannot have both Order By and Distribute By Clauses"),
+  CLUSTERBY_SORTBY_CONFLICT("Cannot have both Cluster By and Sort By Clauses"),
+  ORDERBY_SORTBY_CONFLICT("Cannot have both Order By and Sort By Clauses"),
+  CLUSTERBY_ORDERBY_CONFLICT("Cannot have both Cluster By and Order By Clauses"),
+  NO_LIMIT_WITH_ORDERBY("In strict mode, limit must be specified if ORDER BY is present"),
+  NO_CARTESIAN_PRODUCT("In strict mode, cartesian product is not allowed. "
+      + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"),
+  UNION_NOTIN_SUBQ("Top level Union is not supported currently; use a subquery for the union"),
+  INVALID_INPUT_FORMAT_TYPE("Input Format must implement InputFormat"),
+  INVALID_OUTPUT_FORMAT_TYPE("Output Format must implement HiveOutputFormat, "
+      + "otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat"),
+  NO_VALID_PARTN("The query does not reference any valid partition. "
+      + "To run this query, set hive.mapred.mode=nonstrict"),
+  NO_OUTER_MAPJOIN("Map Join cannot be performed with Outer join"),
+  INVALID_MAPJOIN_HINT("neither table specified as map-table"),
+  INVALID_MAPJOIN_TABLE("result of a union cannot be a map table"),
+  NON_BUCKETED_TABLE("Sampling Expression Needed for Non-Bucketed Table"),
+  BUCKETED_NUMBERATOR_BIGGER_DENOMINATOR("Numerator should not be bigger than "
+      + "denominator in sample clause for Table"),
+  NEED_PARTITION_ERROR("need to specify partition columns because the destination "
+      + "table is partitioned."),
+  CTAS_CTLT_COEXISTENCE("Create table command does not allow LIKE and AS-SELECT in "
+      + "the same command"),
+  LINES_TERMINATED_BY_NON_NEWLINE("LINES TERMINATED BY only supports newline '\\n' right now"),
+  CTAS_COLLST_COEXISTENCE("Create table as select command cannot specify the list of columns "
+      + "for the target table."),
+  CTLT_COLLST_COEXISTENCE("Create table like command cannot specify the list of columns for "
+      + "the target table."),
+  INVALID_SELECT_SCHEMA("Cannot derive schema from the select-clause."),
+  CTAS_PARCOL_COEXISTENCE("CREATE-TABLE-AS-SELECT does not support partitioning in the target "
+      + "table."),
+  CTAS_MULTI_LOADFILE("CREATE-TABLE-AS-SELECT results in multiple file load."),
+  CTAS_EXTTBL_COEXISTENCE("CREATE-TABLE-AS-SELECT cannot create external table."),
+  TABLE_ALREADY_EXISTS("Table already exists:", "42S02"),
+  COLUMN_ALIAS_ALREADY_EXISTS("Column alias already exists:", "42S02"),
+  UDTF_MULTIPLE_EXPR("Only a single expression in the SELECT clause is supported with UDTF's"),
+  UDTF_REQUIRE_AS("UDTF's require an AS clause"),
+  UDTF_NO_GROUP_BY("GROUP BY is not supported with a UDTF in the SELECT clause"),
+  UDTF_NO_SORT_BY("SORT BY is not supported with a UDTF in the SELECT clause"),
+  UDTF_NO_CLUSTER_BY("CLUSTER BY is not supported with a UDTF in the SELECT clause"),
+  UDTF_NO_DISTRIBUTE_BY("DISTRIBUTE BY is not supported with a UDTF in the SELECT clause"),
+  UDTF_INVALID_LOCATION("UDTF's are not supported outside the SELECT clause, nor nested "
+      + "in expressions"),
+  UDTF_LATERAL_VIEW("UDTF's cannot be in a select expression when there is a lateral view"),
+  UDTF_ALIAS_MISMATCH("The number of aliases supplied in the AS clause does not match the "
+      + "number of columns output by the UDTF"),
+  LATERAL_VIEW_WITH_JOIN("Join with a lateral view is not supported"),
+  LATERAL_VIEW_INVALID_CHILD("Lateral view AST with invalid child"),
+  OUTPUT_SPECIFIED_MULTIPLE_TIMES("The same output cannot be present multiple times: "),
+  INVALID_AS("AS clause has an invalid number of aliases"),
+  VIEW_COL_MISMATCH("The number of columns produced by the SELECT clause does not match the "
+      + "number of column names specified by CREATE VIEW"),
+  DML_AGAINST_VIEW("A view cannot be used as target table for LOAD or INSERT"),
+  UNSUPPORTED_TYPE("DATE, DATETIME, and TIMESTAMP types aren't supported yet. Please use "
+      + "STRING instead.");
+
   private String mesg;
-  private String SQLState;
+  private String sqlState;
 
-  private static char SPACE = ' ';
-  private static Pattern ERROR_MESSAGE_PATTERN = Pattern
-      .compile(".*line [0-9]+:[0-9]+ (.*)");
+  private static final char SPACE = ' ';
+  private static final Pattern ERROR_MESSAGE_PATTERN = Pattern.compile(".*line [0-9]+:[0-9]+ (.*)");
   private static Map<String, ErrorMsg> mesgToErrorMsgMap = new HashMap<String, ErrorMsg>();
   private static int minMesgLength = -1;
 
@@ -206,9 +225,9 @@
     this(mesg, "42000");
   }
 
-  ErrorMsg(String mesg, String SQLState) {
+  ErrorMsg(String mesg, String sqlState) {
     this.mesg = mesg;
-    this.SQLState = SQLState;
+    this.sqlState = sqlState;
   }
 
   private static int getLine(ASTNode tree) {
@@ -295,6 +314,6 @@
   }
 
   public String getSQLState() {
-    return SQLState;
+    return sqlState;
   }
 }

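The ErrorMsg rewrite is purely presentational: one enum constant per line, long messages split across concatenated string literals, and the SQLState field renamed to sqlState to satisfy member-naming rules. A condensed sketch of the same enum shape, reduced to two constants for illustration:

    public enum ExampleMsg {
      GENERIC_ERROR("Exception while processing"),  // gets default SQLState
      INVALID_TABLE("Table not found", "42S02");

      private final String mesg;
      private final String sqlState;

      ExampleMsg(String mesg) {
        this(mesg, "42000");
      }

      ExampleMsg(String mesg, String sqlState) {
        this.mesg = mesg;
        this.sqlState = sqlState;
      }

      public String getMsg() {
        return mesg;
      }

      public String getSQLState() {
        return sqlState;
      }
    }
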
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Tue Feb  9 07:55:30 2010
@@ -28,6 +28,10 @@
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 
+/**
+ * ExplainSemanticAnalyzer.
+ *
+ */
 public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
 
   public ExplainSemanticAnalyzer(HiveConf conf) throws SemanticException {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java Tue Feb  9 07:55:30 2010
@@ -22,10 +22,14 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
-import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
+import org.apache.hadoop.hive.ql.plan.FunctionWork;
 
+/**
+ * FunctionSemanticAnalyzer.
+ *
+ */
 public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer {
   private static final Log LOG = LogFactory
       .getLog("hive.ql.parse.FunctionSemanticAnalyzer");

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java Tue Feb  9 07:55:30 2010
@@ -26,12 +26,12 @@
 import org.apache.hadoop.hive.ql.lib.Node;
 
 /**
- * Walks the operator tree in pre order fashion
+ * Walks the operator tree in pre order fashion.
  */
 public class GenMapRedWalker extends DefaultGraphWalker {
 
   /**
-   * constructor of the walker - the dispatcher is passed
+   * constructor of the walker - the dispatcher is passed.
    * 
    * @param disp
    *          the dispatcher to be called for each node visited
@@ -41,7 +41,7 @@
   }
 
   /**
-   * Walk the given operator
+   * Walk the given operator.
    * 
    * @param nd
    *          operator being walked
@@ -57,7 +57,7 @@
     // kids of reduce sink operator need not be traversed again
     if ((children == null)
         || ((nd instanceof ReduceSinkOperator) && (getDispatchedList()
-            .containsAll(children)))) {
+        .containsAll(children)))) {
       opStack.pop();
       return;
     }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java Tue Feb  9 07:55:30 2010
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-
 /**
  * Join conditions Descriptor implementation.
  * 
@@ -39,7 +38,7 @@
   }
 
   /**
-   * Constructor for a UNIQUEJOIN cond
+   * Constructor for a UNIQUEJOIN cond.
    * 
    * @param p
    *          true if table is preserved, false otherwise

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinType.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinType.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinType.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinType.java Tue Feb  9 07:55:30 2010
@@ -18,6 +18,10 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+/**
+ * JoinType.
+ *
+ */
 public enum JoinType {
   INNER, LEFTOUTER, RIGHTOUTER, FULLOUTER, UNIQUE, LEFTSEMI
 };

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java Tue Feb  9 07:55:30 2010
@@ -39,10 +39,14 @@
 import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
 import org.apache.hadoop.hive.ql.plan.MoveWork;
 
+/**
+ * LoadSemanticAnalyzer.
+ *
+ */
 public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
 
-  boolean isLocal;
-  boolean isOverWrite;
+  private boolean isLocal;
+  private boolean isOverWrite;
 
   public LoadSemanticAnalyzer(HiveConf conf) throws SemanticException {
     super(conf);
@@ -119,7 +123,7 @@
     try {
       FileStatus[] srcs = matchFilesOrDir(FileSystem.get(fromURI, conf),
           new Path(fromURI.getScheme(), fromURI.getAuthority(), fromURI
-              .getPath()));
+          .getPath()));
 
       if (srcs == null || srcs.length == 0) {
         throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(ast,
@@ -142,7 +146,7 @@
     // reject different scheme/authority in other cases.
     if (!isLocal
         && (!StringUtils.equals(fromURI.getScheme(), toURI.getScheme()) || !StringUtils
-            .equals(fromURI.getAuthority(), toURI.getAuthority()))) {
+        .equals(fromURI.getAuthority(), toURI.getAuthority()))) {
       String reason = "Move from: " + fromURI.toString() + " to: "
           + toURI.toString() + " is not valid. "
           + "Please check that values for params \"default.fs.name\" and "
@@ -153,12 +157,14 @@
 
   @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
-    isLocal = isOverWrite = false;
-    Tree from_t = ast.getChild(0);
-    Tree table_t = ast.getChild(1);
+    isLocal = false;
+    isOverWrite = false;
+    Tree fromTree = ast.getChild(0);
+    Tree tableTree = ast.getChild(1);
 
     if (ast.getChildCount() == 4) {
-      isOverWrite = isLocal = true;
+      isLocal = true;
+      isOverWrite = true;
     }
 
     if (ast.getChildCount() == 3) {
@@ -172,18 +178,18 @@
     // initialize load path
     URI fromURI;
     try {
-      String fromPath = stripQuotes(from_t.getText());
+      String fromPath = stripQuotes(fromTree.getText());
       fromURI = initializeFromURI(fromPath);
     } catch (IOException e) {
-      throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(from_t, e
+      throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(fromTree, e
           .getMessage()), e);
     } catch (URISyntaxException e) {
-      throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(from_t, e
+      throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(fromTree, e
           .getMessage()), e);
     }
 
     // initialize destination table/partition
-    tableSpec ts = new tableSpec(db, conf, (ASTNode) table_t);
+    tableSpec ts = new tableSpec(db, conf, (ASTNode) tableTree);
 
     if (ts.tableHandle.isView()) {
       throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg());
@@ -198,7 +204,7 @@
     }
 
     // make sure the arguments make sense
-    applyConstraints(fromURI, toURI, from_t, isLocal);
+    applyConstraints(fromURI, toURI, fromTree, isLocal);
 
     Task<? extends Serializable> rTask = null;
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Tue Feb  9 07:55:30 2010
@@ -67,8 +67,8 @@
   private int destTableId;
   private UnionProcContext uCtx;
   private List<MapJoinOperator> listMapJoinOpsNoReducer; // list of map join
-                                                         // operators with no
-                                                         // reducer
+  // operators with no
+  // reducer
   private Map<GroupByOperator, Set<String>> groupOpToInputTables;
   private Map<String, PrunedPartitionList> prunedPartitions;
 
@@ -379,7 +379,7 @@
   }
 
   /**
-   * Sets the hasNonPartCols flag
+   * Sets the hasNonPartCols flag.
    * 
    * @param val
    */
@@ -388,7 +388,7 @@
   }
 
   /**
-   * Gets the value of the hasNonPartCols flag
+   * Gets the value of the hasNonPartCols flag.
    */
   public boolean getHasNonPartCols() {
     return hasNonPartCols;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java Tue Feb  9 07:55:30 2010
@@ -37,9 +37,13 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.Context;
 
+/**
+ * ParseDriver.
+ *
+ */
 public class ParseDriver {
 
-  static final private Log LOG = LogFactory.getLog("hive.ql.parse.ParseDriver");
+  private static final Log LOG = LogFactory.getLog("hive.ql.parse.ParseDriver");
 
   private static HashMap<String, String> xlateMap;
   static {
@@ -197,27 +201,21 @@
     return ret;
   }
 
-  // This class provides and implementation for a case insensitive token checker
-  // for
-  // the lexical analysis part of antlr. By converting the token stream into
-  // upper case
-  // at the time when lexical rules are checked, this class ensures that the
-  // lexical rules
-  // need to just match the token with upper case letters as opposed to
-  // combination of upper
-  // case and lower case characteres. This is purely used for matching lexical
-  // rules. The
-  // actual token text is stored in the same way as the user input without
-  // actually converting
-  // it into an upper case. The token values are generated by the consume()
-  // function of the
-  // super class ANTLRStringStream. The LA() function is the lookahead funtion
-  // and is purely
-  // used for matching lexical rules. This also means that the grammar will only
-  // accept
-  // capitalized tokens in case it is run from other tools like antlrworks which
-  // do not
-  // have the ANTLRNoCaseStringStream implementation.
+  /**
+   * ANTLRNoCaseStringStream.
+   * 
+   */
+  //This class provides an implementation for a case insensitive token checker
+  //for the lexical analysis part of antlr. By converting the token stream into
+  //upper case at the time when lexical rules are checked, this class ensures that the
+  //lexical rules need to just match the token with upper case letters as opposed to
+  //a combination of upper case and lower case characters. This is purely used for matching lexical
+  //rules. The actual token text is stored in the same way as the user input without
+  //actually converting it into an upper case. The token values are generated by the consume()
+  //function of the super class ANTLRStringStream. The LA() function is the lookahead function
+  //and is purely used for matching lexical rules. This also means that the grammar will only
+  //accept capitalized tokens in case it is run from other tools like antlrworks which
+  //do not have the ANTLRNoCaseStringStream implementation.
   public class ANTLRNoCaseStringStream extends ANTLRStringStream {
 
     public ANTLRNoCaseStringStream(String input) {
@@ -237,6 +235,10 @@
     }
   }
 
+  /**
+   * HiveLexerX.
+   *
+   */
   public class HiveLexerX extends HiveLexer {
 
     private final ArrayList<ParseError> errors;
@@ -281,6 +283,10 @@
 
   }
 
+  /**
+   * HiveParserX.
+   *
+   */
   public class HiveParserX extends HiveParser {
 
     private final ArrayList<ParseError> errors;
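
The comment relocated above describes a standard ANTLR 3 idiom: upper-case only the lookahead that lexical rules match against, so the grammar can be written with capitalized tokens while the recorded token text keeps the user's original casing. A minimal sketch of that override against the ANTLR 3 runtime (the usual pattern the comment describes, not a copy of Hive's inner class):

    import org.antlr.runtime.ANTLRStringStream;
    import org.antlr.runtime.CharStream;

    public class NoCaseStringStreamSketch extends ANTLRStringStream {
      public NoCaseStringStreamSketch(String input) {
        super(input);
      }

      // LA() feeds lexical-rule matching only; consume() still records the
      // original characters, so token text is stored exactly as typed.
      @Override
      public int LA(int i) {
        int c = super.LA(i);
        if (c == 0 || c == CharStream.EOF) {
          return c; // nothing to normalize
        }
        return Character.toUpperCase((char) c);
      }
    }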

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseException.java Tue Feb  9 07:55:30 2010
@@ -20,6 +20,10 @@
 
 import java.util.ArrayList;
 
+/**
+ * ParseException.
+ *
+ */
 public class ParseException extends Exception {
 
   private static final long serialVersionUID = 1L;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java Tue Feb  9 07:55:30 2010
@@ -19,27 +19,28 @@
 package org.apache.hadoop.hive.ql.parse;
 
 /**
- * Library of utility functions used in the parse code
+ * Library of utility functions used in the parse code.
  * 
  */
-public class ParseUtils {
+public final class ParseUtils {
 
   /**
-   * Tests whether the parse tree node is a join token
+   * Tests whether the parse tree node is a join token.
    * 
    * @param node
    *          The parse tree node
    * @return boolean
    */
   public static boolean isJoinToken(ASTNode node) {
-    if ((node.getToken().getType() == HiveParser.TOK_JOIN)
-        || (node.getToken().getType() == HiveParser.TOK_LEFTOUTERJOIN)
-        || (node.getToken().getType() == HiveParser.TOK_RIGHTOUTERJOIN)
-        || (node.getToken().getType() == HiveParser.TOK_FULLOUTERJOIN)) {
+    switch (node.getToken().getType()) {
+    case HiveParser.TOK_JOIN:
+    case HiveParser.TOK_LEFTOUTERJOIN:
+    case HiveParser.TOK_RIGHTOUTERJOIN:
+    case HiveParser.TOK_FULLOUTERJOIN:
       return true;
+    default:
+      return false;
     }
-
-    return false;
   }
 
   /**
@@ -57,4 +58,8 @@
     }
     return tree;
   }
+
+  private ParseUtils() {
+    // prevent instantiation
+  }
 }
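
For context, a predicate like isJoinToken is typically applied while walking the parse tree. A hypothetical caller (not part of this patch) that counts join tokens under a node, using only ASTNode's CommonTree accessors:

    // Illustrative only: recursively count join tokens in a subtree.
    static int countJoinTokens(ASTNode node) {
      int count = ParseUtils.isJoinToken(node) ? 1 : 0;
      for (int i = 0; i < node.getChildCount(); i++) {
        count += countJoinTokens((ASTNode) node.getChild(i));
      }
      return count;
    }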

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrintOpTreeProcessor.java Tue Feb  9 07:55:30 2010
@@ -28,6 +28,10 @@
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 
+/**
+ * PrintOpTreeProcessor.
+ *
+ */
 public class PrintOpTreeProcessor implements NodeProcessor {
 
   private final PrintStream out;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java Tue Feb  9 07:55:30 2010
@@ -49,7 +49,7 @@
   }
 
   /**
-   * get confirmed partitions
+   * get confirmed partitions.
    * 
    * @return confirmedPartns confirmed partitions
    */
@@ -58,7 +58,7 @@
   }
 
   /**
-   * get unknown partitions
+   * get unknown partitions.
    * 
    * @return unknownPartns unknown partitions
    */
@@ -67,7 +67,7 @@
   }
 
   /**
-   * get denied partitions
+   * get denied partitions.
    * 
    * @return deniedPartns denied partitions
    */
@@ -76,7 +76,7 @@
   }
 
   /**
-   * set confirmed partitions
+   * set confirmed partitions.
    * 
    * @param confirmedPartns
    *          confirmed partitions
@@ -86,7 +86,7 @@
   }
 
   /**
-   * set unknown partitions
+   * set unknown partitions.
    * 
    * @param unknownPartns
    *          unknown partitions
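
(For context: in partition pruning, confirmed partitions provably satisfy the pruning predicate, unknown partitions could not be decided at compile time and must still be read, and denied partitions are provably excluded.)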

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Tue Feb  9 07:55:30 2010
@@ -26,7 +26,7 @@
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
 
 /**
- * Implementation of the query block
+ * Implementation of the query block.
  * 
  **/
 
@@ -46,7 +46,8 @@
   private String id;
   private boolean isQuery;
   private CreateTableDesc tblDesc = null; // table descriptor of the final
-                                          // results
+
+  // results
 
   public void print(String msg) {
     LOG.info(msg + "alias=" + qbp.getAlias());

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBExpr.java Tue Feb  9 07:55:30 2010
@@ -22,7 +22,7 @@
 import org.apache.commons.logging.LogFactory;
 
 /**
- * Implementation of the query block expression
+ * Implementation of the query block expression.
  * 
  **/
 
@@ -30,6 +30,10 @@
 
   private static final Log LOG = LogFactory.getLog("hive.ql.parse.QBExpr");
 
+  /**
+   * Opcode.
+   *
+   */
   public static enum Opcode {
     NULLOP, UNION, INTERSECT, DIFF
   };

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java Tue Feb  9 07:55:30 2010
@@ -24,7 +24,7 @@
 import java.util.Map.Entry;
 
 /**
- * Internal representation of the join tree
+ * Internal representation of the join tree.
  * 
  */
 public class QBJoinTree {
@@ -56,7 +56,7 @@
   private List<String> streamAliases;
 
   /**
-   * constructor
+   * constructor.
    */
   public QBJoinTree() {
     nextTag = 0;
@@ -66,7 +66,7 @@
   }
 
   /**
-   * returns left alias if any - this is used for merging later on
+   * returns left alias if any - this is used for merging later on.
    * 
    * @return left alias if any
    */
@@ -75,7 +75,7 @@
   }
 
   /**
-   * set left alias for the join expression
+   * set left alias for the join expression.
    * 
    * @param leftAlias
    *          String

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java Tue Feb  9 07:55:30 2010
@@ -26,7 +26,7 @@
 import org.apache.hadoop.hive.ql.metadata.Table;
 
 /**
- * Implementation of the metadata information related to a query block
+ * Implementation of the metadata information related to a query block.
  * 
  **/
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java Tue Feb  9 07:55:30 2010
@@ -30,7 +30,7 @@
 import org.apache.commons.logging.LogFactory;
 
 /**
- * Implementation of the parse information related to a query block
+ * Implementation of the parse information related to a query block.
  * 
  **/
 
@@ -63,7 +63,7 @@
   private final HashMap<String, ASTNode> destToSortby;
 
   /**
-   * Maping from table/subquery aliases to all the associated lateral view nodes
+   * Mapping from table/subquery aliases to all the associated lateral view nodes.
    */
   private final HashMap<String, ArrayList<ASTNode>> aliasToLateralViews;
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java?rev=907950&r1=907949&r2=907950&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java Tue Feb  9 07:55:30 2010
@@ -18,11 +18,11 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
-import java.util.ArrayList;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -30,7 +30,7 @@
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 
 /**
- * Implementation of the Row Resolver
+ * Implementation of the Row Resolver.
  * 
  */
 public class RowResolver {
@@ -132,8 +132,7 @@
    * @return ColumnInfo
    * @throws SemanticException
    */
-  public ColumnInfo get(String tab_alias, String col_alias)
-      throws SemanticException {
+  public ColumnInfo get(String tab_alias, String col_alias) throws SemanticException {
     col_alias = col_alias.toLowerCase();
     ColumnInfo ret = null;
 
@@ -167,11 +166,11 @@
     return rowSchema.getSignature();
   }
 
-  public HashMap<String, ColumnInfo> getFieldMap(String tab_alias) {
-    if (tab_alias == null) {
+  public HashMap<String, ColumnInfo> getFieldMap(String tabAlias) {
+    if (tabAlias == null) {
       return rslvMap.get(null);
     } else {
-      return rslvMap.get(tab_alias.toLowerCase());
+      return rslvMap.get(tabAlias.toLowerCase());
     }
   }
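
Both get() and getFieldMap() above normalize aliases to lower case before the map lookup. A tiny sketch of that convention in isolation (names are illustrative, not Hive's):

    import java.util.HashMap;
    import java.util.Map;

    public class AliasLookupSketch {
      private final Map<String, String> byAlias = new HashMap<String, String>();

      // Keys are stored lower-cased, so "T1" and "t1" resolve to the same
      // entry; this matches SQL's case-insensitive table aliases.
      public void put(String alias, String value) {
        byAlias.put(alias.toLowerCase(), value);
      }

      public String get(String alias) {
        return (alias == null) ? byAlias.get(null) : byAlias.get(alias.toLowerCase());
      }
    }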