Posted to commits@hive.apache.org by zs...@apache.org on 2010/01/25 19:49:05 UTC

svn commit: r902921 [7/26] - in /hadoop/hive/trunk: ./ contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/ contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoo...

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java Mon Jan 25 18:48:58 2010
@@ -24,44 +24,44 @@
  * Join conditions Descriptor implementation.
  * 
  */
-public class joinCond implements Serializable {
+public class JoinCondDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private int left;
   private int right;
   private int type;
   private boolean preserved;
 
-  public joinCond() {
+  public JoinCondDesc() {
   }
 
-  public joinCond(int left, int right, int type) {
+  public JoinCondDesc(int left, int right, int type) {
     this.left = left;
     this.right = right;
     this.type = type;
   }
 
-  public joinCond(org.apache.hadoop.hive.ql.parse.joinCond condn) {
+  public JoinCondDesc(org.apache.hadoop.hive.ql.parse.JoinCond condn) {
     left = condn.getLeft();
     right = condn.getRight();
     preserved = condn.getPreserved();
     switch (condn.getJoinType()) {
     case INNER:
-      type = joinDesc.INNER_JOIN;
+      type = JoinDesc.INNER_JOIN;
       break;
     case LEFTOUTER:
-      type = joinDesc.LEFT_OUTER_JOIN;
+      type = JoinDesc.LEFT_OUTER_JOIN;
       break;
     case RIGHTOUTER:
-      type = joinDesc.RIGHT_OUTER_JOIN;
+      type = JoinDesc.RIGHT_OUTER_JOIN;
       break;
     case FULLOUTER:
-      type = joinDesc.FULL_OUTER_JOIN;
+      type = JoinDesc.FULL_OUTER_JOIN;
       break;
     case UNIQUE:
-      type = joinDesc.UNIQUE_JOIN;
+      type = JoinDesc.UNIQUE_JOIN;
       break;
     case LEFTSEMI:
-      type = joinDesc.LEFT_SEMI_JOIN;
+      type = JoinDesc.LEFT_SEMI_JOIN;
       break;
     default:
       assert false;
@@ -107,27 +107,27 @@
     this.type = type;
   }
 
-  @explain
+  @Explain
   public String getJoinCondString() {
     StringBuilder sb = new StringBuilder();
 
     switch (type) {
-    case joinDesc.INNER_JOIN:
+    case JoinDesc.INNER_JOIN:
       sb.append("Inner Join ");
       break;
-    case joinDesc.FULL_OUTER_JOIN:
+    case JoinDesc.FULL_OUTER_JOIN:
       sb.append("Outer Join ");
       break;
-    case joinDesc.LEFT_OUTER_JOIN:
+    case JoinDesc.LEFT_OUTER_JOIN:
       sb.append("Left Outer Join");
       break;
-    case joinDesc.RIGHT_OUTER_JOIN:
+    case JoinDesc.RIGHT_OUTER_JOIN:
       sb.append("Right Outer Join");
       break;
-    case joinDesc.UNIQUE_JOIN:
+    case JoinDesc.UNIQUE_JOIN:
       sb.append("Unique Join");
       break;
-    case joinDesc.LEFT_SEMI_JOIN:
+    case JoinDesc.LEFT_SEMI_JOIN:
       sb.append("Left Semi Join ");
       break;
     default:

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
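
For readers following the rename, a minimal usage sketch of the new plan-level class. This is illustrative only and not part of the commit; the three-int constructor, the JoinDesc type constants, and getJoinCondString all appear in the diff above.

    // Illustrative sketch; not part of commit r902921.
    import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
    import org.apache.hadoop.hive.ql.plan.JoinDesc;

    public class JoinCondDescSketch {
      public static void main(String[] args) {
        // A left outer join between the tables at positions 0 and 1, using
        // the join-type constants that live on JoinDesc.
        JoinCondDesc cond = new JoinCondDesc(0, 1, JoinDesc.LEFT_OUTER_JOIN);
        // getJoinCondString() is what EXPLAIN renders, via the @Explain
        // annotation on the getter.
        System.out.println(cond.getJoinCondString());
      }
    }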

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java Mon Jan 25 18:48:58 2010
@@ -28,8 +28,8 @@
  * Join operator Descriptor implementation.
  * 
  */
-@explain(displayName = "Join Operator")
-public class joinDesc implements Serializable {
+@Explain(displayName = "Join Operator")
+public class JoinDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   public static final int INNER_JOIN = 0;
   public static final int LEFT_OUTER_JOIN = 1;
@@ -43,10 +43,10 @@
   private int skewKeyDefinition = -1;
   private Map<Byte, String> bigKeysDirMap;
   private Map<Byte, Map<Byte, String>> smallKeysDirMap;
-  private Map<Byte, tableDesc> skewKeysValuesTables;
+  private Map<Byte, TableDesc> skewKeysValuesTables;
 
   // alias to key mapping
-  private Map<Byte, List<exprNodeDesc>> exprs;
+  private Map<Byte, List<ExprNodeDesc>> exprs;
 
   // used for create joinOutputObjectInspector
   protected java.util.ArrayList<java.lang.String> outputColumnNames;
@@ -57,17 +57,17 @@
   // No outer join involved
   protected boolean noOuterJoin;
 
-  protected joinCond[] conds;
+  protected JoinCondDesc[] conds;
 
   protected Byte[] tagOrder;
-  private tableDesc keyTableDesc;
+  private TableDesc keyTableDesc;
 
-  public joinDesc() {
+  public JoinDesc() {
   }
 
-  public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs,
+  public JoinDesc(final Map<Byte, List<ExprNodeDesc>> exprs,
       ArrayList<String> outputColumnNames, final boolean noOuterJoin,
-      final joinCond[] conds) {
+      final JoinCondDesc[] conds) {
     this.exprs = exprs;
     this.outputColumnNames = outputColumnNames;
     this.noOuterJoin = noOuterJoin;
@@ -79,17 +79,17 @@
     }
   }
 
-  public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs,
+  public JoinDesc(final Map<Byte, List<ExprNodeDesc>> exprs,
       ArrayList<String> outputColumnNames) {
     this(exprs, outputColumnNames, true, null);
   }
 
-  public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs,
-      ArrayList<String> outputColumnNames, final joinCond[] conds) {
+  public JoinDesc(final Map<Byte, List<ExprNodeDesc>> exprs,
+      ArrayList<String> outputColumnNames, final JoinCondDesc[] conds) {
     this(exprs, outputColumnNames, false, conds);
   }
 
-  public Map<Byte, List<exprNodeDesc>> getExprs() {
+  public Map<Byte, List<ExprNodeDesc>> getExprs() {
     return exprs;
   }
 
@@ -101,7 +101,7 @@
     reversedExprs = reversed_Exprs;
   }
 
-  @explain(displayName = "condition expressions")
+  @Explain(displayName = "condition expressions")
   public Map<Byte, String> getExprsStringMap() {
     if (getExprs() == null) {
       return null;
@@ -109,11 +109,11 @@
 
     LinkedHashMap<Byte, String> ret = new LinkedHashMap<Byte, String>();
 
-    for (Map.Entry<Byte, List<exprNodeDesc>> ent : getExprs().entrySet()) {
+    for (Map.Entry<Byte, List<ExprNodeDesc>> ent : getExprs().entrySet()) {
       StringBuilder sb = new StringBuilder();
       boolean first = true;
       if (ent.getValue() != null) {
-        for (exprNodeDesc expr : ent.getValue()) {
+        for (ExprNodeDesc expr : ent.getValue()) {
           if (!first) {
             sb.append(" ");
           }
@@ -130,11 +130,11 @@
     return ret;
   }
 
-  public void setExprs(final Map<Byte, List<exprNodeDesc>> exprs) {
+  public void setExprs(final Map<Byte, List<ExprNodeDesc>> exprs) {
     this.exprs = exprs;
   }
 
-  @explain(displayName = "outputColumnNames")
+  @Explain(displayName = "outputColumnNames")
   public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
     return outputColumnNames;
   }
@@ -152,25 +152,25 @@
     this.noOuterJoin = noOuterJoin;
   }
 
-  @explain(displayName = "condition map")
-  public List<joinCond> getCondsList() {
+  @Explain(displayName = "condition map")
+  public List<JoinCondDesc> getCondsList() {
     if (conds == null) {
       return null;
     }
 
-    ArrayList<joinCond> l = new ArrayList<joinCond>();
-    for (joinCond cond : conds) {
+    ArrayList<JoinCondDesc> l = new ArrayList<JoinCondDesc>();
+    for (JoinCondDesc cond : conds) {
       l.add(cond);
     }
 
     return l;
   }
 
-  public joinCond[] getConds() {
+  public JoinCondDesc[] getConds() {
     return conds;
   }
 
-  public void setConds(final joinCond[] conds) {
+  public void setConds(final JoinCondDesc[] conds) {
     this.conds = conds;
   }
 
@@ -193,7 +193,7 @@
     this.tagOrder = tagOrder;
   }
 
-  @explain(displayName = "handleSkewJoin")
+  @Explain(displayName = "handleSkewJoin")
   public boolean getHandleSkewJoin() {
     return handleSkewJoin;
   }
@@ -259,7 +259,7 @@
   /**
    * @return the table desc for storing skew keys and their corresponding value;
    */
-  public Map<Byte, tableDesc> getSkewKeysValuesTables() {
+  public Map<Byte, TableDesc> getSkewKeysValuesTables() {
     return skewKeysValuesTables;
   }
 
@@ -268,26 +268,26 @@
    *          set the table desc for storing skew keys and their corresponding
    *          value;
    */
-  public void setSkewKeysValuesTables(Map<Byte, tableDesc> skewKeysValuesTables) {
+  public void setSkewKeysValuesTables(Map<Byte, TableDesc> skewKeysValuesTables) {
     this.skewKeysValuesTables = skewKeysValuesTables;
   }
 
   public boolean isNoOuterJoin() {
-    for (org.apache.hadoop.hive.ql.plan.joinCond cond : conds) {
-      if (cond.getType() == joinDesc.FULL_OUTER_JOIN
-          || (cond.getType() == joinDesc.LEFT_OUTER_JOIN)
-          || cond.getType() == joinDesc.RIGHT_OUTER_JOIN) {
+    for (org.apache.hadoop.hive.ql.plan.JoinCondDesc cond : conds) {
+      if (cond.getType() == JoinDesc.FULL_OUTER_JOIN
+          || (cond.getType() == JoinDesc.LEFT_OUTER_JOIN)
+          || cond.getType() == JoinDesc.RIGHT_OUTER_JOIN) {
         return false;
       }
     }
     return true;
   }
 
-  public void setKeyTableDesc(tableDesc keyTblDesc) {
+  public void setKeyTableDesc(TableDesc keyTblDesc) {
     keyTableDesc = keyTblDesc;
   }
 
-  public tableDesc getKeyTableDesc() {
+  public TableDesc getKeyTableDesc() {
     return keyTableDesc;
   }
 }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
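
A small sketch of the constructor chaining visible above (illustrative, not part of the commit): the three-argument JoinDesc constructor forces the noOuterJoin flag to false, but isNoOuterJoin() answers from the condition array itself, so an all-inner condition list still reports true.

    // Illustrative sketch; not part of commit r902921.
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
    import org.apache.hadoop.hive.ql.plan.JoinDesc;

    public class JoinDescSketch {
      public static void main(String[] args) {
        Map<Byte, List<ExprNodeDesc>> exprs =
            new HashMap<Byte, List<ExprNodeDesc>>();
        JoinCondDesc[] conds = { new JoinCondDesc(0, 1, JoinDesc.INNER_JOIN) };
        JoinDesc desc = new JoinDesc(exprs, new ArrayList<String>(), conds);
        // Scans conds for FULL/LEFT/RIGHT outer join types; none here.
        System.out.println(desc.isNoOuterJoin()); // true
      }
    }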

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/lateralViewJoinDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/lateralViewJoinDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/lateralViewJoinDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java Mon Jan 25 18:48:58 2010
@@ -21,16 +21,16 @@
 import java.io.Serializable;
 import java.util.ArrayList;
 
-@explain(displayName = "Lateral View Join Operator")
-public class lateralViewJoinDesc implements Serializable {
+@Explain(displayName = "Lateral View Join Operator")
+public class LateralViewJoinDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private ArrayList<String> outputInternalColNames;
 
-  public lateralViewJoinDesc() {
+  public LateralViewJoinDesc() {
   }
 
-  public lateralViewJoinDesc(ArrayList<String> outputInternalColNames) {
+  public LateralViewJoinDesc(ArrayList<String> outputInternalColNames) {
     this.outputInternalColNames = outputInternalColNames;
   }
 
@@ -38,7 +38,7 @@
     this.outputInternalColNames = outputInternalColNames;
   }
 
-  @explain(displayName = "outputColumnNames")
+  @Explain(displayName = "outputColumnNames")
   public ArrayList<String> getOutputInternalColNames() {
     return outputInternalColNames;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/limitDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java Mon Jan 25 18:48:58 2010
@@ -20,15 +20,15 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "Limit")
-public class limitDesc implements Serializable {
+@Explain(displayName = "Limit")
+public class LimitDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private int limit;
 
-  public limitDesc() {
+  public LimitDesc() {
   }
 
-  public limitDesc(final int limit) {
+  public LimitDesc(final int limit) {
     this.limit = limit;
   }
 

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java Mon Jan 25 18:48:58 2010
@@ -20,19 +20,19 @@
 
 import java.io.Serializable;
 
-public class loadDesc implements Serializable {
+public class LoadDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private String sourceDir;
 
-  public loadDesc() {
+  public LoadDesc() {
   }
 
-  public loadDesc(final String sourceDir) {
+  public LoadDesc(final String sourceDir) {
 
     this.sourceDir = sourceDir;
   }
 
-  @explain(displayName = "source", normalExplain = false)
+  @Explain(displayName = "source", normalExplain = false)
   public String getSourceDir() {
     return sourceDir;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java Mon Jan 25 18:48:58 2010
@@ -20,7 +20,7 @@
 
 import java.io.Serializable;
 
-public class loadFileDesc extends loadDesc implements Serializable {
+public class LoadFileDesc extends LoadDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private String targetDir;
   private boolean isDfsDir;
@@ -28,10 +28,10 @@
   private String columns;
   private String columnTypes;
 
-  public loadFileDesc() {
+  public LoadFileDesc() {
   }
 
-  public loadFileDesc(final String sourceDir, final String targetDir,
+  public LoadFileDesc(final String sourceDir, final String targetDir,
       final boolean isDfsDir, final String columns, final String columnTypes) {
 
     super(sourceDir);
@@ -41,7 +41,7 @@
     this.columnTypes = columnTypes;
   }
 
-  @explain(displayName = "destination")
+  @Explain(displayName = "destination")
   public String getTargetDir() {
     return targetDir;
   }
@@ -50,7 +50,7 @@
     this.targetDir = targetDir;
   }
 
-  @explain(displayName = "hdfs directory")
+  @Explain(displayName = "hdfs directory")
   public boolean getIsDfsDir() {
     return isDfsDir;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/loadTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java Mon Jan 25 18:48:58 2010
@@ -21,21 +21,21 @@
 import java.io.Serializable;
 import java.util.HashMap;
 
-public class loadTableDesc extends org.apache.hadoop.hive.ql.plan.loadDesc
+public class LoadTableDesc extends org.apache.hadoop.hive.ql.plan.LoadDesc
     implements Serializable {
   private static final long serialVersionUID = 1L;
   private boolean replace;
   private String tmpDir;
 
   // TODO: the below seems like they should just be combined into partitionDesc
-  private org.apache.hadoop.hive.ql.plan.tableDesc table;
+  private org.apache.hadoop.hive.ql.plan.TableDesc table;
   private HashMap<String, String> partitionSpec;
 
-  public loadTableDesc() {
+  public LoadTableDesc() {
   }
 
-  public loadTableDesc(final String sourceDir, final String tmpDir,
-      final org.apache.hadoop.hive.ql.plan.tableDesc table,
+  public LoadTableDesc(final String sourceDir, final String tmpDir,
+      final org.apache.hadoop.hive.ql.plan.TableDesc table,
       final HashMap<String, String> partitionSpec, final boolean replace) {
 
     super(sourceDir);
@@ -45,13 +45,13 @@
     this.replace = replace;
   }
 
-  public loadTableDesc(final String sourceDir, final String tmpDir,
-      final org.apache.hadoop.hive.ql.plan.tableDesc table,
+  public LoadTableDesc(final String sourceDir, final String tmpDir,
+      final org.apache.hadoop.hive.ql.plan.TableDesc table,
       final HashMap<String, String> partitionSpec) {
     this(sourceDir, tmpDir, table, partitionSpec, true);
   }
 
-  @explain(displayName = "tmp directory", normalExplain = false)
+  @Explain(displayName = "tmp directory", normalExplain = false)
   public String getTmpDir() {
     return tmpDir;
   }
@@ -60,16 +60,16 @@
     tmpDir = tmp;
   }
 
-  @explain(displayName = "table")
-  public tableDesc getTable() {
+  @Explain(displayName = "table")
+  public TableDesc getTable() {
     return table;
   }
 
-  public void setTable(final org.apache.hadoop.hive.ql.plan.tableDesc table) {
+  public void setTable(final org.apache.hadoop.hive.ql.plan.TableDesc table) {
     this.table = table;
   }
 
-  @explain(displayName = "partition")
+  @Explain(displayName = "partition")
   public HashMap<String, String> getPartitionSpec() {
     return partitionSpec;
   }
@@ -78,7 +78,7 @@
     this.partitionSpec = partitionSpec;
   }
 
-  @explain(displayName = "replace")
+  @Explain(displayName = "replace")
   public boolean getReplace() {
     return replace;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
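
A usage sketch for the renamed load descriptor (illustrative, not part of the commit): the four-argument constructor visible above delegates with replace = true, i.e. an overwrite-style load. The paths, the bare TableDesc, and the partition values below are placeholders.

    // Illustrative sketch; not part of commit r902921.
    import java.util.HashMap;
    import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    public class LoadTableDescSketch {
      public static void main(String[] args) {
        HashMap<String, String> partSpec = new HashMap<String, String>();
        partSpec.put("ds", "2010-01-25"); // illustrative partition spec
        LoadTableDesc load = new LoadTableDesc("/tmp/src", "/tmp/stage",
            new TableDesc(), partSpec);
        System.out.println(load.getReplace()); // true by default
      }
    }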

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java Mon Jan 25 18:48:58 2010
@@ -31,25 +31,25 @@
  * Map Join operator Descriptor implementation.
  * 
  */
-@explain(displayName = "Common Join Operator")
-public class mapJoinDesc extends joinDesc implements Serializable {
+@Explain(displayName = "Common Join Operator")
+public class MapJoinDesc extends JoinDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  private Map<Byte, List<exprNodeDesc>> keys;
-  private tableDesc keyTblDesc;
-  private List<tableDesc> valueTblDescs;
+  private Map<Byte, List<ExprNodeDesc>> keys;
+  private TableDesc keyTblDesc;
+  private List<TableDesc> valueTblDescs;
 
   private int posBigTable;
 
   private Map<Byte, List<Integer>> retainList;
 
-  public mapJoinDesc() {
+  public MapJoinDesc() {
   }
 
-  public mapJoinDesc(final Map<Byte, List<exprNodeDesc>> keys,
-      final tableDesc keyTblDesc, final Map<Byte, List<exprNodeDesc>> values,
-      final List<tableDesc> valueTblDescs, ArrayList<String> outputColumnNames,
-      final int posBigTable, final joinCond[] conds) {
+  public MapJoinDesc(final Map<Byte, List<ExprNodeDesc>> keys,
+      final TableDesc keyTblDesc, final Map<Byte, List<ExprNodeDesc>> values,
+      final List<TableDesc> valueTblDescs, ArrayList<String> outputColumnNames,
+      final int posBigTable, final JoinCondDesc[] conds) {
     super(values, outputColumnNames, conds);
     this.keys = keys;
     this.keyTblDesc = keyTblDesc;
@@ -60,10 +60,10 @@
 
   private void initRetainExprList() {
     retainList = new HashMap<Byte, List<Integer>>();
-    Set<Entry<Byte, List<exprNodeDesc>>> set = super.getExprs().entrySet();
-    Iterator<Entry<Byte, List<exprNodeDesc>>> setIter = set.iterator();
+    Set<Entry<Byte, List<ExprNodeDesc>>> set = super.getExprs().entrySet();
+    Iterator<Entry<Byte, List<ExprNodeDesc>>> setIter = set.iterator();
     while (setIter.hasNext()) {
-      Entry<Byte, List<exprNodeDesc>> current = setIter.next();
+      Entry<Byte, List<ExprNodeDesc>> current = setIter.next();
       List<Integer> list = new ArrayList<Integer>();
       for (int i = 0; i < current.getValue().size(); i++) {
         list.add(i);
@@ -83,8 +83,8 @@
   /**
    * @return the keys
    */
-  @explain(displayName = "keys")
-  public Map<Byte, List<exprNodeDesc>> getKeys() {
+  @Explain(displayName = "keys")
+  public Map<Byte, List<ExprNodeDesc>> getKeys() {
     return keys;
   }
 
@@ -92,14 +92,14 @@
    * @param keys
    *          the keys to set
    */
-  public void setKeys(Map<Byte, List<exprNodeDesc>> keys) {
+  public void setKeys(Map<Byte, List<ExprNodeDesc>> keys) {
     this.keys = keys;
   }
 
   /**
    * @return the position of the big table not in memory
    */
-  @explain(displayName = "Position of Big Table")
+  @Explain(displayName = "Position of Big Table")
   public int getPosBigTable() {
     return posBigTable;
   }
@@ -115,7 +115,7 @@
   /**
    * @return the keyTblDesc
    */
-  public tableDesc getKeyTblDesc() {
+  public TableDesc getKeyTblDesc() {
     return keyTblDesc;
   }
 
@@ -123,14 +123,14 @@
    * @param keyTblDesc
    *          the keyTblDesc to set
    */
-  public void setKeyTblDesc(tableDesc keyTblDesc) {
+  public void setKeyTblDesc(TableDesc keyTblDesc) {
     this.keyTblDesc = keyTblDesc;
   }
 
   /**
    * @return the valueTblDescs
    */
-  public List<tableDesc> getValueTblDescs() {
+  public List<TableDesc> getValueTblDescs() {
     return valueTblDescs;
   }
 
@@ -138,7 +138,7 @@
    * @param valueTblDescs
    *          the valueTblDescs to set
    */
-  public void setValueTblDescs(List<tableDesc> valueTblDescs) {
+  public void setValueTblDescs(List<TableDesc> valueTblDescs) {
     this.valueTblDescs = valueTblDescs;
   }
 }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
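
An instantiation sketch (illustrative, not part of the commit; the empty maps are placeholders that still satisfy initRetainExprList, which only iterates whatever value expressions exist): posBigTable marks the one table that is streamed rather than held in memory.

    // Illustrative sketch; not part of commit r902921.
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
    import org.apache.hadoop.hive.ql.plan.JoinDesc;
    import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    public class MapJoinDescSketch {
      public static void main(String[] args) {
        Map<Byte, List<ExprNodeDesc>> keys =
            new HashMap<Byte, List<ExprNodeDesc>>();
        Map<Byte, List<ExprNodeDesc>> values =
            new HashMap<Byte, List<ExprNodeDesc>>();
        JoinCondDesc[] conds = { new JoinCondDesc(0, 1, JoinDesc.INNER_JOIN) };
        // posBigTable = 0: table 0 is streamed; the rest would be in memory.
        MapJoinDesc mj = new MapJoinDesc(keys, new TableDesc(), values,
            new ArrayList<TableDesc>(), new ArrayList<String>(), 0, conds);
        System.out.println(mj.getPosBigTable()); // 0
      }
    }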

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredLocalWork.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredLocalWork.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredLocalWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java Mon Jan 25 18:48:58 2010
@@ -23,24 +23,24 @@
 
 import org.apache.hadoop.hive.ql.exec.Operator;
 
-@explain(displayName = "Map Reduce Local Work")
-public class mapredLocalWork implements Serializable {
+@Explain(displayName = "Map Reduce Local Work")
+public class MapredLocalWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork;
-  private LinkedHashMap<String, fetchWork> aliasToFetchWork;
+  private LinkedHashMap<String, FetchWork> aliasToFetchWork;
 
-  public mapredLocalWork() {
+  public MapredLocalWork() {
   }
 
-  public mapredLocalWork(
+  public MapredLocalWork(
       final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork,
-      final LinkedHashMap<String, fetchWork> aliasToFetchWork) {
+      final LinkedHashMap<String, FetchWork> aliasToFetchWork) {
     this.aliasToWork = aliasToWork;
     this.aliasToFetchWork = aliasToFetchWork;
   }
 
-  @explain(displayName = "Alias -> Map Local Operator Tree")
+  @Explain(displayName = "Alias -> Map Local Operator Tree")
   public LinkedHashMap<String, Operator<? extends Serializable>> getAliasToWork() {
     return aliasToWork;
   }
@@ -53,8 +53,8 @@
   /**
    * @return the aliasToFetchWork
    */
-  @explain(displayName = "Alias -> Map Local Tables")
-  public LinkedHashMap<String, fetchWork> getAliasToFetchWork() {
+  @Explain(displayName = "Alias -> Map Local Tables")
+  public LinkedHashMap<String, FetchWork> getAliasToFetchWork() {
     return aliasToFetchWork;
   }
 
@@ -63,7 +63,7 @@
    *          the aliasToFetchWork to set
    */
   public void setAliasToFetchWork(
-      final LinkedHashMap<String, fetchWork> aliasToFetchWork) {
+      final LinkedHashMap<String, FetchWork> aliasToFetchWork) {
     this.aliasToFetchWork = aliasToFetchWork;
   }
 }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java Mon Jan 25 18:48:58 2010
@@ -28,8 +28,8 @@
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 
-@explain(displayName = "Map Reduce")
-public class mapredWork implements Serializable {
+@Explain(displayName = "Map Reduce")
+public class MapredWork implements Serializable {
   private static final long serialVersionUID = 1L;
   private String command;
   // map side work
@@ -37,38 +37,38 @@
   // deterministic, to ease testing
   private LinkedHashMap<String, ArrayList<String>> pathToAliases;
 
-  private LinkedHashMap<String, partitionDesc> pathToPartitionInfo;
+  private LinkedHashMap<String, PartitionDesc> pathToPartitionInfo;
 
   private LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork;
 
-  private LinkedHashMap<String, partitionDesc> aliasToPartnInfo;
+  private LinkedHashMap<String, PartitionDesc> aliasToPartnInfo;
 
   // map<->reduce interface
   // schema of the map-reduce 'key' object - this is homogeneous
-  private tableDesc keyDesc;
+  private TableDesc keyDesc;
 
   // schema of the map-reduce 'val' object - this is heterogeneous
-  private List<tableDesc> tagToValueDesc;
+  private List<TableDesc> tagToValueDesc;
 
   private Operator<?> reducer;
 
   private Integer numReduceTasks;
 
   private boolean needsTagging;
-  private mapredLocalWork mapLocalWork;
+  private MapredLocalWork mapLocalWork;
 
-  public mapredWork() {
-    aliasToPartnInfo = new LinkedHashMap<String, partitionDesc>();
+  public MapredWork() {
+    aliasToPartnInfo = new LinkedHashMap<String, PartitionDesc>();
   }
 
-  public mapredWork(
+  public MapredWork(
       final String command,
       final LinkedHashMap<String, ArrayList<String>> pathToAliases,
-      final LinkedHashMap<String, partitionDesc> pathToPartitionInfo,
+      final LinkedHashMap<String, PartitionDesc> pathToPartitionInfo,
       final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork,
-      final tableDesc keyDesc, List<tableDesc> tagToValueDesc,
+      final TableDesc keyDesc, List<TableDesc> tagToValueDesc,
       final Operator<?> reducer, final Integer numReduceTasks,
-      final mapredLocalWork mapLocalWork) {
+      final MapredLocalWork mapLocalWork) {
     this.command = command;
     this.pathToAliases = pathToAliases;
     this.pathToPartitionInfo = pathToPartitionInfo;
@@ -78,7 +78,7 @@
     this.reducer = reducer;
     this.numReduceTasks = numReduceTasks;
     this.mapLocalWork = mapLocalWork;
-    aliasToPartnInfo = new LinkedHashMap<String, partitionDesc>();
+    aliasToPartnInfo = new LinkedHashMap<String, PartitionDesc>();
   }
 
   public String getCommand() {
@@ -89,7 +89,7 @@
     this.command = command;
   }
 
-  @explain(displayName = "Path -> Alias", normalExplain = false)
+  @Explain(displayName = "Path -> Alias", normalExplain = false)
   public LinkedHashMap<String, ArrayList<String>> getPathToAliases() {
     return pathToAliases;
   }
@@ -99,20 +99,20 @@
     this.pathToAliases = pathToAliases;
   }
 
-  @explain(displayName = "Path -> Partition", normalExplain = false)
-  public LinkedHashMap<String, partitionDesc> getPathToPartitionInfo() {
+  @Explain(displayName = "Path -> Partition", normalExplain = false)
+  public LinkedHashMap<String, PartitionDesc> getPathToPartitionInfo() {
     return pathToPartitionInfo;
   }
 
   public void setPathToPartitionInfo(
-      final LinkedHashMap<String, partitionDesc> pathToPartitionInfo) {
+      final LinkedHashMap<String, PartitionDesc> pathToPartitionInfo) {
     this.pathToPartitionInfo = pathToPartitionInfo;
   }
 
   /**
    * @return the aliasToPartnInfo
    */
-  public LinkedHashMap<String, partitionDesc> getAliasToPartnInfo() {
+  public LinkedHashMap<String, PartitionDesc> getAliasToPartnInfo() {
     return aliasToPartnInfo;
   }
 
@@ -121,11 +121,11 @@
    *          the aliasToPartnInfo to set
    */
   public void setAliasToPartnInfo(
-      LinkedHashMap<String, partitionDesc> aliasToPartnInfo) {
+      LinkedHashMap<String, PartitionDesc> aliasToPartnInfo) {
     this.aliasToPartnInfo = aliasToPartnInfo;
   }
 
-  @explain(displayName = "Alias -> Map Operator Tree")
+  @Explain(displayName = "Alias -> Map Operator Tree")
   public LinkedHashMap<String, Operator<? extends Serializable>> getAliasToWork() {
     return aliasToWork;
   }
@@ -138,8 +138,8 @@
   /**
    * @return the mapredLocalWork
    */
-  @explain(displayName = "Local Work")
-  public mapredLocalWork getMapLocalWork() {
+  @Explain(displayName = "Local Work")
+  public MapredLocalWork getMapLocalWork() {
     return mapLocalWork;
   }
 
@@ -147,27 +147,27 @@
    * @param mapLocalWork
    *          the mapredLocalWork to set
    */
-  public void setMapLocalWork(final mapredLocalWork mapLocalWork) {
+  public void setMapLocalWork(final MapredLocalWork mapLocalWork) {
     this.mapLocalWork = mapLocalWork;
   }
 
-  public tableDesc getKeyDesc() {
+  public TableDesc getKeyDesc() {
     return keyDesc;
   }
 
-  public void setKeyDesc(final tableDesc keyDesc) {
+  public void setKeyDesc(final TableDesc keyDesc) {
     this.keyDesc = keyDesc;
   }
 
-  public List<tableDesc> getTagToValueDesc() {
+  public List<TableDesc> getTagToValueDesc() {
     return tagToValueDesc;
   }
 
-  public void setTagToValueDesc(final List<tableDesc> tagToValueDesc) {
+  public void setTagToValueDesc(final List<TableDesc> tagToValueDesc) {
     this.tagToValueDesc = tagToValueDesc;
   }
 
-  @explain(displayName = "Reduce Operator Tree")
+  @Explain(displayName = "Reduce Operator Tree")
   public Operator<?> getReducer() {
     return reducer;
   }
@@ -194,7 +194,7 @@
 
   @SuppressWarnings("nls")
   public void addMapWork(String path, String alias, Operator<?> work,
-      partitionDesc pd) {
+      PartitionDesc pd) {
     ArrayList<String> curAliases = pathToAliases.get(path);
     if (curAliases == null) {
       assert (pathToPartitionInfo.get(path) == null);
@@ -258,7 +258,7 @@
     if (pathToPartitionInfo == null) {
       return;
     }
-    for (Map.Entry<String, partitionDesc> entry : pathToPartitionInfo
+    for (Map.Entry<String, PartitionDesc> entry : pathToPartitionInfo
         .entrySet()) {
       entry.getValue().deriveBaseFileName(entry.getKey());
     }
@@ -268,7 +268,7 @@
     setAliases();
   }
 
-  @explain(displayName = "Needs Tagging", normalExplain = false)
+  @Explain(displayName = "Needs Tagging", normalExplain = false)
   public boolean getNeedsTagging() {
     return needsTagging;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
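
The empty skeleton handed back by PlanUtils.getMapRedWork() (changed later in this diff) is a convenient way to see the renamed maps (illustrative, not part of the commit):

    // Illustrative sketch; not part of commit r902921.
    import org.apache.hadoop.hive.ql.plan.MapredWork;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;

    public class MapredWorkSketch {
      public static void main(String[] args) {
        MapredWork work = PlanUtils.getMapRedWork();
        // Path -> aliases and Path -> PartitionDesc both start out empty;
        // the compiler fills them in as it assigns work to input paths.
        System.out.println(work.getPathToAliases());
        System.out.println(work.getPathToPartitionInfo());
      }
    }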

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/moveWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java Mon Jan 25 18:48:58 2010
@@ -24,11 +24,11 @@
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 
-@explain(displayName = "Move Operator")
-public class moveWork implements Serializable {
+@Explain(displayName = "Move Operator")
+public class MoveWork implements Serializable {
   private static final long serialVersionUID = 1L;
-  private loadTableDesc loadTableWork;
-  private loadFileDesc loadFileWork;
+  private LoadTableDesc loadTableWork;
+  private LoadFileDesc loadFileWork;
 
   private boolean checkFileFormat;
 
@@ -41,16 +41,16 @@
    */
   protected Set<WriteEntity> outputs;
 
-  public moveWork() {
+  public MoveWork() {
   }
 
-  public moveWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
+  public MoveWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
     this.inputs = inputs;
     this.outputs = outputs;
   }
 
-  public moveWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      final loadTableDesc loadTableWork, final loadFileDesc loadFileWork,
+  public MoveWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
+      final LoadTableDesc loadTableWork, final LoadFileDesc loadFileWork,
       boolean checkFileFormat) {
     this(inputs, outputs);
     this.loadTableWork = loadTableWork;
@@ -58,21 +58,21 @@
     this.checkFileFormat = checkFileFormat;
   }
 
-  @explain(displayName = "tables")
-  public loadTableDesc getLoadTableWork() {
+  @Explain(displayName = "tables")
+  public LoadTableDesc getLoadTableWork() {
     return loadTableWork;
   }
 
-  public void setLoadTableWork(final loadTableDesc loadTableWork) {
+  public void setLoadTableWork(final LoadTableDesc loadTableWork) {
     this.loadTableWork = loadTableWork;
   }
 
-  @explain(displayName = "files")
-  public loadFileDesc getLoadFileWork() {
+  @Explain(displayName = "files")
+  public LoadFileDesc getLoadFileWork() {
     return loadFileWork;
   }
 
-  public void setLoadFileWork(final loadFileDesc loadFileWork) {
+  public void setLoadFileWork(final LoadFileDesc loadFileWork) {
     this.loadFileWork = loadFileWork;
   }
 

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
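
A construction sketch (illustrative, not part of the commit): a MoveWork typically carries either a LoadTableDesc or a LoadFileDesc, with the unused slot left null; all values below are placeholders.

    // Illustrative sketch; not part of commit r902921.
    import java.util.HashSet;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;
    import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
    import org.apache.hadoop.hive.ql.plan.MoveWork;

    public class MoveWorkSketch {
      public static void main(String[] args) {
        LoadFileDesc file = new LoadFileDesc("/tmp/src", "/tmp/dest",
            true /* isDfsDir */, "col0", "string");
        MoveWork mv = new MoveWork(new HashSet<ReadEntity>(),
            new HashSet<WriteEntity>(), null, file,
            false /* checkFileFormat */);
        System.out.println(mv.getLoadFileWork().getTargetDir());
      }
    }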

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java Mon Jan 25 18:48:58 2010
@@ -31,10 +31,10 @@
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.mapred.InputFormat;
 
-@explain(displayName = "Partition")
-public class partitionDesc implements Serializable, Cloneable {
+@Explain(displayName = "Partition")
+public class PartitionDesc implements Serializable, Cloneable {
   private static final long serialVersionUID = 2L;
-  private tableDesc table;
+  private TableDesc table;
   private java.util.LinkedHashMap<String, String> partSpec;
   private java.lang.Class<? extends org.apache.hadoop.hive.serde2.Deserializer> deserializerClass;
   private Class<? extends InputFormat> inputFileFormatClass;
@@ -43,15 +43,15 @@
   private String serdeClassName;
   private transient String baseFileName;
 
-  public partitionDesc() {
+  public PartitionDesc() {
   }
 
-  public partitionDesc(final tableDesc table,
+  public PartitionDesc(final TableDesc table,
       final java.util.LinkedHashMap<String, String> partSpec) {
     this(table, partSpec, null, null, null, null, null);
   }
 
-  public partitionDesc(final tableDesc table,
+  public PartitionDesc(final TableDesc table,
       final java.util.LinkedHashMap<String, String> partSpec,
       final Class<? extends Deserializer> serdeClass,
       final Class<? extends InputFormat> inputFileFormatClass,
@@ -72,7 +72,7 @@
     }
   }
 
-  public partitionDesc(final org.apache.hadoop.hive.ql.metadata.Partition part)
+  public PartitionDesc(final org.apache.hadoop.hive.ql.metadata.Partition part)
       throws HiveException {
     table = Utilities.getTableDesc(part.getTable());
     partSpec = part.getSpec();
@@ -85,16 +85,16 @@
     ;
   }
 
-  @explain(displayName = "")
-  public tableDesc getTableDesc() {
+  @Explain(displayName = "")
+  public TableDesc getTableDesc() {
     return table;
   }
 
-  public void setTableDesc(final tableDesc table) {
+  public void setTableDesc(final TableDesc table) {
     this.table = table;
   }
 
-  @explain(displayName = "partition values")
+  @Explain(displayName = "partition values")
   public java.util.LinkedHashMap<String, String> getPartSpec() {
     return partSpec;
   }
@@ -148,7 +148,7 @@
         .getOutputFormatSubstitute(outputFileFormatClass);
   }
 
-  @explain(displayName = "properties", normalExplain = false)
+  @Explain(displayName = "properties", normalExplain = false)
   public java.util.Properties getProperties() {
     if (table != null) {
       return table.getProperties();
@@ -163,7 +163,7 @@
   /**
    * @return the serdeClassName
    */
-  @explain(displayName = "serde")
+  @Explain(displayName = "serde")
   public String getSerdeClassName() {
     if (serdeClassName == null && table != null) {
       setSerdeClassName(table.getSerdeClassName());
@@ -179,30 +179,30 @@
     this.serdeClassName = serdeClassName;
   }
 
-  @explain(displayName = "name")
+  @Explain(displayName = "name")
   public String getTableName() {
     return getProperties().getProperty(
         org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME);
   }
 
-  @explain(displayName = "input format")
+  @Explain(displayName = "input format")
   public String getInputFileFormatClassName() {
     return getInputFileFormatClass().getName();
   }
 
-  @explain(displayName = "output format")
+  @Explain(displayName = "output format")
   public String getOutputFileFormatClassName() {
     return getOutputFileFormatClass().getName();
   }
 
-  @explain(displayName = "base file name", normalExplain = false)
+  @Explain(displayName = "base file name", normalExplain = false)
   public String getBaseFileName() {
     return baseFileName;
   }
 
   @Override
-  public partitionDesc clone() {
-    partitionDesc ret = new partitionDesc();
+  public PartitionDesc clone() {
+    PartitionDesc ret = new PartitionDesc();
 
     ret.setSerdeClassName(serdeClassName);
     ret.setDeserializerClass(deserializerClass);
@@ -217,7 +217,7 @@
       }
       ret.setProperties(newProp);
     }
-    ret.table = (tableDesc) table.clone();
+    ret.table = (TableDesc) table.clone();
     // The partition spec is not present
     if (partSpec != null) {
       ret.partSpec = new java.util.LinkedHashMap<String, String>();

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
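
An instantiation sketch (illustrative, not part of the commit; the bare TableDesc is a placeholder): the two-argument constructor above simply forwards nulls for the serde, format, and property arguments.

    // Illustrative sketch; not part of commit r902921.
    import java.util.LinkedHashMap;
    import org.apache.hadoop.hive.ql.plan.PartitionDesc;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    public class PartitionDescSketch {
      public static void main(String[] args) {
        LinkedHashMap<String, String> spec =
            new LinkedHashMap<String, String>();
        spec.put("ds", "2010-01-25"); // illustrative partition key/value
        PartitionDesc pd = new PartitionDesc(new TableDesc(), spec);
        // Rendered by EXPLAIN as "partition values" via @Explain.
        System.out.println(pd.getPartSpec());
      }
    }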

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=902921&r1=902920&r2=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Mon Jan 25 18:48:58 2010
@@ -59,11 +59,11 @@
   };
 
   @SuppressWarnings("nls")
-  public static mapredWork getMapRedWork() {
-    return new mapredWork("", new LinkedHashMap<String, ArrayList<String>>(),
-        new LinkedHashMap<String, partitionDesc>(),
+  public static MapredWork getMapRedWork() {
+    return new MapredWork("", new LinkedHashMap<String, ArrayList<String>>(),
+        new LinkedHashMap<String, PartitionDesc>(),
         new LinkedHashMap<String, Operator<? extends Serializable>>(),
-        new tableDesc(), new ArrayList<tableDesc>(), null, Integer.valueOf(1),
+        new TableDesc(), new ArrayList<TableDesc>(), null, Integer.valueOf(1),
         null);
   }
 
@@ -71,7 +71,7 @@
    * Generate the table descriptor of MetadataTypedColumnsetSerDe with the
    * separatorCode and column names (comma separated string).
    */
-  public static tableDesc getDefaultTableDesc(String separatorCode,
+  public static TableDesc getDefaultTableDesc(String separatorCode,
       String columns) {
     return getDefaultTableDesc(separatorCode, columns, false);
   }
@@ -80,7 +80,7 @@
    * Generate the table descriptor of given serde with the separatorCode and
    * column names (comma separated string).
    */
-  public static tableDesc getTableDesc(
+  public static TableDesc getTableDesc(
       Class<? extends Deserializer> serdeClass, String separatorCode,
       String columns) {
     return getTableDesc(serdeClass, separatorCode, columns, false);
@@ -91,7 +91,7 @@
    * separatorCode and column names (comma separated string), and whether the
    * last column should take the rest of the line.
    */
-  public static tableDesc getDefaultTableDesc(String separatorCode,
+  public static TableDesc getDefaultTableDesc(String separatorCode,
       String columns, boolean lastColumnTakesRestOfTheLine) {
     return getDefaultTableDesc(separatorCode, columns, null,
         lastColumnTakesRestOfTheLine);
@@ -102,7 +102,7 @@
    * and column names (comma separated string), and whether the last column
    * should take the rest of the line.
    */
-  public static tableDesc getTableDesc(
+  public static TableDesc getTableDesc(
       Class<? extends Deserializer> serdeClass, String separatorCode,
       String columns, boolean lastColumnTakesRestOfTheLine) {
     return getTableDesc(serdeClass, separatorCode, columns, null,
@@ -114,20 +114,20 @@
    * separatorCode and column names (comma separated string), and whether the
    * last column should take the rest of the line.
    */
-  public static tableDesc getDefaultTableDesc(String separatorCode,
+  public static TableDesc getDefaultTableDesc(String separatorCode,
       String columns, String columnTypes, boolean lastColumnTakesRestOfTheLine) {
     return getTableDesc(LazySimpleSerDe.class, separatorCode, columns,
         columnTypes, lastColumnTakesRestOfTheLine);
   }
 
-  public static tableDesc getTableDesc(
+  public static TableDesc getTableDesc(
       Class<? extends Deserializer> serdeClass, String separatorCode,
       String columns, String columnTypes, boolean lastColumnTakesRestOfTheLine) {
     return getTableDesc(serdeClass, separatorCode, columns, columnTypes,
         lastColumnTakesRestOfTheLine, false);
   }
 
-  public static tableDesc getTableDesc(
+  public static TableDesc getTableDesc(
       Class<? extends Deserializer> serdeClass, String separatorCode,
       String columns, String columnTypes, boolean lastColumnTakesRestOfTheLine,
       boolean useJSONForLazy) {
@@ -158,14 +158,14 @@
       properties.setProperty(Constants.SERIALIZATION_USE_JSON_OBJECTS, "true");
     }
 
-    return new tableDesc(serdeClass, TextInputFormat.class,
+    return new TableDesc(serdeClass, TextInputFormat.class,
         IgnoreKeyTextOutputFormat.class, properties);
   }
 
   /**
    * Generate a table descriptor from a createTableDesc.
    */
-  public static tableDesc getTableDesc(createTableDesc crtTblDesc, String cols,
+  public static TableDesc getTableDesc(CreateTableDesc crtTblDesc, String cols,
       String colTypes) {
 
     Class<? extends Deserializer> serdeClass = LazySimpleSerDe.class;
@@ -173,7 +173,7 @@
     String columns = cols;
     String columnTypes = colTypes;
     boolean lastColumnTakesRestOfTheLine = false;
-    tableDesc ret;
+    TableDesc ret;
 
     try {
       if (crtTblDesc.getSerName() != null) {
@@ -232,8 +232,8 @@
    * does not support a table with a single column "col" with type
    * "array<string>".
    */
-  public static tableDesc getDefaultTableDesc(String separatorCode) {
-    return new tableDesc(MetadataTypedColumnsetSerDe.class,
+  public static TableDesc getDefaultTableDesc(String separatorCode) {
+    return new TableDesc(MetadataTypedColumnsetSerDe.class,
         TextInputFormat.class, IgnoreKeyTextOutputFormat.class, Utilities
             .makeProperties(
                 org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT,
@@ -243,9 +243,9 @@
   /**
    * Generate the table descriptor for reduce key.
    */
-  public static tableDesc getReduceKeyTableDesc(List<FieldSchema> fieldSchemas,
+  public static TableDesc getReduceKeyTableDesc(List<FieldSchema> fieldSchemas,
       String order) {
-    return new tableDesc(BinarySortableSerDe.class,
+    return new TableDesc(BinarySortableSerDe.class,
         SequenceFileInputFormat.class, SequenceFileOutputFormat.class,
         Utilities.makeProperties(Constants.LIST_COLUMNS, MetaStoreUtils
             .getColumnNamesFromFieldSchema(fieldSchemas),
@@ -257,8 +257,8 @@
   /**
    * Generate the table descriptor for Map-side join key.
    */
-  public static tableDesc getMapJoinKeyTableDesc(List<FieldSchema> fieldSchemas) {
-    return new tableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
+  public static TableDesc getMapJoinKeyTableDesc(List<FieldSchema> fieldSchemas) {
+    return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
         SequenceFileOutputFormat.class, Utilities.makeProperties("columns",
             MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
             "columns.types", MetaStoreUtils
@@ -269,9 +269,9 @@
   /**
    * Generate the table descriptor for Map-side join key.
    */
-  public static tableDesc getMapJoinValueTableDesc(
+  public static TableDesc getMapJoinValueTableDesc(
       List<FieldSchema> fieldSchemas) {
-    return new tableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
+    return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
         SequenceFileOutputFormat.class, Utilities.makeProperties("columns",
             MetaStoreUtils.getColumnNamesFromFieldSchema(fieldSchemas),
             "columns.types", MetaStoreUtils
@@ -282,9 +282,9 @@
   /**
    * Generate the table descriptor for intermediate files.
    */
-  public static tableDesc getIntermediateFileTableDesc(
+  public static TableDesc getIntermediateFileTableDesc(
       List<FieldSchema> fieldSchemas) {
-    return new tableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
+    return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
         SequenceFileOutputFormat.class, Utilities.makeProperties(
             Constants.LIST_COLUMNS, MetaStoreUtils
                 .getColumnNamesFromFieldSchema(fieldSchemas),
@@ -296,8 +296,8 @@
   /**
    * Generate the table descriptor for intermediate files.
    */
-  public static tableDesc getReduceValueTableDesc(List<FieldSchema> fieldSchemas) {
-    return new tableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
+  public static TableDesc getReduceValueTableDesc(List<FieldSchema> fieldSchemas) {
+    return new TableDesc(LazyBinarySerDe.class, SequenceFileInputFormat.class,
         SequenceFileOutputFormat.class, Utilities.makeProperties(
             Constants.LIST_COLUMNS, MetaStoreUtils
                 .getColumnNamesFromFieldSchema(fieldSchemas),
@@ -310,7 +310,7 @@
    * Convert the ColumnList to FieldSchema list.
    */
   public static List<FieldSchema> getFieldSchemasFromColumnList(
-      List<exprNodeDesc> cols, List<String> outputColumnNames, int start,
+      List<ExprNodeDesc> cols, List<String> outputColumnNames, int start,
       String fieldPrefix) {
     List<FieldSchema> schemas = new ArrayList<FieldSchema>(cols.size());
     for (int i = 0; i < cols.size(); i++) {
@@ -324,7 +324,7 @@
    * Convert the ColumnList to FieldSchema list.
    */
   public static List<FieldSchema> getFieldSchemasFromColumnList(
-      List<exprNodeDesc> cols, String fieldPrefix) {
+      List<ExprNodeDesc> cols, String fieldPrefix) {
     List<FieldSchema> schemas = new ArrayList<FieldSchema>(cols.size());
     for (int i = 0; i < cols.size(); i++) {
       schemas.add(MetaStoreUtils.getFieldSchemaFromTypeInfo(fieldPrefix + i,
@@ -393,12 +393,12 @@
    *          input data size.
    * @return The reduceSinkDesc object.
    */
-  public static reduceSinkDesc getReduceSinkDesc(
-      ArrayList<exprNodeDesc> keyCols, ArrayList<exprNodeDesc> valueCols,
+  public static ReduceSinkDesc getReduceSinkDesc(
+      ArrayList<ExprNodeDesc> keyCols, ArrayList<ExprNodeDesc> valueCols,
       List<String> outputColumnNames, boolean includeKeyCols, int tag,
-      ArrayList<exprNodeDesc> partitionCols, String order, int numReducers) {
-    tableDesc keyTable = null;
-    tableDesc valueTable = null;
+      ArrayList<ExprNodeDesc> partitionCols, String order, int numReducers) {
+    TableDesc keyTable = null;
+    TableDesc valueTable = null;
     ArrayList<String> outputKeyCols = new ArrayList<String>();
     ArrayList<String> outputValCols = new ArrayList<String>();
     if (includeKeyCols) {
@@ -419,7 +419,7 @@
           valueCols, outputColumnNames, 0, ""));
       outputValCols.addAll(outputColumnNames);
     }
-    return new reduceSinkDesc(keyCols, valueCols, outputKeyCols, outputValCols,
+    return new ReduceSinkDesc(keyCols, valueCols, outputKeyCols, outputValCols,
         tag, partitionCols, numReducers, keyTable,
         // Revert to DynamicSerDe:
         // getBinaryTableDesc(getFieldSchemasFromColumnList(valueCols,
@@ -446,22 +446,22 @@
    *          input data size.
    * @return The reduceSinkDesc object.
    */
-  public static reduceSinkDesc getReduceSinkDesc(
-      ArrayList<exprNodeDesc> keyCols, ArrayList<exprNodeDesc> valueCols,
+  public static ReduceSinkDesc getReduceSinkDesc(
+      ArrayList<ExprNodeDesc> keyCols, ArrayList<ExprNodeDesc> valueCols,
       List<String> outputColumnNames, boolean includeKey, int tag,
       int numPartitionFields, int numReducers) {
-    ArrayList<exprNodeDesc> partitionCols = null;
+    ArrayList<ExprNodeDesc> partitionCols = null;
 
     if (numPartitionFields >= keyCols.size()) {
       partitionCols = keyCols;
     } else if (numPartitionFields >= 0) {
-      partitionCols = new ArrayList<exprNodeDesc>(numPartitionFields);
+      partitionCols = new ArrayList<ExprNodeDesc>(numPartitionFields);
       for (int i = 0; i < numPartitionFields; i++) {
         partitionCols.add(keyCols.get(i));
       }
     } else {
       // numPartitionFields = -1 means random partitioning
-      partitionCols = new ArrayList<exprNodeDesc>(1);
+      partitionCols = new ArrayList<ExprNodeDesc>(1);
       partitionCols.add(TypeCheckProcFactory.DefaultExprProcessor
           .getFuncExprNodeDesc("rand"));
     }
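
The numPartitionFields overload above encodes three partitioning choices: all key columns, a prefix of them, or, for -1, a single rand() expression for random partitioning. A call sketch (illustrative, not part of the commit; the empty column lists are placeholders):

    // Illustrative sketch; not part of commit r902921.
    import java.util.ArrayList;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.PlanUtils;
    import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;

    public class ReduceSinkSketch {
      public static void main(String[] args) {
        ArrayList<ExprNodeDesc> keyCols = new ArrayList<ExprNodeDesc>();
        ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
        // numPartitionFields = -1 requests random partitioning via rand().
        ReduceSinkDesc rs = PlanUtils.getReduceSinkDesc(keyCols, valueCols,
            new ArrayList<String>(), false /* includeKey */, -1 /* tag */,
            -1 /* numPartitionFields */, 1 /* numReducers */);
        System.out.println(rs.getPartitionCols());
      }
    }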

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java Mon Jan 25 18:48:58 2010
@@ -20,27 +20,27 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "Reduce Output Operator")
-public class reduceSinkDesc implements Serializable {
+@Explain(displayName = "Reduce Output Operator")
+public class ReduceSinkDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   /**
    * Key columns are passed to reducer in the "key".
    */
-  private java.util.ArrayList<exprNodeDesc> keyCols;
+  private java.util.ArrayList<ExprNodeDesc> keyCols;
   private java.util.ArrayList<java.lang.String> outputKeyColumnNames;
   /**
    * Value columns are passed to reducer in the "value".
    */
-  private java.util.ArrayList<exprNodeDesc> valueCols;
+  private java.util.ArrayList<ExprNodeDesc> valueCols;
   private java.util.ArrayList<java.lang.String> outputValueColumnNames;
   /**
    * Describe how to serialize the key.
    */
-  private tableDesc keySerializeInfo;
+  private TableDesc keySerializeInfo;
   /**
    * Describe how to serialize the value.
    */
-  private tableDesc valueSerializeInfo;
+  private TableDesc valueSerializeInfo;
 
   /**
    * The tag for this reducesink descriptor.
@@ -52,19 +52,19 @@
    * Partition columns decide the reducer that the current row goes to.
    * Partition columns are not passed to reducer.
    */
-  private java.util.ArrayList<exprNodeDesc> partitionCols;
+  private java.util.ArrayList<ExprNodeDesc> partitionCols;
 
   private int numReducers;
 
-  public reduceSinkDesc() {
+  public ReduceSinkDesc() {
   }
 
-  public reduceSinkDesc(java.util.ArrayList<exprNodeDesc> keyCols,
-      java.util.ArrayList<exprNodeDesc> valueCols,
+  public ReduceSinkDesc(java.util.ArrayList<ExprNodeDesc> keyCols,
+      java.util.ArrayList<ExprNodeDesc> valueCols,
       java.util.ArrayList<java.lang.String> outputKeyColumnNames,
      java.util.ArrayList<java.lang.String> outputValueColumnNames, int tag,
-      java.util.ArrayList<exprNodeDesc> partitionCols, int numReducers,
-      final tableDesc keySerializeInfo, final tableDesc valueSerializeInfo) {
+      java.util.ArrayList<ExprNodeDesc> partitionCols, int numReducers,
+      final TableDesc keySerializeInfo, final TableDesc valueSerializeInfo) {
     this.keyCols = keyCols;
     this.valueCols = valueCols;
     this.outputKeyColumnNames = outputKeyColumnNames;
@@ -94,35 +94,35 @@
     this.outputValueColumnNames = outputValueColumnNames;
   }
 
-  @explain(displayName = "key expressions")
-  public java.util.ArrayList<exprNodeDesc> getKeyCols() {
+  @Explain(displayName = "key expressions")
+  public java.util.ArrayList<ExprNodeDesc> getKeyCols() {
     return keyCols;
   }
 
-  public void setKeyCols(final java.util.ArrayList<exprNodeDesc> keyCols) {
+  public void setKeyCols(final java.util.ArrayList<ExprNodeDesc> keyCols) {
     this.keyCols = keyCols;
   }
 
-  @explain(displayName = "value expressions")
-  public java.util.ArrayList<exprNodeDesc> getValueCols() {
+  @Explain(displayName = "value expressions")
+  public java.util.ArrayList<ExprNodeDesc> getValueCols() {
     return valueCols;
   }
 
-  public void setValueCols(final java.util.ArrayList<exprNodeDesc> valueCols) {
+  public void setValueCols(final java.util.ArrayList<ExprNodeDesc> valueCols) {
     this.valueCols = valueCols;
   }
 
-  @explain(displayName = "Map-reduce partition columns")
-  public java.util.ArrayList<exprNodeDesc> getPartitionCols() {
+  @Explain(displayName = "Map-reduce partition columns")
+  public java.util.ArrayList<ExprNodeDesc> getPartitionCols() {
     return partitionCols;
   }
 
   public void setPartitionCols(
-      final java.util.ArrayList<exprNodeDesc> partitionCols) {
+      final java.util.ArrayList<ExprNodeDesc> partitionCols) {
     this.partitionCols = partitionCols;
   }
 
-  @explain(displayName = "tag")
+  @Explain(displayName = "tag")
   public int getTag() {
     return tag;
   }
@@ -145,19 +145,19 @@
     this.numReducers = numReducers;
   }
 
-  public tableDesc getKeySerializeInfo() {
+  public TableDesc getKeySerializeInfo() {
     return keySerializeInfo;
   }
 
-  public void setKeySerializeInfo(tableDesc keySerializeInfo) {
+  public void setKeySerializeInfo(TableDesc keySerializeInfo) {
     this.keySerializeInfo = keySerializeInfo;
   }
 
-  public tableDesc getValueSerializeInfo() {
+  public TableDesc getValueSerializeInfo() {
     return valueSerializeInfo;
   }
 
-  public void setValueSerializeInfo(tableDesc valueSerializeInfo) {
+  public void setValueSerializeInfo(TableDesc valueSerializeInfo) {
     this.valueSerializeInfo = valueSerializeInfo;
   }
 
@@ -168,7 +168,7 @@
    *         of the same length as key columns, that consists of only "+"
    *         (ascending order) and "-" (descending order).
    */
-  @explain(displayName = "sort order")
+  @Explain(displayName = "sort order")
   public String getOrder() {
     return keySerializeInfo.getProperties().getProperty(
         org.apache.hadoop.hive.serde.Constants.SERIALIZATION_SORT_ORDER);

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
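
The getOrder() hunk above documents the sort-order encoding carried in the key serialization properties: one character per key column, "+" for ascending and "-" for descending. A small self-contained sketch of decoding that string (illustrative only; Hive itself reads it through the SERIALIZATION_SORT_ORDER property):

    // Decodes the "+"/"-" order string described above; for key columns
    // (a, b, c), "++-" means a ASC, b ASC, c DESC. Not Hive code.
    class SortOrderSketch {
      static boolean[] decodeAscending(String order) {
        boolean[] ascending = new boolean[order.length()];
        for (int i = 0; i < order.length(); i++) {
          char c = order.charAt(i);
          if (c != '+' && c != '-') {
            throw new IllegalArgumentException("unexpected order flag: " + c);
          }
          ascending[i] = (c == '+');                // one flag per key column
        }
        return ascending;
      }
    }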

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/schemaDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/schemaDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/schemaDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java Mon Jan 25 18:48:58 2010
@@ -20,14 +20,14 @@
 
 import java.io.Serializable;
 
-public class schemaDesc implements Serializable {
+public class SchemaDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private String schema;
 
-  public schemaDesc() {
+  public SchemaDesc() {
   }
 
-  public schemaDesc(final String schema) {
+  public SchemaDesc(final String schema) {
     this.schema = schema;
   }
 

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/scriptDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java Mon Jan 25 18:48:58 2010
@@ -23,24 +23,24 @@
 import org.apache.hadoop.hive.ql.exec.RecordReader;
 import org.apache.hadoop.hive.ql.exec.RecordWriter;
 
-@explain(displayName = "Transform Operator")
-public class scriptDesc implements Serializable {
+@Explain(displayName = "Transform Operator")
+public class ScriptDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private String scriptCmd;
   // Describe how to deserialize data back from user script
-  private tableDesc scriptOutputInfo;
+  private TableDesc scriptOutputInfo;
   private Class<? extends RecordWriter> inRecordWriterClass;
 
   // Describe how to serialize data out to user script
-  private tableDesc scriptInputInfo;
+  private TableDesc scriptInputInfo;
   private Class<? extends RecordReader> outRecordReaderClass;
 
-  public scriptDesc() {
+  public ScriptDesc() {
   }
 
-  public scriptDesc(final String scriptCmd, final tableDesc scriptInputInfo,
+  public ScriptDesc(final String scriptCmd, final TableDesc scriptInputInfo,
       final Class<? extends RecordWriter> inRecordWriterClass,
-      final tableDesc scriptOutputInfo,
+      final TableDesc scriptOutputInfo,
       final Class<? extends RecordReader> outRecordReaderClass) {
 
     this.scriptCmd = scriptCmd;
@@ -50,7 +50,7 @@
     this.outRecordReaderClass = outRecordReaderClass;
   }
 
-  @explain(displayName = "command")
+  @Explain(displayName = "command")
   public String getScriptCmd() {
     return scriptCmd;
   }
@@ -59,20 +59,20 @@
     this.scriptCmd = scriptCmd;
   }
 
-  @explain(displayName = "output info")
-  public tableDesc getScriptOutputInfo() {
+  @Explain(displayName = "output info")
+  public TableDesc getScriptOutputInfo() {
     return scriptOutputInfo;
   }
 
-  public void setScriptOutputInfo(final tableDesc scriptOutputInfo) {
+  public void setScriptOutputInfo(final TableDesc scriptOutputInfo) {
     this.scriptOutputInfo = scriptOutputInfo;
   }
 
-  public tableDesc getScriptInputInfo() {
+  public TableDesc getScriptInputInfo() {
     return scriptInputInfo;
   }
 
-  public void setScriptInputInfo(tableDesc scriptInputInfo) {
+  public void setScriptInputInfo(TableDesc scriptInputInfo) {
     this.scriptInputInfo = scriptInputInfo;
   }
 

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
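
ScriptDesc pairs the user command with two TableDescs describing how rows are serialized into the script and deserialized back from it. A rough, Hive-independent sketch of the pattern it configures, with ProcessBuilder standing in for Hive's ScriptOperator and /bin/cat as an arbitrary stand-in for scriptCmd:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.io.OutputStreamWriter;
    import java.io.Writer;

    // Sketch only: serialize rows to the script's stdin (the scriptInputInfo
    // side), read transformed rows back from stdout (the scriptOutputInfo side).
    class TransformSketch {
      public static void main(String[] args)
          throws IOException, InterruptedException {
        Process p = new ProcessBuilder("/bin/cat").start();
        try (Writer toScript = new OutputStreamWriter(p.getOutputStream())) {
          toScript.write("238\tval_238\n");         // one tab-separated row in
        }
        try (BufferedReader fromScript =
                 new BufferedReader(new InputStreamReader(p.getInputStream()))) {
          String row;
          while ((row = fromScript.readLine()) != null) {
            System.out.println(row);                // transformed row out
          }
        }
        p.waitFor();
      }
    }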

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/selectDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java Mon Jan 25 18:48:58 2010
@@ -20,29 +20,29 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "Select Operator")
-public class selectDesc implements Serializable {
+@Explain(displayName = "Select Operator")
+public class SelectDesc implements Serializable {
   private static final long serialVersionUID = 1L;
-  private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList;
+  private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList;
   private java.util.ArrayList<java.lang.String> outputColumnNames;
   private boolean selectStar;
   private boolean selStarNoCompute;
 
-  public selectDesc() {
+  public SelectDesc() {
   }
 
-  public selectDesc(final boolean selStarNoCompute) {
+  public SelectDesc(final boolean selStarNoCompute) {
     this.selStarNoCompute = selStarNoCompute;
   }
 
-  public selectDesc(
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList,
+  public SelectDesc(
+      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
       final java.util.ArrayList<java.lang.String> outputColumnNames) {
     this(colList, outputColumnNames, false);
   }
 
-  public selectDesc(
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList,
+  public SelectDesc(
+      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
       java.util.ArrayList<java.lang.String> outputColumnNames,
       final boolean selectStar) {
     this.colList = colList;
@@ -50,25 +50,25 @@
     this.outputColumnNames = outputColumnNames;
   }
 
-  public selectDesc(
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList,
+  public SelectDesc(
+      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
       final boolean selectStar, final boolean selStarNoCompute) {
     this.colList = colList;
     this.selectStar = selectStar;
     this.selStarNoCompute = selStarNoCompute;
   }
 
-  @explain(displayName = "expressions")
-  public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> getColList() {
+  @Explain(displayName = "expressions")
+  public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> getColList() {
     return colList;
   }
 
   public void setColList(
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.exprNodeDesc> colList) {
+      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList) {
     this.colList = colList;
   }
 
-  @explain(displayName = "outputColumnNames")
+  @Explain(displayName = "outputColumnNames")
   public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
     return outputColumnNames;
   }
@@ -78,7 +78,7 @@
     this.outputColumnNames = outputColumnNames;
   }
 
-  @explain(displayName = "SELECT * ")
+  @Explain(displayName = "SELECT * ")
   public String explainNoCompute() {
     if (isSelStarNoCompute()) {
       return "(no compute)";

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showFunctionsDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showFunctionsDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showFunctionsDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java Mon Jan 25 18:48:58 2010
@@ -22,8 +22,8 @@
 
 import org.apache.hadoop.fs.Path;
 
-@explain(displayName = "Show Functions")
-public class showFunctionsDesc extends ddlDesc implements Serializable {
+@Explain(displayName = "Show Functions")
+public class ShowFunctionsDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String pattern;
   Path resFile;
@@ -47,7 +47,7 @@
   /**
    * @param resFile
    */
-  public showFunctionsDesc(Path resFile) {
+  public ShowFunctionsDesc(Path resFile) {
     this.resFile = resFile;
     pattern = null;
   }
@@ -56,7 +56,7 @@
    * @param pattern
    *          names of tables to show
    */
-  public showFunctionsDesc(Path resFile, String pattern) {
+  public ShowFunctionsDesc(Path resFile, String pattern) {
     this.resFile = resFile;
     this.pattern = pattern;
   }
@@ -64,7 +64,7 @@
   /**
    * @return the pattern
    */
-  @explain(displayName = "pattern")
+  @Explain(displayName = "pattern")
   public String getPattern() {
     return pattern;
   }
@@ -84,7 +84,7 @@
     return resFile;
   }
 
-  @explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", normalExplain = false)
   public String getResFileString() {
     return getResFile().getName();
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
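
Every descriptor in this commit now carries the capitalized @Explain annotation: displayName names the operator in EXPLAIN output, and normalExplain = false (as on getResFileString above) appears to reserve a field for extended output. A sketch of the annotation shape those usages imply; the runtime retention and the exact meaning of normalExplain are assumptions, not taken from this diff:

    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;

    // Shape implied by the usages above. The two attributes match the diff;
    // RUNTIME retention is an assumption needed for a reflective plan printer.
    @Retention(RetentionPolicy.RUNTIME)
    @interface Explain {
      String displayName() default "";
      boolean normalExplain() default true; // false: assumed extended-only output
    }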

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showPartitionsDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showPartitionsDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showPartitionsDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java Mon Jan 25 18:48:58 2010
@@ -22,8 +22,8 @@
 
 import org.apache.hadoop.fs.Path;
 
-@explain(displayName = "Show Partitions")
-public class showPartitionsDesc extends ddlDesc implements Serializable {
+@Explain(displayName = "Show Partitions")
+public class ShowPartitionsDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String tabName;
   Path resFile;
@@ -50,7 +50,7 @@
    * @param resFile
    *          File to store the results in
    */
-  public showPartitionsDesc(String tabName, Path resFile) {
+  public ShowPartitionsDesc(String tabName, Path resFile) {
     this.tabName = tabName;
     this.resFile = resFile;
   }
@@ -58,7 +58,7 @@
   /**
    * @return the name of the table
    */
-  @explain(displayName = "table")
+  @Explain(displayName = "table")
   public String getTabName() {
     return tabName;
   }
@@ -78,7 +78,7 @@
     return resFile;
   }
 
-  @explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", normalExplain = false)
   public String getResFileString() {
     return getResFile().getName();
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showTableStatusDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showTableStatusDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showTableStatusDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java Mon Jan 25 18:48:58 2010
@@ -23,8 +23,8 @@
 
 import org.apache.hadoop.fs.Path;
 
-@explain(displayName = "Show Table Status")
-public class showTableStatusDesc extends ddlDesc implements Serializable {
+@Explain(displayName = "Show Table Status")
+public class ShowTableStatusDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String pattern;
   Path resFile;
@@ -52,7 +52,7 @@
    * @param pattern
    *          names of tables to show
    */
-  public showTableStatusDesc(Path resFile, String dbName, String pattern) {
+  public ShowTableStatusDesc(Path resFile, String dbName, String pattern) {
     this.dbName = dbName;
     this.resFile = resFile;
     this.pattern = pattern;
@@ -67,7 +67,7 @@
    * @param part
    *          partition specification
    */
-  public showTableStatusDesc(Path resFile, String dbName, String pattern,
+  public ShowTableStatusDesc(Path resFile, String dbName, String pattern,
       HashMap<String, String> partSpec) {
     this.dbName = dbName;
     this.resFile = resFile;
@@ -78,7 +78,7 @@
   /**
    * @return the pattern
    */
-  @explain(displayName = "pattern")
+  @Explain(displayName = "pattern")
   public String getPattern() {
     return pattern;
   }
@@ -98,7 +98,7 @@
     return resFile;
   }
 
-  @explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", normalExplain = false)
   public String getResFileString() {
     return getResFile().getName();
   }
@@ -114,7 +114,7 @@
   /**
    * @return the database name
    */
-  @explain(displayName = "database")
+  @Explain(displayName = "database")
   public String getDbName() {
     return dbName;
   }
@@ -130,7 +130,7 @@
   /**
    * @return the partSpec
    */
-  @explain(displayName = "partition")
+  @Explain(displayName = "partition")
   public HashMap<String, String> getPartSpec() {
     return partSpec;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/showTablesDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java Mon Jan 25 18:48:58 2010
@@ -22,8 +22,8 @@
 
 import org.apache.hadoop.fs.Path;
 
-@explain(displayName = "Show Tables")
-public class showTablesDesc extends ddlDesc implements Serializable {
+@Explain(displayName = "Show Tables")
+public class ShowTablesDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String pattern;
   Path resFile;
@@ -47,7 +47,7 @@
   /**
    * @param resFile
    */
-  public showTablesDesc(Path resFile) {
+  public ShowTablesDesc(Path resFile) {
     this.resFile = resFile;
     pattern = null;
   }
@@ -56,7 +56,7 @@
    * @param pattern
    *          names of tables to show
    */
-  public showTablesDesc(Path resFile, String pattern) {
+  public ShowTablesDesc(Path resFile, String pattern) {
     this.resFile = resFile;
     this.pattern = pattern;
   }
@@ -64,7 +64,7 @@
   /**
    * @return the pattern
    */
-  @explain(displayName = "pattern")
+  @Explain(displayName = "pattern")
   public String getPattern() {
     return pattern;
   }
@@ -84,7 +84,7 @@
     return resFile;
   }
 
-  @explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", normalExplain = false)
   public String getResFileString() {
     return getResFile().getName();
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java Mon Jan 25 18:48:58 2010
@@ -27,7 +27,7 @@
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.mapred.InputFormat;
 
-public class tableDesc implements Serializable, Cloneable {
+public class TableDesc implements Serializable, Cloneable {
   private static final long serialVersionUID = 1L;
   private Class<? extends Deserializer> deserializerClass;
   private Class<? extends InputFormat> inputFileFormatClass;
@@ -35,10 +35,10 @@
   private java.util.Properties properties;
   private String serdeClassName;
 
-  public tableDesc() {
+  public TableDesc() {
   }
 
-  public tableDesc(final Class<? extends Deserializer> serdeClass,
+  public TableDesc(final Class<? extends Deserializer> serdeClass,
       final Class<? extends InputFormat> inputFileFormatClass,
       final Class<?> class1, final java.util.Properties properties) {
     deserializerClass = serdeClass;
@@ -87,7 +87,7 @@
         .getOutputFormatSubstitute(outputFileFormatClass);
   }
 
-  @explain(displayName = "properties", normalExplain = false)
+  @Explain(displayName = "properties", normalExplain = false)
   public java.util.Properties getProperties() {
     return properties;
   }
@@ -99,7 +99,7 @@
   /**
    * @return the serdeClassName
    */
-  @explain(displayName = "serde")
+  @Explain(displayName = "serde")
   public String getSerdeClassName() {
     return serdeClassName;
   }
@@ -112,25 +112,25 @@
     this.serdeClassName = serdeClassName;
   }
 
-  @explain(displayName = "name")
+  @Explain(displayName = "name")
   public String getTableName() {
     return properties
         .getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME);
   }
 
-  @explain(displayName = "input format")
+  @Explain(displayName = "input format")
   public String getInputFileFormatClassName() {
     return getInputFileFormatClass().getName();
   }
 
-  @explain(displayName = "output format")
+  @Explain(displayName = "output format")
   public String getOutputFileFormatClassName() {
     return getOutputFileFormatClass().getName();
   }
 
   @Override
   public Object clone() {
-    tableDesc ret = new tableDesc();
+    TableDesc ret = new TableDesc();
     ret.setSerdeClassName(serdeClassName);
     ret.setDeserializerClass(deserializerClass);
     ret.setInputFileFormatClass(inputFileFormatClass);

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
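
getTableName() above shows that TableDesc stores no name field of its own; the name is looked up in the table Properties under the metastore key META_TABLE_NAME. A tiny self-contained sketch of that lookup, where the literal "name" is assumed to be the key's value (Hive resolves it via the metastore Constants class):

    import java.util.Properties;

    // Mirrors TableDesc.getTableName() above: the table name lives in the
    // serde Properties, not in a dedicated field. The "name" key is assumed.
    class TableNameSketch {
      public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("name", "default.src");
        System.out.println(props.getProperty("name")); // prints default.src
      }
    }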

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableScanDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableScanDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/tableScanDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java Mon Jan 25 18:48:58 2010
@@ -25,21 +25,21 @@
  * of map-reduce framework. So nothing is stored in the descriptor. But more
  * things will be added here as table scan is invoked as part of local work.
  **/
-@explain(displayName = "TableScan")
-public class tableScanDesc implements Serializable {
+@Explain(displayName = "TableScan")
+public class TableScanDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private String alias;
 
   @SuppressWarnings("nls")
-  public tableScanDesc() {
+  public TableScanDesc() {
   }
 
-  public tableScanDesc(final String alias) {
+  public TableScanDesc(final String alias) {
     this.alias = alias;
   }
 
-  @explain(displayName = "alias")
+  @Explain(displayName = "alias")
   public String getAlias() {
     return alias;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/udtfDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/udtfDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/udtfDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java Mon Jan 25 18:48:58 2010
@@ -28,16 +28,16 @@
  * time.
  * 
  */
-@explain(displayName = "UDTF Operator")
-public class udtfDesc implements Serializable {
+@Explain(displayName = "UDTF Operator")
+public class UDTFDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private GenericUDTF genericUDTF;
 
-  public udtfDesc() {
+  public UDTFDesc() {
   }
 
-  public udtfDesc(final GenericUDTF genericUDTF) {
+  public UDTFDesc(final GenericUDTF genericUDTF) {
     this.genericUDTF = genericUDTF;
   }
 
@@ -49,7 +49,7 @@
     this.genericUDTF = genericUDTF;
   }
 
-  @explain(displayName = "function name")
+  @Explain(displayName = "function name")
   public String getUDTFName() {
     return genericUDTF.toString();
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/unionDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/unionDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/unionDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java Mon Jan 25 18:48:58 2010
@@ -24,14 +24,14 @@
  * unionDesc is an empty class currently. However, union has more than one input
  * (as compared with forward), and therefore, we need a separate class.
  **/
-@explain(displayName = "Union")
-public class unionDesc implements Serializable {
+@Explain(displayName = "Union")
+public class UnionDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   transient private int numInputs;
 
   @SuppressWarnings("nls")
-  public unionDesc() {
+  public UnionDesc() {
     numInputs = 2;
   }
 

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo =