Posted to commits@hive.apache.org by zs...@apache.org on 2010/01/25 19:49:05 UTC

svn commit: r902921 [6/26] - in /hadoop/hive/trunk: ./ contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/ contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoo...

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java Mon Jan 25 18:48:58 2010
@@ -20,7 +20,7 @@
 
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 
-public class aggregationDesc implements java.io.Serializable {
+public class AggregationDesc implements java.io.Serializable {
   private static final long serialVersionUID = 1L;
   private String genericUDAFName;
 
@@ -32,16 +32,16 @@
    * deserialized. This is exactly what we want.
    */
   private GenericUDAFEvaluator genericUDAFEvaluator;
-  private java.util.ArrayList<exprNodeDesc> parameters;
+  private java.util.ArrayList<ExprNodeDesc> parameters;
   private boolean distinct;
   private GenericUDAFEvaluator.Mode mode;
 
-  public aggregationDesc() {
+  public AggregationDesc() {
   }
 
-  public aggregationDesc(final String genericUDAFName,
+  public AggregationDesc(final String genericUDAFName,
       final GenericUDAFEvaluator genericUDAFEvaluator,
-      final java.util.ArrayList<exprNodeDesc> parameters,
+      final java.util.ArrayList<ExprNodeDesc> parameters,
       final boolean distinct, final GenericUDAFEvaluator.Mode mode) {
     this.genericUDAFName = genericUDAFName;
     this.genericUDAFEvaluator = genericUDAFEvaluator;
@@ -67,11 +67,11 @@
     return genericUDAFEvaluator;
   }
 
-  public java.util.ArrayList<exprNodeDesc> getParameters() {
+  public java.util.ArrayList<ExprNodeDesc> getParameters() {
     return parameters;
   }
 
-  public void setParameters(final java.util.ArrayList<exprNodeDesc> parameters) {
+  public void setParameters(final java.util.ArrayList<ExprNodeDesc> parameters) {
     this.parameters = parameters;
   }
 
@@ -91,7 +91,7 @@
     return mode;
   }
 
-  @explain(displayName = "expr")
+  @Explain(displayName = "expr")
   public String getExprString() {
     StringBuilder sb = new StringBuilder();
     sb.append(genericUDAFName);
@@ -100,7 +100,7 @@
       sb.append("DISTINCT ");
     }
     boolean first = true;
-    for (exprNodeDesc exp : parameters) {
+    for (ExprNodeDesc exp : parameters) {
       if (first) {
         first = false;
       } else {

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
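
For illustration, a minimal sketch (hypothetical call site, not part of this
commit) of how the renamed AggregationDesc might be built for a
count(DISTINCT c) aggregation, using the constructor shown in the diff above.
The evaluator is left null here; in Hive it is supplied by the compiler rather
than by the plan description itself.

    import java.util.ArrayList;
    import org.apache.hadoop.hive.ql.plan.AggregationDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class AggregationDescSketch {
      public static AggregationDesc countDistinct(String column, String tableAlias) {
        ArrayList<ExprNodeDesc> params = new ArrayList<ExprNodeDesc>();
        // typeInfo, column name, table alias, isPartitionCol
        // (constructor shown further down in this commit, in ExprNodeColumnDesc.java)
        params.add(new ExprNodeColumnDesc(
            TypeInfoFactory.stringTypeInfo, column, tableAlias, false));
        GenericUDAFEvaluator evaluator = null; // resolved by the compiler for the named UDAF
        return new AggregationDesc("count", evaluator, params,
            true /* distinct */, GenericUDAFEvaluator.Mode.COMPLETE);
      }
    }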

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/alterTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java Mon Jan 25 18:48:58 2010
@@ -26,8 +26,8 @@
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 
-@explain(displayName = "Alter Table")
-public class alterTableDesc extends ddlDesc implements Serializable {
+@Explain(displayName = "Alter Table")
+public class AlterTableDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   public static enum alterTableTypes {
@@ -63,7 +63,7 @@
    * @param newComment
    * @param newType
    */
-  public alterTableDesc(String tblName, String oldColName, String newColName,
+  public AlterTableDesc(String tblName, String oldColName, String newColName,
       String newType, String newComment, boolean first, String afterCol) {
     super();
     oldName = tblName;
@@ -82,7 +82,7 @@
    * @param newName
    *          new name of the table
    */
-  public alterTableDesc(String oldName, String newName) {
+  public AlterTableDesc(String oldName, String newName) {
     op = alterTableTypes.RENAME;
     this.oldName = oldName;
     this.newName = newName;
@@ -94,7 +94,7 @@
    * @param newCols
    *          new columns to be added
    */
-  public alterTableDesc(String name, List<FieldSchema> newCols,
+  public AlterTableDesc(String name, List<FieldSchema> newCols,
       alterTableTypes alterType) {
     op = alterType;
     oldName = name;
@@ -105,7 +105,7 @@
    * @param alterType
    *          type of alter op
    */
-  public alterTableDesc(alterTableTypes alterType) {
+  public AlterTableDesc(alterTableTypes alterType) {
     op = alterType;
   }
 
@@ -118,7 +118,7 @@
    * @param outputFormat
    *          new table output format
    */
-  public alterTableDesc(String name, String inputFormat, String outputFormat,
+  public AlterTableDesc(String name, String inputFormat, String outputFormat,
       String serdeName) {
     super();
     op = alterTableTypes.ADDFILEFORMAT;
@@ -128,7 +128,7 @@
     this.serdeName = serdeName;
   }
 
-  public alterTableDesc(String tableName, int numBuckets,
+  public AlterTableDesc(String tableName, int numBuckets,
       List<String> bucketCols, List<Order> sortCols) {
     oldName = tableName;
     op = alterTableTypes.ADDCLUSTERSORTCOLUMN;
@@ -140,7 +140,7 @@
   /**
    * @return the old name of the table
    */
-  @explain(displayName = "old name")
+  @Explain(displayName = "old name")
   public String getOldName() {
     return oldName;
   }
@@ -156,7 +156,7 @@
   /**
    * @return the newName
    */
-  @explain(displayName = "new name")
+  @Explain(displayName = "new name")
   public String getNewName() {
     return newName;
   }
@@ -176,7 +176,7 @@
     return op;
   }
 
-  @explain(displayName = "type")
+  @Explain(displayName = "type")
   public String getAlterTableTypeString() {
     switch (op) {
     case RENAME:
@@ -205,7 +205,7 @@
     return newCols;
   }
 
-  @explain(displayName = "new columns")
+  @Explain(displayName = "new columns")
   public List<String> getNewColsString() {
     return Utilities.getFieldSchemaString(getNewCols());
   }
@@ -221,7 +221,7 @@
   /**
    * @return the serdeName
    */
-  @explain(displayName = "deserializer library")
+  @Explain(displayName = "deserializer library")
   public String getSerdeName() {
     return serdeName;
   }
@@ -237,7 +237,7 @@
   /**
    * @return the props
    */
-  @explain(displayName = "properties")
+  @Explain(displayName = "properties")
   public Map<String, String> getProps() {
     return props;
   }
@@ -253,7 +253,7 @@
   /**
    * @return the input format
    */
-  @explain(displayName = "input format")
+  @Explain(displayName = "input format")
   public String getInputFormat() {
     return inputFormat;
   }
@@ -269,7 +269,7 @@
   /**
    * @return the output format
    */
-  @explain(displayName = "output format")
+  @Explain(displayName = "output format")
   public String getOutputFormat() {
     return outputFormat;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
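
A minimal hedged sketch (hypothetical call site) of the renamed AlterTableDesc
for an ALTER TABLE ... RENAME, using the two-argument constructor shown above,
which sets op = alterTableTypes.RENAME internally:

    import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

    public class AlterTableDescSketch {
      public static AlterTableDesc rename(String oldName, String newName) {
        return new AlterTableDesc(oldName, newName); // op becomes RENAME
      }
    }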

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/collectDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/collectDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/collectDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java Mon Jan 25 18:48:58 2010
@@ -20,15 +20,15 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "Collect")
-public class collectDesc implements Serializable {
+@Explain(displayName = "Collect")
+public class CollectDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   Integer bufferSize;
 
-  public collectDesc() {
+  public CollectDesc() {
   }
 
-  public collectDesc(final Integer bufferSize) {
+  public CollectDesc(final Integer bufferSize) {
     this.bufferSize = bufferSize;
   }
 

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java?rev=902921&r1=902920&r2=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java Mon Jan 25 18:48:58 2010
@@ -117,7 +117,7 @@
         if ((currAvgSz < avgConditionSize) && (fStats.length > 1)) {
           // also set the number of reducers
           Task<? extends Serializable> tsk = ctx.getListTasks().get(1);
-          mapredWork work = (mapredWork) tsk.getWork();
+          MapredWork work = (MapredWork) tsk.getWork();
 
           int maxReducers = conf.getIntVar(HiveConf.ConfVars.MAXREDUCERS);
           int reducers = (int) ((totalSz + trgtSize - 1) / trgtSize);
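
The reducer estimate in the hunk above is an integer ceiling division; a small
worked sketch with hypothetical sizes:

    // e.g. totalSz = 1000 (MB) and trgtSize = 256 (MB), values made up for illustration
    long totalSz = 1000L, trgtSize = 256L;
    int reducers = (int) ((totalSz + trgtSize - 1) / trgtSize); // ceil(1000/256) = 4
    // presumably bounded afterwards by the maxReducers value read just above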

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalWork.java?rev=902921&r1=902920&r2=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalWork.java Mon Jan 25 18:48:58 2010
@@ -21,7 +21,7 @@
 import java.io.Serializable;
 import java.util.List;
 
-@explain(displayName = "Conditional Operator")
+@Explain(displayName = "Conditional Operator")
 public class ConditionalWork implements Serializable {
   private static final long serialVersionUID = 1L;
   List<? extends Serializable> listWorks;

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/copyWork.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/copyWork.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/copyWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java Mon Jan 25 18:48:58 2010
@@ -20,21 +20,21 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "Copy")
-public class copyWork implements Serializable {
+@Explain(displayName = "Copy")
+public class CopyWork implements Serializable {
   private static final long serialVersionUID = 1L;
   private String fromPath;
   private String toPath;
 
-  public copyWork() {
+  public CopyWork() {
   }
 
-  public copyWork(final String fromPath, final String toPath) {
+  public CopyWork(final String fromPath, final String toPath) {
     this.fromPath = fromPath;
     this.toPath = toPath;
   }
 
-  @explain(displayName = "source")
+  @Explain(displayName = "source")
   public String getFromPath() {
     return fromPath;
   }
@@ -43,7 +43,7 @@
     this.fromPath = fromPath;
   }
 
-  @explain(displayName = "destination")
+  @Explain(displayName = "destination")
   public String getToPath() {
     return toPath;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createFunctionDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createFunctionDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createFunctionDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java Mon Jan 25 18:48:58 2010
@@ -20,19 +20,19 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "Create Function")
-public class createFunctionDesc implements Serializable {
+@Explain(displayName = "Create Function")
+public class CreateFunctionDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private String functionName;
   private String className;
 
-  public createFunctionDesc(String functionName, String className) {
+  public CreateFunctionDesc(String functionName, String className) {
     this.functionName = functionName;
     this.className = className;
   }
 
-  @explain(displayName = "name")
+  @Explain(displayName = "name")
   public String getFunctionName() {
     return functionName;
   }
@@ -41,7 +41,7 @@
     this.functionName = functionName;
   }
 
-  @explain(displayName = "class")
+  @Explain(displayName = "class")
   public String getClassName() {
     return className;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
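
A small hypothetical sketch of the renamed CreateFunctionDesc; the function and
class names below are made up for illustration:

    import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;

    public class CreateFunctionDescSketch {
      public static CreateFunctionDesc example() {
        // function name as used in HiveQL, and the implementing UDF class
        return new CreateFunctionDesc("my_upper", "com.example.udf.MyUpper");
      }
    }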

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java Mon Jan 25 18:48:58 2010
@@ -26,8 +26,8 @@
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 
-@explain(displayName = "Create Table")
-public class createTableDesc extends ddlDesc implements Serializable {
+@Explain(displayName = "Create Table")
+public class CreateTableDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String tableName;
   boolean isExternal;
@@ -49,7 +49,7 @@
   Map<String, String> mapProp;
   boolean ifNotExists;
 
-  public createTableDesc(String tableName, boolean isExternal,
+  public CreateTableDesc(String tableName, boolean isExternal,
       List<FieldSchema> cols, List<FieldSchema> partCols,
       List<String> bucketCols, List<Order> sortCols, int numBuckets,
       String fieldDelim, String fieldEscape, String collItemDelim,
@@ -77,7 +77,7 @@
     this.ifNotExists = ifNotExists;
   }
 
-  @explain(displayName = "if not exists")
+  @Explain(displayName = "if not exists")
   public boolean getIfNotExists() {
     return ifNotExists;
   }
@@ -86,7 +86,7 @@
     this.ifNotExists = ifNotExists;
   }
 
-  @explain(displayName = "name")
+  @Explain(displayName = "name")
   public String getTableName() {
     return tableName;
   }
@@ -99,7 +99,7 @@
     return cols;
   }
 
-  @explain(displayName = "columns")
+  @Explain(displayName = "columns")
   public List<String> getColsString() {
     return Utilities.getFieldSchemaString(getCols());
   }
@@ -112,7 +112,7 @@
     return partCols;
   }
 
-  @explain(displayName = "partition columns")
+  @Explain(displayName = "partition columns")
   public List<String> getPartColsString() {
     return Utilities.getFieldSchemaString(getPartCols());
   }
@@ -121,7 +121,7 @@
     this.partCols = partCols;
   }
 
-  @explain(displayName = "bucket columns")
+  @Explain(displayName = "bucket columns")
   public List<String> getBucketCols() {
     return bucketCols;
   }
@@ -130,7 +130,7 @@
     this.bucketCols = bucketCols;
   }
 
-  @explain(displayName = "# buckets")
+  @Explain(displayName = "# buckets")
   public int getNumBuckets() {
     return numBuckets;
   }
@@ -139,7 +139,7 @@
     this.numBuckets = numBuckets;
   }
 
-  @explain(displayName = "field delimiter")
+  @Explain(displayName = "field delimiter")
   public String getFieldDelim() {
     return fieldDelim;
   }
@@ -148,7 +148,7 @@
     this.fieldDelim = fieldDelim;
   }
 
-  @explain(displayName = "field escape")
+  @Explain(displayName = "field escape")
   public String getFieldEscape() {
     return fieldEscape;
   }
@@ -157,7 +157,7 @@
     this.fieldEscape = fieldEscape;
   }
 
-  @explain(displayName = "collection delimiter")
+  @Explain(displayName = "collection delimiter")
   public String getCollItemDelim() {
     return collItemDelim;
   }
@@ -166,7 +166,7 @@
     this.collItemDelim = collItemDelim;
   }
 
-  @explain(displayName = "map key delimiter")
+  @Explain(displayName = "map key delimiter")
   public String getMapKeyDelim() {
     return mapKeyDelim;
   }
@@ -175,7 +175,7 @@
     this.mapKeyDelim = mapKeyDelim;
   }
 
-  @explain(displayName = "line delimiter")
+  @Explain(displayName = "line delimiter")
   public String getLineDelim() {
     return lineDelim;
   }
@@ -184,7 +184,7 @@
     this.lineDelim = lineDelim;
   }
 
-  @explain(displayName = "comment")
+  @Explain(displayName = "comment")
   public String getComment() {
     return comment;
   }
@@ -193,7 +193,7 @@
     this.comment = comment;
   }
 
-  @explain(displayName = "input format")
+  @Explain(displayName = "input format")
   public String getInputFormat() {
     return inputFormat;
   }
@@ -202,7 +202,7 @@
     this.inputFormat = inputFormat;
   }
 
-  @explain(displayName = "output format")
+  @Explain(displayName = "output format")
   public String getOutputFormat() {
     return outputFormat;
   }
@@ -211,7 +211,7 @@
     this.outputFormat = outputFormat;
   }
 
-  @explain(displayName = "location")
+  @Explain(displayName = "location")
   public String getLocation() {
     return location;
   }
@@ -220,7 +220,7 @@
     this.location = location;
   }
 
-  @explain(displayName = "isExternal")
+  @Explain(displayName = "isExternal")
   public boolean isExternal() {
     return isExternal;
   }
@@ -232,7 +232,7 @@
   /**
    * @return the sortCols
    */
-  @explain(displayName = "sort columns")
+  @Explain(displayName = "sort columns")
   public List<Order> getSortCols() {
     return sortCols;
   }
@@ -248,7 +248,7 @@
   /**
    * @return the serDeName
    */
-  @explain(displayName = "serde name")
+  @Explain(displayName = "serde name")
   public String getSerName() {
     return serName;
   }
@@ -264,7 +264,7 @@
   /**
    * @return the serDe properties
    */
-  @explain(displayName = "serde properties")
+  @Explain(displayName = "serde properties")
   public Map<String, String> getMapProp() {
     return mapProp;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableLikeDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableLikeDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createTableLikeDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java Mon Jan 25 18:48:58 2010
@@ -20,8 +20,8 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "Create Table")
-public class createTableLikeDesc extends ddlDesc implements Serializable {
+@Explain(displayName = "Create Table")
+public class CreateTableLikeDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String tableName;
   boolean isExternal;
@@ -29,7 +29,7 @@
   boolean ifNotExists;
   String likeTableName;
 
-  public createTableLikeDesc(String tableName, boolean isExternal,
+  public CreateTableLikeDesc(String tableName, boolean isExternal,
       String location, boolean ifNotExists, String likeTableName) {
     this.tableName = tableName;
     this.isExternal = isExternal;
@@ -38,7 +38,7 @@
     this.likeTableName = likeTableName;
   }
 
-  @explain(displayName = "if not exists")
+  @Explain(displayName = "if not exists")
   public boolean getIfNotExists() {
     return ifNotExists;
   }
@@ -47,7 +47,7 @@
     this.ifNotExists = ifNotExists;
   }
 
-  @explain(displayName = "name")
+  @Explain(displayName = "name")
   public String getTableName() {
     return tableName;
   }
@@ -56,7 +56,7 @@
     this.tableName = tableName;
   }
 
-  @explain(displayName = "location")
+  @Explain(displayName = "location")
   public String getLocation() {
     return location;
   }
@@ -65,7 +65,7 @@
     this.location = location;
   }
 
-  @explain(displayName = "isExternal")
+  @Explain(displayName = "isExternal")
   public boolean isExternal() {
     return isExternal;
   }
@@ -74,7 +74,7 @@
     this.isExternal = isExternal;
   }
 
-  @explain(displayName = "like")
+  @Explain(displayName = "like")
   public String getLikeTableName() {
     return likeTableName;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createViewDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createViewDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createViewDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java Mon Jan 25 18:48:58 2010
@@ -24,8 +24,8 @@
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 
-@explain(displayName = "Create View")
-public class createViewDesc implements Serializable {
+@Explain(displayName = "Create View")
+public class CreateViewDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private String viewName;
@@ -35,7 +35,7 @@
   private String comment;
   private boolean ifNotExists;
 
-  public createViewDesc(String viewName, List<FieldSchema> schema,
+  public CreateViewDesc(String viewName, List<FieldSchema> schema,
       String comment, boolean ifNotExists) {
     this.viewName = viewName;
     this.schema = schema;
@@ -43,7 +43,7 @@
     this.ifNotExists = ifNotExists;
   }
 
-  @explain(displayName = "name")
+  @Explain(displayName = "name")
   public String getViewName() {
     return viewName;
   }
@@ -52,7 +52,7 @@
     this.viewName = viewName;
   }
 
-  @explain(displayName = "original text")
+  @Explain(displayName = "original text")
   public String getViewOriginalText() {
     return originalText;
   }
@@ -61,7 +61,7 @@
     this.originalText = originalText;
   }
 
-  @explain(displayName = "expanded text")
+  @Explain(displayName = "expanded text")
   public String getViewExpandedText() {
     return expandedText;
   }
@@ -70,7 +70,7 @@
     this.expandedText = expandedText;
   }
 
-  @explain(displayName = "columns")
+  @Explain(displayName = "columns")
   public List<String> getSchemaString() {
     return Utilities.getFieldSchemaString(schema);
   }
@@ -83,7 +83,7 @@
     this.schema = schema;
   }
 
-  @explain(displayName = "comment")
+  @Explain(displayName = "comment")
   public String getComment() {
     return comment;
   }
@@ -92,7 +92,7 @@
     this.comment = comment;
   }
 
-  @explain(displayName = "if not exists")
+  @Explain(displayName = "if not exists")
   public boolean getIfNotExists() {
     return ifNotExists;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ddlDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ddlDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ddlDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLDesc.java Mon Jan 25 18:48:58 2010
@@ -20,6 +20,6 @@
 
 import java.io.Serializable;
 
-public abstract class ddlDesc implements Serializable {
+public abstract class DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=902921&r1=902920&r2=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Mon Jan 25 18:48:58 2010
@@ -26,19 +26,19 @@
 
 public class DDLWork implements Serializable {
   private static final long serialVersionUID = 1L;
-  private createTableDesc createTblDesc;
-  private createTableLikeDesc createTblLikeDesc;
-  private createViewDesc createVwDesc;
-  private dropTableDesc dropTblDesc;
-  private alterTableDesc alterTblDesc;
-  private showTablesDesc showTblsDesc;
-  private showFunctionsDesc showFuncsDesc;
-  private descFunctionDesc descFunctionDesc;
-  private showPartitionsDesc showPartsDesc;
-  private descTableDesc descTblDesc;
+  private CreateTableDesc createTblDesc;
+  private CreateTableLikeDesc createTblLikeDesc;
+  private CreateViewDesc createVwDesc;
+  private DropTableDesc dropTblDesc;
+  private AlterTableDesc alterTblDesc;
+  private ShowTablesDesc showTblsDesc;
+  private ShowFunctionsDesc showFuncsDesc;
+  private DescFunctionDesc descFunctionDesc;
+  private ShowPartitionsDesc showPartsDesc;
+  private DescTableDesc descTblDesc;
   private AddPartitionDesc addPartitionDesc;
   private MsckDesc msckDesc;
-  private showTableStatusDesc showTblStatusDesc;
+  private ShowTableStatusDesc showTblStatusDesc;
 
   /**
    * ReadEntitites that are passed to the hooks.
@@ -62,7 +62,7 @@
    *          alter table descriptor
    */
   public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      alterTableDesc alterTblDesc) {
+      AlterTableDesc alterTblDesc) {
     this(inputs, outputs);
     this.alterTblDesc = alterTblDesc;
   }
@@ -72,7 +72,7 @@
    *          create table descriptor
    */
   public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      createTableDesc createTblDesc) {
+      CreateTableDesc createTblDesc) {
     this(inputs, outputs);
 
     this.createTblDesc = createTblDesc;
@@ -83,7 +83,7 @@
    *          create table like descriptor
    */
   public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      createTableLikeDesc createTblLikeDesc) {
+      CreateTableLikeDesc createTblLikeDesc) {
     this(inputs, outputs);
 
     this.createTblLikeDesc = createTblLikeDesc;
@@ -94,7 +94,7 @@
    *          create view descriptor
    */
   public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      createViewDesc createVwDesc) {
+      CreateViewDesc createVwDesc) {
     this(inputs, outputs);
 
     this.createVwDesc = createVwDesc;
@@ -105,7 +105,7 @@
    *          drop table descriptor
    */
   public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      dropTableDesc dropTblDesc) {
+      DropTableDesc dropTblDesc) {
     this(inputs, outputs);
 
     this.dropTblDesc = dropTblDesc;
@@ -115,7 +115,7 @@
    * @param descTblDesc
    */
   public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      descTableDesc descTblDesc) {
+      DescTableDesc descTblDesc) {
     this(inputs, outputs);
 
     this.descTblDesc = descTblDesc;
@@ -125,7 +125,7 @@
    * @param showTblsDesc
    */
   public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      showTablesDesc showTblsDesc) {
+      ShowTablesDesc showTblsDesc) {
     this(inputs, outputs);
 
     this.showTblsDesc = showTblsDesc;
@@ -135,7 +135,7 @@
    * @param showFuncsDesc
    */
   public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      showFunctionsDesc showFuncsDesc) {
+      ShowFunctionsDesc showFuncsDesc) {
     this(inputs, outputs);
 
     this.showFuncsDesc = showFuncsDesc;
@@ -145,7 +145,7 @@
    * @param descFuncDesc
    */
   public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      descFunctionDesc descFuncDesc) {
+      DescFunctionDesc descFuncDesc) {
     this(inputs, outputs);
 
     descFunctionDesc = descFuncDesc;
@@ -155,7 +155,7 @@
    * @param showPartsDesc
    */
   public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      showPartitionsDesc showPartsDesc) {
+      ShowPartitionsDesc showPartsDesc) {
     this(inputs, outputs);
 
     this.showPartsDesc = showPartsDesc;
@@ -184,7 +184,7 @@
    *          show table status descriptor
    */
   public DDLWork(Set<ReadEntity> inputs, Set<WriteEntity> outputs,
-      showTableStatusDesc showTblStatusDesc) {
+      ShowTableStatusDesc showTblStatusDesc) {
     this(inputs, outputs);
 
     this.showTblStatusDesc = showTblStatusDesc;
@@ -193,8 +193,8 @@
   /**
    * @return the createTblDesc
    */
-  @explain(displayName = "Create Table Operator")
-  public createTableDesc getCreateTblDesc() {
+  @Explain(displayName = "Create Table Operator")
+  public CreateTableDesc getCreateTblDesc() {
     return createTblDesc;
   }
 
@@ -202,15 +202,15 @@
    * @param createTblDesc
    *          the createTblDesc to set
    */
-  public void setCreateTblDesc(createTableDesc createTblDesc) {
+  public void setCreateTblDesc(CreateTableDesc createTblDesc) {
     this.createTblDesc = createTblDesc;
   }
 
   /**
    * @return the createTblDesc
    */
-  @explain(displayName = "Create Table Operator")
-  public createTableLikeDesc getCreateTblLikeDesc() {
+  @Explain(displayName = "Create Table Operator")
+  public CreateTableLikeDesc getCreateTblLikeDesc() {
     return createTblLikeDesc;
   }
 
@@ -218,15 +218,15 @@
    * @param createTblLikeDesc
    *          the createTblDesc to set
    */
-  public void setCreateTblLikeDesc(createTableLikeDesc createTblLikeDesc) {
+  public void setCreateTblLikeDesc(CreateTableLikeDesc createTblLikeDesc) {
     this.createTblLikeDesc = createTblLikeDesc;
   }
 
   /**
    * @return the createTblDesc
    */
-  @explain(displayName = "Create View Operator")
-  public createViewDesc getCreateViewDesc() {
+  @Explain(displayName = "Create View Operator")
+  public CreateViewDesc getCreateViewDesc() {
     return createVwDesc;
   }
 
@@ -234,15 +234,15 @@
    * @param createVwDesc
    *          the createViewDesc to set
    */
-  public void setCreateViewDesc(createViewDesc createVwDesc) {
+  public void setCreateViewDesc(CreateViewDesc createVwDesc) {
     this.createVwDesc = createVwDesc;
   }
 
   /**
    * @return the dropTblDesc
    */
-  @explain(displayName = "Drop Table Operator")
-  public dropTableDesc getDropTblDesc() {
+  @Explain(displayName = "Drop Table Operator")
+  public DropTableDesc getDropTblDesc() {
     return dropTblDesc;
   }
 
@@ -250,15 +250,15 @@
    * @param dropTblDesc
    *          the dropTblDesc to set
    */
-  public void setDropTblDesc(dropTableDesc dropTblDesc) {
+  public void setDropTblDesc(DropTableDesc dropTblDesc) {
     this.dropTblDesc = dropTblDesc;
   }
 
   /**
    * @return the alterTblDesc
    */
-  @explain(displayName = "Alter Table Operator")
-  public alterTableDesc getAlterTblDesc() {
+  @Explain(displayName = "Alter Table Operator")
+  public AlterTableDesc getAlterTblDesc() {
     return alterTblDesc;
   }
 
@@ -266,15 +266,15 @@
    * @param alterTblDesc
    *          the alterTblDesc to set
    */
-  public void setAlterTblDesc(alterTableDesc alterTblDesc) {
+  public void setAlterTblDesc(AlterTableDesc alterTblDesc) {
     this.alterTblDesc = alterTblDesc;
   }
 
   /**
    * @return the showTblsDesc
    */
-  @explain(displayName = "Show Table Operator")
-  public showTablesDesc getShowTblsDesc() {
+  @Explain(displayName = "Show Table Operator")
+  public ShowTablesDesc getShowTblsDesc() {
     return showTblsDesc;
   }
 
@@ -282,23 +282,23 @@
    * @param showTblsDesc
    *          the showTblsDesc to set
    */
-  public void setShowTblsDesc(showTablesDesc showTblsDesc) {
+  public void setShowTblsDesc(ShowTablesDesc showTblsDesc) {
     this.showTblsDesc = showTblsDesc;
   }
 
   /**
    * @return the showFuncsDesc
    */
-  @explain(displayName = "Show Function Operator")
-  public showFunctionsDesc getShowFuncsDesc() {
+  @Explain(displayName = "Show Function Operator")
+  public ShowFunctionsDesc getShowFuncsDesc() {
     return showFuncsDesc;
   }
 
   /**
    * @return the descFuncDesc
    */
-  @explain(displayName = "Show Function Operator")
-  public descFunctionDesc getDescFunctionDesc() {
+  @Explain(displayName = "Show Function Operator")
+  public DescFunctionDesc getDescFunctionDesc() {
     return descFunctionDesc;
   }
 
@@ -306,7 +306,7 @@
    * @param showFuncsDesc
    *          the showFuncsDesc to set
    */
-  public void setShowFuncsDesc(showFunctionsDesc showFuncsDesc) {
+  public void setShowFuncsDesc(ShowFunctionsDesc showFuncsDesc) {
     this.showFuncsDesc = showFuncsDesc;
   }
 
@@ -314,15 +314,15 @@
    * @param descFuncDesc
    *          the showFuncsDesc to set
    */
-  public void setDescFuncDesc(descFunctionDesc descFuncDesc) {
+  public void setDescFuncDesc(DescFunctionDesc descFuncDesc) {
     descFunctionDesc = descFuncDesc;
   }
 
   /**
    * @return the showPartsDesc
    */
-  @explain(displayName = "Show Partitions Operator")
-  public showPartitionsDesc getShowPartsDesc() {
+  @Explain(displayName = "Show Partitions Operator")
+  public ShowPartitionsDesc getShowPartsDesc() {
     return showPartsDesc;
   }
 
@@ -330,15 +330,15 @@
    * @param showPartsDesc
    *          the showPartsDesc to set
    */
-  public void setShowPartsDesc(showPartitionsDesc showPartsDesc) {
+  public void setShowPartsDesc(ShowPartitionsDesc showPartsDesc) {
     this.showPartsDesc = showPartsDesc;
   }
 
   /**
    * @return the descTblDesc
    */
-  @explain(displayName = "Describe Table Operator")
-  public descTableDesc getDescTblDesc() {
+  @Explain(displayName = "Describe Table Operator")
+  public DescTableDesc getDescTblDesc() {
     return descTblDesc;
   }
 
@@ -346,7 +346,7 @@
    * @param descTblDesc
    *          the descTblDesc to set
    */
-  public void setDescTblDesc(descTableDesc descTblDesc) {
+  public void setDescTblDesc(DescTableDesc descTblDesc) {
     this.descTblDesc = descTblDesc;
   }
 
@@ -383,7 +383,7 @@
   /**
    * @return show table descriptor
    */
-  public showTableStatusDesc getShowTblStatusDesc() {
+  public ShowTableStatusDesc getShowTblStatusDesc() {
     return showTblStatusDesc;
   }
 
@@ -391,7 +391,7 @@
    * @param showTblStatusDesc
    *          show table descriptor
    */
-  public void setShowTblStatusDesc(showTableStatusDesc showTblStatusDesc) {
+  public void setShowTblStatusDesc(ShowTableStatusDesc showTblStatusDesc) {
     this.showTblStatusDesc = showTblStatusDesc;
   }
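
A hedged sketch (hypothetical call site) of wrapping one of the renamed
descriptors in a DDLWork via the overloaded constructors above; the
input/output entity sets are left empty here:

    import java.util.HashSet;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;
    import org.apache.hadoop.hive.ql.plan.DDLWork;
    import org.apache.hadoop.hive.ql.plan.DropTableDesc;

    public class DDLWorkSketch {
      public static DDLWork dropTable(String tableName) {
        DropTableDesc drop = new DropTableDesc(tableName, false /* expectView */);
        return new DDLWork(new HashSet<ReadEntity>(), new HashSet<WriteEntity>(), drop);
      }
    }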
 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/descFunctionDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/descFunctionDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/descFunctionDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java Mon Jan 25 18:48:58 2010
@@ -22,8 +22,8 @@
 
 import org.apache.hadoop.fs.Path;
 
-@explain(displayName = "Describe Function")
-public class descFunctionDesc extends ddlDesc implements Serializable {
+@Explain(displayName = "Describe Function")
+public class DescFunctionDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String name;
   Path resFile;
@@ -57,7 +57,7 @@
   /**
    * @param resFile
    */
-  public descFunctionDesc(Path resFile) {
+  public DescFunctionDesc(Path resFile) {
     this.resFile = resFile;
     name = null;
   }
@@ -66,7 +66,7 @@
    * @param name
    *          of the function to describe
    */
-  public descFunctionDesc(Path resFile, String name, boolean isExtended) {
+  public DescFunctionDesc(Path resFile, String name, boolean isExtended) {
     this.isExtended = isExtended;
     this.resFile = resFile;
     this.name = name;
@@ -75,7 +75,7 @@
   /**
    * @return the name
    */
-  @explain(displayName = "name")
+  @Explain(displayName = "name")
   public String getName() {
     return name;
   }
@@ -95,7 +95,7 @@
     return resFile;
   }
 
-  @explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", normalExplain = false)
   public String getResFileString() {
     return getResFile().getName();
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/descTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java Mon Jan 25 18:48:58 2010
@@ -23,8 +23,8 @@
 
 import org.apache.hadoop.fs.Path;
 
-@explain(displayName = "Describe Table")
-public class descTableDesc extends ddlDesc implements Serializable {
+@Explain(displayName = "Describe Table")
+public class DescTableDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   String tableName;
@@ -54,7 +54,7 @@
    * @param resFile
    * @param tableName
    */
-  public descTableDesc(Path resFile, String tableName,
+  public DescTableDesc(Path resFile, String tableName,
       HashMap<String, String> partSpec, boolean isExt) {
     this.isExt = isExt;
     this.partSpec = partSpec;
@@ -80,7 +80,7 @@
   /**
    * @return the tableName
    */
-  @explain(displayName = "table")
+  @Explain(displayName = "table")
   public String getTableName() {
     return tableName;
   }
@@ -96,7 +96,7 @@
   /**
    * @return the partSpec
    */
-  @explain(displayName = "partition")
+  @Explain(displayName = "partition")
   public HashMap<String, String> getPartSpec() {
     return partSpec;
   }
@@ -116,7 +116,7 @@
     return resFile;
   }
 
-  @explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", normalExplain = false)
   public String getResFileString() {
     return getResFile().getName();
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/dropFunctionDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/dropFunctionDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/dropFunctionDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java Mon Jan 25 18:48:58 2010
@@ -20,17 +20,17 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "Drop Function")
-public class dropFunctionDesc implements Serializable {
+@Explain(displayName = "Drop Function")
+public class DropFunctionDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private String functionName;
 
-  public dropFunctionDesc(String functionName) {
+  public DropFunctionDesc(String functionName) {
     this.functionName = functionName;
   }
 
-  @explain(displayName = "name")
+  @Explain(displayName = "name")
   public String getFunctionName() {
     return functionName;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java Mon Jan 25 18:48:58 2010
@@ -22,8 +22,8 @@
 import java.util.List;
 import java.util.Map;
 
-@explain(displayName = "Drop Table")
-public class dropTableDesc extends ddlDesc implements Serializable {
+@Explain(displayName = "Drop Table")
+public class DropTableDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   String tableName;
@@ -33,13 +33,13 @@
   /**
    * @param tableName
    */
-  public dropTableDesc(String tableName, boolean expectView) {
+  public DropTableDesc(String tableName, boolean expectView) {
     this.tableName = tableName;
     partSpecs = null;
     this.expectView = expectView;
   }
 
-  public dropTableDesc(String tableName, List<Map<String, String>> partSpecs) {
+  public DropTableDesc(String tableName, List<Map<String, String>> partSpecs) {
     this.tableName = tableName;
     this.partSpecs = partSpecs;
     expectView = false;
@@ -48,7 +48,7 @@
   /**
    * @return the tableName
    */
-  @explain(displayName = "table")
+  @Explain(displayName = "table")
   public String getTableName() {
     return tableName;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explain.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explain.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explain.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java Mon Jan 25 18:48:58 2010
@@ -22,7 +22,7 @@
 import java.lang.annotation.RetentionPolicy;
 
 @Retention(RetentionPolicy.RUNTIME)
-public @interface explain {
+public @interface Explain {
   String displayName() default "";
 
   boolean normalExplain() default true;

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
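
Since the renamed Explain annotation has runtime retention, it can be read back
from plan getters via reflection; a minimal sketch (the actual EXPLAIN
traversal lives elsewhere and is more involved):

    import java.lang.reflect.Method;
    import org.apache.hadoop.hive.ql.plan.CopyWork;
    import org.apache.hadoop.hive.ql.plan.Explain;

    public class ExplainAnnotationSketch {
      public static void main(String[] args) throws Exception {
        Method m = CopyWork.class.getMethod("getFromPath");
        Explain ann = m.getAnnotation(Explain.class);
        if (ann != null) {
          // prints "source true" -- the displayName declared on CopyWork.getFromPath()
          System.out.println(ann.displayName() + " " + ann.normalExplain());
        }
      }
    }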

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explainWork.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explainWork.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explainWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java Mon Jan 25 18:48:58 2010
@@ -24,7 +24,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.Task;
 
-public class explainWork implements Serializable {
+public class ExplainWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private Path resFile;
@@ -32,10 +32,10 @@
   private String astStringTree;
   boolean extended;
 
-  public explainWork() {
+  public ExplainWork() {
   }
 
-  public explainWork(Path resFile,
+  public ExplainWork(Path resFile,
       List<Task<? extends Serializable>> rootTasks, String astStringTree,
       boolean extended) {
     this.resFile = resFile;

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explosionDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explosionDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/explosionDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java Mon Jan 25 18:48:58 2010
@@ -20,16 +20,16 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "Explosion")
-public class explosionDesc implements Serializable {
+@Explain(displayName = "Explosion")
+public class ExplosionDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private String fieldName;
   private int position;
 
-  public explosionDesc() {
+  public ExplosionDesc() {
   }
 
-  public explosionDesc(final String fieldName, final int position) {
+  public ExplosionDesc(final String fieldName, final int position) {
     this.fieldName = fieldName;
     this.position = position;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeColumnDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java Mon Jan 25 18:48:58 2010
@@ -25,7 +25,7 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
-public class exprNodeColumnDesc extends exprNodeDesc implements Serializable {
+public class ExprNodeColumnDesc extends ExprNodeDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   /**
@@ -43,10 +43,10 @@
    */
   private boolean isPartitionCol;
 
-  public exprNodeColumnDesc() {
+  public ExprNodeColumnDesc() {
   }
 
-  public exprNodeColumnDesc(TypeInfo typeInfo, String column, String tabAlias,
+  public ExprNodeColumnDesc(TypeInfo typeInfo, String column, String tabAlias,
       boolean isPartitionCol) {
     super(typeInfo);
     this.column = column;
@@ -54,7 +54,7 @@
     this.isPartitionCol = isPartitionCol;
   }
 
-  public exprNodeColumnDesc(Class<?> c, String column, String tabAlias,
+  public ExprNodeColumnDesc(Class<?> c, String column, String tabAlias,
       boolean isPartitionCol) {
     super(TypeInfoFactory.getPrimitiveTypeInfoFromJavaPrimitive(c));
     this.column = column;
@@ -91,7 +91,7 @@
     return "Column[" + column + "]";
   }
 
-  @explain(displayName = "expr")
+  @Explain(displayName = "expr")
   @Override
   public String getExprString() {
     return getColumn();
@@ -105,16 +105,16 @@
   }
 
   @Override
-  public exprNodeDesc clone() {
-    return new exprNodeColumnDesc(typeInfo, column, tabAlias, isPartitionCol);
+  public ExprNodeDesc clone() {
+    return new ExprNodeColumnDesc(typeInfo, column, tabAlias, isPartitionCol);
   }
 
   @Override
   public boolean isSame(Object o) {
-    if (!(o instanceof exprNodeColumnDesc)) {
+    if (!(o instanceof ExprNodeColumnDesc)) {
       return false;
     }
-    exprNodeColumnDesc dest = (exprNodeColumnDesc) o;
+    ExprNodeColumnDesc dest = (ExprNodeColumnDesc) o;
     if (!column.equals(dest.getColumn())) {
       return false;
     }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java
------------------------------------------------------------------------------
    svn:executable = *

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java Mon Jan 25 18:48:58 2010
@@ -27,19 +27,19 @@
 /**
  * A constant expression.
  */
-public class exprNodeConstantDesc extends exprNodeDesc implements Serializable {
+public class ExprNodeConstantDesc extends ExprNodeDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private Object value;
 
-  public exprNodeConstantDesc() {
+  public ExprNodeConstantDesc() {
   }
 
-  public exprNodeConstantDesc(TypeInfo typeInfo, Object value) {
+  public ExprNodeConstantDesc(TypeInfo typeInfo, Object value) {
     super(typeInfo);
     this.value = value;
   }
 
-  public exprNodeConstantDesc(Object value) {
+  public ExprNodeConstantDesc(Object value) {
     this(TypeInfoFactory
         .getPrimitiveTypeInfoFromJavaPrimitive(value.getClass()), value);
   }
@@ -57,7 +57,7 @@
     return "Const " + typeInfo.toString() + " " + value;
   }
 
-  @explain(displayName = "expr")
+  @Explain(displayName = "expr")
   @Override
   public String getExprString() {
     if (value == null) {
@@ -72,16 +72,16 @@
   }
 
   @Override
-  public exprNodeDesc clone() {
-    return new exprNodeConstantDesc(typeInfo, value);
+  public ExprNodeDesc clone() {
+    return new ExprNodeConstantDesc(typeInfo, value);
   }
 
   @Override
   public boolean isSame(Object o) {
-    if (!(o instanceof exprNodeConstantDesc)) {
+    if (!(o instanceof ExprNodeConstantDesc)) {
       return false;
     }
-    exprNodeConstantDesc dest = (exprNodeConstantDesc) o;
+    ExprNodeConstantDesc dest = (ExprNodeConstantDesc) o;
     if (!typeInfo.equals(dest.getTypeInfo())) {
       return false;
     }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
------------------------------------------------------------------------------
    svn:executable = *

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
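
[Editorial usage note, not part of this commit] The hunks above keep ExprNodeConstantDesc's constructors intact, so the renamed class is used exactly as before. A minimal sketch of wrapping a literal, assuming the one-argument constructor still derives the TypeInfo from the value's class as shown in the diff; the class name ConstantDescSketch and the printed output are illustrative only:

    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

    public class ConstantDescSketch {
      public static void main(String[] args) {
        // The one-argument constructor derives the TypeInfo from the value's class
        // via TypeInfoFactory.getPrimitiveTypeInfoFromJavaPrimitive (see hunk above).
        ExprNodeConstantDesc one = new ExprNodeConstantDesc(Integer.valueOf(1));

        // getExprString() backs the @Explain(displayName = "expr") output.
        System.out.println(one.getExprString());

        // clone() hands back a fresh node; isSame() compares type and value rather
        // than object identity, so the copy should still match.
        ExprNodeDesc copy = one.clone();
        System.out.println(one.isSame(copy));
      }
    }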

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java Mon Jan 25 18:48:58 2010
@@ -24,14 +24,14 @@
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
-public abstract class exprNodeDesc implements Serializable, Node {
+public abstract class ExprNodeDesc implements Serializable, Node {
   private static final long serialVersionUID = 1L;
   TypeInfo typeInfo;
 
-  public exprNodeDesc() {
+  public ExprNodeDesc() {
   }
 
-  public exprNodeDesc(TypeInfo typeInfo) {
+  public ExprNodeDesc(TypeInfo typeInfo) {
     this.typeInfo = typeInfo;
     if (typeInfo == null) {
       throw new RuntimeException("typeInfo cannot be null!");
@@ -39,7 +39,7 @@
   }
 
   @Override
-  public abstract exprNodeDesc clone();
+  public abstract ExprNodeDesc clone();
 
   // Cant use equals because the walker depends on them being object equal
   // The default graph walker processes a node after its kids have been
@@ -60,7 +60,7 @@
     return null;
   }
 
-  @explain(displayName = "type")
+  @Explain(displayName = "type")
   public String getTypeString() {
     return typeInfo.getTypeName();
   }
@@ -70,7 +70,7 @@
   }
 
   @Override
-  public List<exprNodeDesc> getChildren() {
+  public List<ExprNodeDesc> getChildren() {
     return null;
   }
 

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java
------------------------------------------------------------------------------
    svn:executable = *

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeFieldDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeFieldDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeFieldDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeFieldDesc.java Mon Jan 25 18:48:58 2010
@@ -25,9 +25,9 @@
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
-public class exprNodeFieldDesc extends exprNodeDesc implements Serializable {
+public class ExprNodeFieldDesc extends ExprNodeDesc implements Serializable {
   private static final long serialVersionUID = 1L;
-  exprNodeDesc desc;
+  ExprNodeDesc desc;
   String fieldName;
 
   // Used to support a.b where a is a list of struct that contains a field
@@ -35,10 +35,10 @@
   // a.b will return an array that contains field b of all elements of array a.
   Boolean isList;
 
-  public exprNodeFieldDesc() {
+  public ExprNodeFieldDesc() {
   }
 
-  public exprNodeFieldDesc(TypeInfo typeInfo, exprNodeDesc desc,
+  public ExprNodeFieldDesc(TypeInfo typeInfo, ExprNodeDesc desc,
       String fieldName, Boolean isList) {
     super(typeInfo);
     this.desc = desc;
@@ -47,17 +47,17 @@
   }
 
   @Override
-  public List<exprNodeDesc> getChildren() {
-    List<exprNodeDesc> children = new ArrayList<exprNodeDesc>(2);
+  public List<ExprNodeDesc> getChildren() {
+    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
     children.add(desc);
     return children;
   }
 
-  public exprNodeDesc getDesc() {
+  public ExprNodeDesc getDesc() {
     return desc;
   }
 
-  public void setDesc(exprNodeDesc desc) {
+  public void setDesc(ExprNodeDesc desc) {
     this.desc = desc;
   }
 
@@ -82,7 +82,7 @@
     return desc.toString() + "." + fieldName;
   }
 
-  @explain(displayName = "expr")
+  @Explain(displayName = "expr")
   @Override
   public String getExprString() {
     return desc.getExprString() + "." + fieldName;
@@ -98,16 +98,16 @@
   }
 
   @Override
-  public exprNodeDesc clone() {
-    return new exprNodeFieldDesc(typeInfo, desc, fieldName, isList);
+  public ExprNodeDesc clone() {
+    return new ExprNodeFieldDesc(typeInfo, desc, fieldName, isList);
   }
 
   @Override
   public boolean isSame(Object o) {
-    if (!(o instanceof exprNodeFieldDesc)) {
+    if (!(o instanceof ExprNodeFieldDesc)) {
       return false;
     }
-    exprNodeFieldDesc dest = (exprNodeFieldDesc) o;
+    ExprNodeFieldDesc dest = (ExprNodeFieldDesc) o;
     if (!typeInfo.equals(dest.getTypeInfo())) {
       return false;
     }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeFieldDesc.java
------------------------------------------------------------------------------
    svn:executable = *

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeFieldDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
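
[Editorial usage note, not part of this commit] The comment in the hunks above describes the a.b case over an array of structs; the sketch below shows the plainer single-struct case. It assumes TypeInfoFactory.getStructTypeInfo and the intTypeInfo/stringTypeInfo constants behave as their names suggest; the struct layout, column name and table alias are invented for illustration:

    import java.util.Arrays;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class FieldDescSketch {
      public static void main(String[] args) {
        // Made-up column "s" of type struct<a:int,b:string> on table alias "t".
        TypeInfo structType = TypeInfoFactory.getStructTypeInfo(
            Arrays.asList("a", "b"),
            Arrays.<TypeInfo>asList(TypeInfoFactory.intTypeInfo,
                                    TypeInfoFactory.stringTypeInfo));
        ExprNodeColumnDesc structCol =
            new ExprNodeColumnDesc(structType, "s", "t", false);

        // s.b: the node's own TypeInfo is the field's type; isList is false because
        // "s" is a single struct, not an array of structs (the list case is the
        // a.b-over-array behaviour described in the comment above).
        ExprNodeFieldDesc fieldAccess =
            new ExprNodeFieldDesc(TypeInfoFactory.stringTypeInfo, structCol, "b", false);
        System.out.println(fieldAccess.getExprString());   // expected: s.b
      }
    }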

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java Mon Jan 25 18:48:58 2010
@@ -34,7 +34,7 @@
 /**
  * Describes a GenericFunc node.
  */
-public class exprNodeGenericFuncDesc extends exprNodeDesc implements
+public class ExprNodeGenericFuncDesc extends ExprNodeDesc implements
     Serializable {
 
   private static final long serialVersionUID = 1L;
@@ -47,13 +47,13 @@
    * exactly what we want.
    */
   private GenericUDF genericUDF;
-  private List<exprNodeDesc> childExprs;
+  private List<ExprNodeDesc> childExprs;
 
-  public exprNodeGenericFuncDesc() {
+  public ExprNodeGenericFuncDesc() {
   }
 
-  public exprNodeGenericFuncDesc(TypeInfo typeInfo, GenericUDF genericUDF,
-      List<exprNodeDesc> children) {
+  public ExprNodeGenericFuncDesc(TypeInfo typeInfo, GenericUDF genericUDF,
+      List<ExprNodeDesc> children) {
     super(typeInfo);
     assert (genericUDF != null);
     this.genericUDF = genericUDF;
@@ -68,16 +68,16 @@
     this.genericUDF = genericUDF;
   }
 
-  public List<exprNodeDesc> getChildExprs() {
+  public List<ExprNodeDesc> getChildExprs() {
     return childExprs;
   }
 
-  public void setChildExprs(List<exprNodeDesc> children) {
+  public void setChildExprs(List<ExprNodeDesc> children) {
     childExprs = children;
   }
 
   @Override
-  public List<exprNodeDesc> getChildren() {
+  public List<ExprNodeDesc> getChildren() {
     return childExprs;
   }
 
@@ -97,7 +97,7 @@
     return sb.toString();
   }
 
-  @explain(displayName = "expr")
+  @Explain(displayName = "expr")
   @Override
   public String getExprString() {
     // Get the children expr strings
@@ -125,12 +125,12 @@
   }
 
   @Override
-  public exprNodeDesc clone() {
-    List<exprNodeDesc> cloneCh = new ArrayList<exprNodeDesc>(childExprs.size());
-    for (exprNodeDesc ch : childExprs) {
+  public ExprNodeDesc clone() {
+    List<ExprNodeDesc> cloneCh = new ArrayList<ExprNodeDesc>(childExprs.size());
+    for (ExprNodeDesc ch : childExprs) {
       cloneCh.add(ch.clone());
     }
-    exprNodeGenericFuncDesc clone = new exprNodeGenericFuncDesc(typeInfo,
+    ExprNodeGenericFuncDesc clone = new ExprNodeGenericFuncDesc(typeInfo,
         FunctionRegistry.cloneGenericUDF(genericUDF), cloneCh);
     return clone;
   }
@@ -141,8 +141,8 @@
    * 
    * @throws UDFArgumentException
    */
-  public static exprNodeGenericFuncDesc newInstance(GenericUDF genericUDF,
-      List<exprNodeDesc> children) throws UDFArgumentException {
+  public static ExprNodeGenericFuncDesc newInstance(GenericUDF genericUDF,
+      List<ExprNodeDesc> children) throws UDFArgumentException {
     ObjectInspector[] childrenOIs = new ObjectInspector[children.size()];
     for (int i = 0; i < childrenOIs.length; i++) {
       childrenOIs[i] = TypeInfoUtils
@@ -151,16 +151,16 @@
     }
 
     ObjectInspector oi = genericUDF.initialize(childrenOIs);
-    return new exprNodeGenericFuncDesc(TypeInfoUtils
+    return new ExprNodeGenericFuncDesc(TypeInfoUtils
         .getTypeInfoFromObjectInspector(oi), genericUDF, children);
   }
 
   @Override
   public boolean isSame(Object o) {
-    if (!(o instanceof exprNodeGenericFuncDesc)) {
+    if (!(o instanceof ExprNodeGenericFuncDesc)) {
       return false;
     }
-    exprNodeGenericFuncDesc dest = (exprNodeGenericFuncDesc) o;
+    ExprNodeGenericFuncDesc dest = (ExprNodeGenericFuncDesc) o;
     if (!typeInfo.equals(dest.getTypeInfo())
         || !genericUDF.getClass().equals(dest.getGenericUDF().getClass())) {
       return false;

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
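
[Editorial usage note, not part of this commit] newInstance, shown in the hunks above, is the convenient way to build a function node: it initializes the GenericUDF against the children's object inspectors and derives the output TypeInfo itself. The sketch assumes GenericUDFOPAnd is available as one example of a GenericUDF accepting two boolean children and that TypeInfoFactory.booleanTypeInfo exists; the column names and alias are invented:

    import java.util.Arrays;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class GenericFuncDescSketch {
      public static void main(String[] args) throws Exception {
        // Two made-up boolean columns on table alias "t".
        ExprNodeDesc left =
            new ExprNodeColumnDesc(TypeInfoFactory.booleanTypeInfo, "f1", "t", false);
        ExprNodeDesc right =
            new ExprNodeColumnDesc(TypeInfoFactory.booleanTypeInfo, "f2", "t", false);

        // newInstance() initializes the UDF against the children's object inspectors
        // and takes the result TypeInfo from the inspector the UDF returns, so the
        // caller never spells out the output type by hand.
        ExprNodeGenericFuncDesc andExpr = ExprNodeGenericFuncDesc.newInstance(
            new GenericUDFOPAnd(), Arrays.asList(left, right));
        System.out.println(andExpr.getExprString());
      }
    }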

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeNullDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeNullDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeNullDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeNullDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeNullDesc.java Mon Jan 25 18:48:58 2010
@@ -23,11 +23,11 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.NullWritable;
 
-public class exprNodeNullDesc extends exprNodeDesc implements Serializable {
+public class ExprNodeNullDesc extends ExprNodeDesc implements Serializable {
 
   private static final long serialVersionUID = 1L;
 
-  public exprNodeNullDesc() {
+  public ExprNodeNullDesc() {
     super(TypeInfoFactory
         .getPrimitiveTypeInfoFromPrimitiveWritable(NullWritable.class));
   }
@@ -36,23 +36,23 @@
     return null;
   }
 
-  @explain(displayName = "expr")
+  @Explain(displayName = "expr")
   @Override
   public String getExprString() {
     return "null";
   }
 
   @Override
-  public exprNodeDesc clone() {
-    return new exprNodeNullDesc();
+  public ExprNodeDesc clone() {
+    return new ExprNodeNullDesc();
   }
 
   @Override
   public boolean isSame(Object o) {
-    if (!(o instanceof exprNodeNullDesc)) {
+    if (!(o instanceof ExprNodeNullDesc)) {
       return false;
     }
-    if (!typeInfo.equals(((exprNodeNullDesc) o).getTypeInfo())) {
+    if (!typeInfo.equals(((ExprNodeNullDesc) o).getTypeInfo())) {
       return false;
     }
 

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeNullDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/extractDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java Mon Jan 25 18:48:58 2010
@@ -20,23 +20,23 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "Extract")
-public class extractDesc implements Serializable {
+@Explain(displayName = "Extract")
+public class ExtractDesc implements Serializable {
   private static final long serialVersionUID = 1L;
-  private exprNodeDesc col;
+  private ExprNodeDesc col;
 
-  public extractDesc() {
+  public ExtractDesc() {
   }
 
-  public extractDesc(final exprNodeDesc col) {
+  public ExtractDesc(final ExprNodeDesc col) {
     this.col = col;
   }
 
-  public exprNodeDesc getCol() {
+  public ExprNodeDesc getCol() {
     return col;
   }
 
-  public void setCol(final exprNodeDesc col) {
+  public void setCol(final ExprNodeDesc col) {
     this.col = col;
   }
 }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fetchWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java Mon Jan 25 18:48:58 2010
@@ -24,15 +24,15 @@
 
 import org.apache.hadoop.fs.Path;
 
-@explain(displayName = "Fetch Operator")
-public class fetchWork implements Serializable {
+@Explain(displayName = "Fetch Operator")
+public class FetchWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private String tblDir;
-  private tableDesc tblDesc;
+  private TableDesc tblDesc;
 
   private List<String> partDir;
-  private List<partitionDesc> partDesc;
+  private List<PartitionDesc> partDesc;
 
   private int limit;
 
@@ -41,24 +41,24 @@
    */
   private String serializationNullFormat = "NULL";
 
-  public fetchWork() {
+  public FetchWork() {
   }
 
-  public fetchWork(String tblDir, tableDesc tblDesc) {
+  public FetchWork(String tblDir, TableDesc tblDesc) {
     this(tblDir, tblDesc, -1);
   }
 
-  public fetchWork(String tblDir, tableDesc tblDesc, int limit) {
+  public FetchWork(String tblDir, TableDesc tblDesc, int limit) {
     this.tblDir = tblDir;
     this.tblDesc = tblDesc;
     this.limit = limit;
   }
 
-  public fetchWork(List<String> partDir, List<partitionDesc> partDesc) {
+  public FetchWork(List<String> partDir, List<PartitionDesc> partDesc) {
     this(partDir, partDesc, -1);
   }
 
-  public fetchWork(List<String> partDir, List<partitionDesc> partDesc, int limit) {
+  public FetchWork(List<String> partDir, List<PartitionDesc> partDesc, int limit) {
     this.partDir = partDir;
     this.partDesc = partDesc;
     this.limit = limit;
@@ -97,7 +97,7 @@
   /**
    * @return the tblDesc
    */
-  public tableDesc getTblDesc() {
+  public TableDesc getTblDesc() {
     return tblDesc;
   }
 
@@ -105,7 +105,7 @@
    * @param tblDesc
    *          the tblDesc to set
    */
-  public void setTblDesc(tableDesc tblDesc) {
+  public void setTblDesc(TableDesc tblDesc) {
     this.tblDesc = tblDesc;
   }
 
@@ -117,7 +117,7 @@
   }
 
   public List<Path> getPartDirPath() {
-    return fetchWork.convertStringToPathArray(partDir);
+    return FetchWork.convertStringToPathArray(partDir);
   }
 
   public static List<String> convertPathToStringArray(List<Path> paths) {
@@ -157,7 +157,7 @@
   /**
    * @return the partDesc
    */
-  public List<partitionDesc> getPartDesc() {
+  public List<PartitionDesc> getPartDesc() {
     return partDesc;
   }
 
@@ -165,14 +165,14 @@
    * @param partDesc
    *          the partDesc to set
    */
-  public void setPartDesc(List<partitionDesc> partDesc) {
+  public void setPartDesc(List<PartitionDesc> partDesc) {
     this.partDesc = partDesc;
   }
 
   /**
    * @return the limit
    */
-  @explain(displayName = "limit")
+  @Explain(displayName = "limit")
   public int getLimit() {
     return limit;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
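
[Editorial usage note, not part of this commit] A minimal sketch of the non-partitioned FetchWork case, using only the three-argument constructor shown in the hunks above; the TableDesc is assumed to be produced elsewhere (for example by the planner) and passed in, and the helper name simpleFetch is hypothetical:

    import org.apache.hadoop.hive.ql.plan.FetchWork;
    import org.apache.hadoop.hive.ql.plan.TableDesc;

    public class FetchWorkSketch {
      // Hypothetical helper: wrap a single, non-partitioned table directory in a
      // FetchWork. A limit of -1 means "no limit", which is what the two-argument
      // constructor above delegates to.
      public static FetchWork simpleFetch(String tableDir, TableDesc tblDesc, int limit) {
        return new FetchWork(tableDir, tblDesc, limit);
      }
    }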

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/fileSinkDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java Mon Jan 25 18:48:58 2010
@@ -20,20 +20,20 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "File Output Operator")
-public class fileSinkDesc implements Serializable {
+@Explain(displayName = "File Output Operator")
+public class FileSinkDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private String dirName;
-  private tableDesc tableInfo;
+  private TableDesc tableInfo;
   private boolean compressed;
   private int destTableId;
   private String compressCodec;
   private String compressType;
 
-  public fileSinkDesc() {
+  public FileSinkDesc() {
   }
 
-  public fileSinkDesc(final String dirName, final tableDesc tableInfo,
+  public FileSinkDesc(final String dirName, final TableDesc tableInfo,
       final boolean compressed, int destTableId) {
 
     this.dirName = dirName;
@@ -42,7 +42,7 @@
     this.destTableId = destTableId;
   }
 
-  public fileSinkDesc(final String dirName, final tableDesc tableInfo,
+  public FileSinkDesc(final String dirName, final TableDesc tableInfo,
       final boolean compressed) {
 
     this.dirName = dirName;
@@ -51,7 +51,7 @@
     destTableId = 0;
   }
 
-  @explain(displayName = "directory", normalExplain = false)
+  @Explain(displayName = "directory", normalExplain = false)
   public String getDirName() {
     return dirName;
   }
@@ -60,16 +60,16 @@
     this.dirName = dirName;
   }
 
-  @explain(displayName = "table")
-  public tableDesc getTableInfo() {
+  @Explain(displayName = "table")
+  public TableDesc getTableInfo() {
     return tableInfo;
   }
 
-  public void setTableInfo(final tableDesc tableInfo) {
+  public void setTableInfo(final TableDesc tableInfo) {
     this.tableInfo = tableInfo;
   }
 
-  @explain(displayName = "compressed")
+  @Explain(displayName = "compressed")
   public boolean getCompressed() {
     return compressed;
   }
@@ -78,7 +78,7 @@
     this.compressed = compressed;
   }
 
-  @explain(displayName = "GlobalTableId")
+  @Explain(displayName = "GlobalTableId")
   public int getDestTableId() {
     return destTableId;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/filterDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java Mon Jan 25 18:48:58 2010
@@ -21,8 +21,8 @@
 import java.io.Serializable;
 import java.util.List;
 
-@explain(displayName = "Filter Operator")
-public class filterDesc implements Serializable {
+@Explain(displayName = "Filter Operator")
+public class FilterDesc implements Serializable {
 
   /**
    * sampleDesc is used to keep track of the sampling descriptor
@@ -61,40 +61,40 @@
   }
 
   private static final long serialVersionUID = 1L;
-  private org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate;
+  private org.apache.hadoop.hive.ql.plan.ExprNodeDesc predicate;
   private boolean isSamplingPred;
   private transient sampleDesc sampleDescr;
 
-  public filterDesc() {
+  public FilterDesc() {
   }
 
-  public filterDesc(
-      final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate,
+  public FilterDesc(
+      final org.apache.hadoop.hive.ql.plan.ExprNodeDesc predicate,
       boolean isSamplingPred) {
     this.predicate = predicate;
     this.isSamplingPred = isSamplingPred;
     sampleDescr = null;
   }
 
-  public filterDesc(
-      final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate,
+  public FilterDesc(
+      final org.apache.hadoop.hive.ql.plan.ExprNodeDesc predicate,
       boolean isSamplingPred, final sampleDesc sampleDescr) {
     this.predicate = predicate;
     this.isSamplingPred = isSamplingPred;
     this.sampleDescr = sampleDescr;
   }
 
-  @explain(displayName = "predicate")
-  public org.apache.hadoop.hive.ql.plan.exprNodeDesc getPredicate() {
+  @Explain(displayName = "predicate")
+  public org.apache.hadoop.hive.ql.plan.ExprNodeDesc getPredicate() {
     return predicate;
   }
 
   public void setPredicate(
-      final org.apache.hadoop.hive.ql.plan.exprNodeDesc predicate) {
+      final org.apache.hadoop.hive.ql.plan.ExprNodeDesc predicate) {
     this.predicate = predicate;
   }
 
-  @explain(displayName = "isSamplingPred", normalExplain = false)
+  @Explain(displayName = "isSamplingPred", normalExplain = false)
   public boolean getIsSamplingPred() {
     return isSamplingPred;
   }
@@ -103,7 +103,7 @@
     this.isSamplingPred = isSamplingPred;
   }
 
-  @explain(displayName = "sampleDesc", normalExplain = false)
+  @Explain(displayName = "sampleDesc", normalExplain = false)
   public sampleDesc getSampleDescr() {
     return sampleDescr;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 
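
[Editorial usage note, not part of this commit] A minimal sketch of building the descriptor for a plain filter, using only the two-argument constructor shown in the hunks above; the boolean-typed predicate is assumed to be built elsewhere, and the helper name whereFilter is hypothetical:

    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.FilterDesc;

    public class FilterDescSketch {
      // Hypothetical helper: descriptor for an ordinary WHERE-style filter. The
      // predicate is any boolean-typed ExprNodeDesc (for instance the AND tree
      // sketched after the ExprNodeGenericFuncDesc hunks); isSamplingPred is false,
      // so the two-argument constructor leaves the sampleDesc unset.
      public static FilterDesc whereFilter(ExprNodeDesc predicate) {
        return new FilterDesc(predicate, false);
      }
    }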

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/forwardDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java Mon Jan 25 18:48:58 2010
@@ -20,12 +20,12 @@
 
 import java.io.Serializable;
 
-@explain(displayName = "Forward")
-public class forwardDesc implements Serializable {
+@Explain(displayName = "Forward")
+public class ForwardDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
   @SuppressWarnings("nls")
-  public forwardDesc() {
+  public ForwardDesc() {
     // throw new
     // RuntimeException("This class does not need to be instantiated");
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java?rev=902921&r1=902920&r2=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java Mon Jan 25 18:48:58 2010
@@ -22,30 +22,30 @@
 
 public class FunctionWork implements Serializable {
   private static final long serialVersionUID = 1L;
-  private createFunctionDesc createFunctionDesc;
-  private dropFunctionDesc dropFunctionDesc;
+  private CreateFunctionDesc createFunctionDesc;
+  private DropFunctionDesc dropFunctionDesc;
 
-  public FunctionWork(createFunctionDesc createFunctionDesc) {
+  public FunctionWork(CreateFunctionDesc createFunctionDesc) {
     this.createFunctionDesc = createFunctionDesc;
   }
 
-  public FunctionWork(dropFunctionDesc dropFunctionDesc) {
+  public FunctionWork(DropFunctionDesc dropFunctionDesc) {
     this.dropFunctionDesc = dropFunctionDesc;
   }
 
-  public createFunctionDesc getCreateFunctionDesc() {
+  public CreateFunctionDesc getCreateFunctionDesc() {
     return createFunctionDesc;
   }
 
-  public void setCreateFunctionDesc(createFunctionDesc createFunctionDesc) {
+  public void setCreateFunctionDesc(CreateFunctionDesc createFunctionDesc) {
     this.createFunctionDesc = createFunctionDesc;
   }
 
-  public dropFunctionDesc getDropFunctionDesc() {
+  public DropFunctionDesc getDropFunctionDesc() {
     return dropFunctionDesc;
   }
 
-  public void setDropFunctionDesc(dropFunctionDesc dropFunctionDesc) {
+  public void setDropFunctionDesc(DropFunctionDesc dropFunctionDesc) {
     this.dropFunctionDesc = dropFunctionDesc;
   }
 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java Mon Jan 25 18:48:58 2010
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-@explain(displayName = "Group By Operator")
-public class groupByDesc implements java.io.Serializable {
+@Explain(displayName = "Group By Operator")
+public class GroupByDesc implements java.io.Serializable {
   /**
    * Group-by Mode: COMPLETE: complete 1-phase aggregation: iterate, terminate
    * PARTIAL1: partial aggregation - first phase: iterate, terminatePartial
@@ -40,28 +40,28 @@
   private boolean groupKeyNotReductionKey;
   private boolean bucketGroup;
 
-  private java.util.ArrayList<exprNodeDesc> keys;
-  private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators;
+  private java.util.ArrayList<ExprNodeDesc> keys;
+  private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators;
   private java.util.ArrayList<java.lang.String> outputColumnNames;
 
-  public groupByDesc() {
+  public GroupByDesc() {
   }
 
-  public groupByDesc(
+  public GroupByDesc(
       final Mode mode,
       final java.util.ArrayList<java.lang.String> outputColumnNames,
-      final java.util.ArrayList<exprNodeDesc> keys,
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators,
+      final java.util.ArrayList<ExprNodeDesc> keys,
+      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators,
       final boolean groupKeyNotReductionKey) {
     this(mode, outputColumnNames, keys, aggregators, groupKeyNotReductionKey,
         false);
   }
 
-  public groupByDesc(
+  public GroupByDesc(
       final Mode mode,
       final java.util.ArrayList<java.lang.String> outputColumnNames,
-      final java.util.ArrayList<exprNodeDesc> keys,
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators,
+      final java.util.ArrayList<ExprNodeDesc> keys,
+      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators,
       final boolean groupKeyNotReductionKey, final boolean bucketGroup) {
     this.mode = mode;
     this.outputColumnNames = outputColumnNames;
@@ -75,7 +75,7 @@
     return mode;
   }
 
-  @explain(displayName = "mode")
+  @Explain(displayName = "mode")
   public String getModeString() {
     switch (mode) {
     case COMPLETE:
@@ -101,16 +101,16 @@
     this.mode = mode;
   }
 
-  @explain(displayName = "keys")
-  public java.util.ArrayList<exprNodeDesc> getKeys() {
+  @Explain(displayName = "keys")
+  public java.util.ArrayList<ExprNodeDesc> getKeys() {
     return keys;
   }
 
-  public void setKeys(final java.util.ArrayList<exprNodeDesc> keys) {
+  public void setKeys(final java.util.ArrayList<ExprNodeDesc> keys) {
     this.keys = keys;
   }
 
-  @explain(displayName = "outputColumnNames")
+  @Explain(displayName = "outputColumnNames")
   public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
     return outputColumnNames;
   }
@@ -120,13 +120,13 @@
     this.outputColumnNames = outputColumnNames;
   }
 
-  @explain(displayName = "aggregations")
-  public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> getAggregators() {
+  @Explain(displayName = "aggregations")
+  public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> getAggregators() {
     return aggregators;
   }
 
   public void setAggregators(
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.aggregationDesc> aggregators) {
+      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators) {
     this.aggregators = aggregators;
   }
 
@@ -138,7 +138,7 @@
     this.groupKeyNotReductionKey = groupKeyNotReductionKey;
   }
 
-  @explain(displayName = "bucketGroup")
+  @Explain(displayName = "bucketGroup")
   public boolean getBucketGroup() {
     return bucketGroup;
   }

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
------------------------------------------------------------------------------
    svn:mergeinfo =