Posted to commits@hive.apache.org by zs...@apache.org on 2010/01/25 19:49:05 UTC

svn commit: r902921 [4/26] - in /hadoop/hive/trunk: ./ contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/ contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoo...

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=902921&r1=902920&r2=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Mon Jan 25 18:48:58 2010
@@ -26,7 +26,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
-import org.apache.hadoop.hive.ql.plan.explainWork;
+import org.apache.hadoop.hive.ql.plan.ExplainWork;
 
 public class ExplainSemanticAnalyzer extends BaseSemanticAnalyzer {
 
@@ -60,7 +60,7 @@
       tasks.add(fetchTask);
     }
 
-    rootTasks.add(TaskFactory.get(new explainWork(ctx.getResFile(), tasks,
+    rootTasks.add(TaskFactory.get(new ExplainWork(ctx.getResFile(), tasks,
         ((ASTNode) ast.getChild(0)).toStringTree(), extended), conf));
   }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java?rev=902921&r1=902920&r2=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java Mon Jan 25 18:48:58 2010
@@ -23,8 +23,8 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.plan.FunctionWork;
-import org.apache.hadoop.hive.ql.plan.createFunctionDesc;
-import org.apache.hadoop.hive.ql.plan.dropFunctionDesc;
+import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
+import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
 
 public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer {
   private static final Log LOG = LogFactory
@@ -49,13 +49,13 @@
   private void analyzeCreateFunction(ASTNode ast) throws SemanticException {
     String functionName = ast.getChild(0).getText();
     String className = unescapeSQLString(ast.getChild(1).getText());
-    createFunctionDesc desc = new createFunctionDesc(functionName, className);
+    CreateFunctionDesc desc = new CreateFunctionDesc(functionName, className);
     rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
   }
 
   private void analyzeDropFunction(ASTNode ast) throws SemanticException {
     String functionName = ast.getChild(0).getText();
-    dropFunctionDesc desc = new dropFunctionDesc(functionName);
+    DropFunctionDesc desc = new DropFunctionDesc(functionName);
     rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
   }
 }

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinCond.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinCond.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinCond.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java Mon Jan 25 18:48:58 2010
@@ -23,16 +23,16 @@
  * Join conditions Descriptor implementation.
  * 
  */
-public class joinCond {
+public class JoinCond {
   private int left;
   private int right;
-  private joinType joinType;
+  private JoinType joinType;
   private boolean preserved;
 
-  public joinCond() {
+  public JoinCond() {
   }
 
-  public joinCond(int left, int right, joinType joinType) {
+  public JoinCond(int left, int right, JoinType joinType) {
     this.left = left;
     this.right = right;
     this.joinType = joinType;
@@ -44,8 +44,8 @@
    * @param p
    *          true if table is preserved, false otherwise
    */
-  public joinCond(boolean p) {
-    joinType = org.apache.hadoop.hive.ql.parse.joinType.UNIQUE;
+  public JoinCond(boolean p) {
+    joinType = org.apache.hadoop.hive.ql.parse.JoinType.UNIQUE;
     preserved = p;
   }
 
@@ -72,11 +72,11 @@
     this.right = right;
   }
 
-  public joinType getJoinType() {
+  public JoinType getJoinType() {
     return joinType;
   }
 
-  public void setJoinType(final joinType joinType) {
+  public void setJoinType(final JoinType joinType) {
     this.joinType = joinType;
   }
 

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Copied: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinType.java (from r902715, hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinType.java)
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinType.java?p2=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinType.java&p1=hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinType.java&r1=902715&r2=902921&rev=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinType.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinType.java Mon Jan 25 18:48:58 2010
@@ -18,6 +18,6 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-public enum joinType {
+public enum JoinType {
   INNER, LEFTOUTER, RIGHTOUTER, FULLOUTER, UNIQUE, LEFTSEMI
 };

Propchange: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinType.java
------------------------------------------------------------------------------
    svn:mergeinfo = 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java?rev=902921&r1=902920&r2=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java Mon Jan 25 18:48:58 2010
@@ -35,9 +35,9 @@
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.plan.copyWork;
-import org.apache.hadoop.hive.ql.plan.loadTableDesc;
-import org.apache.hadoop.hive.ql.plan.moveWork;
+import org.apache.hadoop.hive.ql.plan.CopyWork;
+import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
+import org.apache.hadoop.hive.ql.plan.MoveWork;
 
 public class LoadSemanticAnalyzer extends BaseSemanticAnalyzer {
 
@@ -210,7 +210,7 @@
       // that's just a test case.
       String copyURIStr = ctx.getExternalTmpFileURI(toURI);
       URI copyURI = URI.create(copyURIStr);
-      rTask = TaskFactory.get(new copyWork(fromURI.toString(), copyURIStr),
+      rTask = TaskFactory.get(new CopyWork(fromURI.toString(), copyURIStr),
           conf);
       fromURI = copyURI;
     }
@@ -218,16 +218,16 @@
     // create final load/move work
 
     String loadTmpPath = ctx.getExternalTmpFileURI(toURI);
-    loadTableDesc loadTableWork = new loadTableDesc(fromURI.toString(),
+    LoadTableDesc loadTableWork = new LoadTableDesc(fromURI.toString(),
         loadTmpPath, Utilities.getTableDesc(ts.tableHandle),
         (ts.partSpec != null) ? ts.partSpec : new HashMap<String, String>(),
         isOverWrite);
 
     if (rTask != null) {
-      rTask.addDependentTask(TaskFactory.get(new moveWork(getInputs(),
+      rTask.addDependentTask(TaskFactory.get(new MoveWork(getInputs(),
           getOutputs(), loadTableWork, null, true), conf));
     } else {
-      rTask = TaskFactory.get(new moveWork(getInputs(), getOutputs(),
+      rTask = TaskFactory.get(new MoveWork(getInputs(), getOutputs(),
           loadTableWork, null, true), conf);
     }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=902921&r1=902920&r2=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Mon Jan 25 18:48:58 2010
@@ -34,10 +34,10 @@
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.loadFileDesc;
-import org.apache.hadoop.hive.ql.plan.loadTableDesc;
-import org.apache.hadoop.hive.ql.plan.filterDesc.sampleDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
+import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
 
 /**
  * Parse Context: The current parse context. This is passed to the optimizer
@@ -52,15 +52,15 @@
 public class ParseContext {
   private QB qb;
   private ASTNode ast;
-  private HashMap<TableScanOperator, exprNodeDesc> opToPartPruner;
+  private HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner;
   private HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
   private HashMap<String, Operator<? extends Serializable>> topOps;
   private HashMap<String, Operator<? extends Serializable>> topSelOps;
   private LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtx;
   private Map<JoinOperator, QBJoinTree> joinContext;
   private HashMap<TableScanOperator, Table> topToTable;
-  private List<loadTableDesc> loadTableWork;
-  private List<loadFileDesc> loadFileWork;
+  private List<LoadTableDesc> loadTableWork;
+  private List<LoadFileDesc> loadFileWork;
   private Context ctx;
   private HiveConf conf;
   private HashMap<String, String> idToTableNameMap;
@@ -120,13 +120,13 @@
       HiveConf conf,
       QB qb,
       ASTNode ast,
-      HashMap<TableScanOperator, exprNodeDesc> opToPartPruner,
+      HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner,
       HashMap<String, Operator<? extends Serializable>> topOps,
       HashMap<String, Operator<? extends Serializable>> topSelOps,
       LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtx,
       Map<JoinOperator, QBJoinTree> joinContext,
       HashMap<TableScanOperator, Table> topToTable,
-      List<loadTableDesc> loadTableWork, List<loadFileDesc> loadFileWork,
+      List<LoadTableDesc> loadTableWork, List<LoadFileDesc> loadFileWork,
       Context ctx, HashMap<String, String> idToTableNameMap, int destTableId,
       UnionProcContext uCtx, List<MapJoinOperator> listMapJoinOpsNoReducer,
       Map<GroupByOperator, Set<String>> groupOpToInputTables,
@@ -218,7 +218,7 @@
   /**
    * @return the opToPartPruner
    */
-  public HashMap<TableScanOperator, exprNodeDesc> getOpToPartPruner() {
+  public HashMap<TableScanOperator, ExprNodeDesc> getOpToPartPruner() {
     return opToPartPruner;
   }
 
@@ -227,7 +227,7 @@
    *          the opToPartPruner to set
    */
   public void setOpToPartPruner(
-      HashMap<TableScanOperator, exprNodeDesc> opToPartPruner) {
+      HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner) {
     this.opToPartPruner = opToPartPruner;
   }
 
@@ -296,7 +296,7 @@
   /**
    * @return the loadTableWork
    */
-  public List<loadTableDesc> getLoadTableWork() {
+  public List<LoadTableDesc> getLoadTableWork() {
     return loadTableWork;
   }
 
@@ -304,14 +304,14 @@
    * @param loadTableWork
    *          the loadTableWork to set
    */
-  public void setLoadTableWork(List<loadTableDesc> loadTableWork) {
+  public void setLoadTableWork(List<LoadTableDesc> loadTableWork) {
     this.loadTableWork = loadTableWork;
   }
 
   /**
    * @return the loadFileWork
    */
-  public List<loadFileDesc> getLoadFileWork() {
+  public List<LoadFileDesc> getLoadFileWork() {
     return loadFileWork;
   }
 
@@ -319,7 +319,7 @@
    * @param loadFileWork
    *          the loadFileWork to set
    */
-  public void setLoadFileWork(List<loadFileDesc> loadFileWork) {
+  public void setLoadFileWork(List<LoadFileDesc> loadFileWork) {
     this.loadFileWork = loadFileWork;
   }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=902921&r1=902920&r2=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Mon Jan 25 18:48:58 2010
@@ -23,7 +23,7 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.plan.createTableDesc;
+import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
 
 /**
  * Implementation of the query block
@@ -45,7 +45,7 @@
   private QBJoinTree qbjoin;
   private String id;
   private boolean isQuery;
-  private createTableDesc tblDesc = null; // table descriptor of the final
+  private CreateTableDesc tblDesc = null; // table descriptor of the final
                                           // results
 
   public void print(String msg) {
@@ -172,11 +172,11 @@
     return qbp.isSelectStarQuery() && aliasToSubq.isEmpty() && !isCTAS();
   }
 
-  public createTableDesc getTableDesc() {
+  public CreateTableDesc getTableDesc() {
     return tblDesc;
   }
 
-  public void setTableDesc(createTableDesc desc) {
+  public void setTableDesc(CreateTableDesc desc) {
     tblDesc = desc;
   }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java?rev=902921&r1=902920&r2=902921&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java Mon Jan 25 18:48:58 2010
@@ -35,7 +35,7 @@
   private QBJoinTree joinSrc;
   private String[] baseSrc;
   private int nextTag;
-  private joinCond[] joinCond;
+  private JoinCond[] joinCond;
   private boolean noOuterJoin;
   private boolean noSemiJoin;
 
@@ -133,11 +133,11 @@
     return "$INTNAME";
   }
 
-  public joinCond[] getJoinCond() {
+  public JoinCond[] getJoinCond() {
     return joinCond;
   }
 
-  public void setJoinCond(joinCond[] joinCond) {
+  public void setJoinCond(JoinCond[] joinCond) {
     this.joinCond = joinCond;
   }