Posted to commits@hive.apache.org by gu...@apache.org on 2014/08/28 05:15:19 UTC

svn commit: r1621031 [3/10] - in /hive/branches/cbo: ./ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/test/results/clientnegative/ contrib/src/test/results/clientpositive/ data/files/ hbase-handler/src/test/results/negative/ hcatalog/core/sr...

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java Thu Aug 28 03:15:13 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.io.sar
 import com.esotericsoftware.kryo.Kryo;
 import com.esotericsoftware.kryo.io.Input;
 import com.esotericsoftware.kryo.io.Output;
+
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.common.type.HiveChar;
@@ -50,6 +51,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
 import java.math.BigDecimal;
+import java.sql.Timestamp;
 import java.util.ArrayDeque;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -107,6 +109,12 @@ final class SearchArgumentImpl implement
 
     @Override
     public Object getLiteral() {
+      // To get around a kryo 2.22 bug while deserializing a Timestamp into a Date
+      // (https://github.com/EsotericSoftware/kryo/issues/88)
+      // When we see a Date, convert it back into a Timestamp
+      if (literal instanceof java.util.Date) {
+        return new Timestamp(((java.util.Date)literal).getTime());
+      }
       return literal;
     }
 
@@ -317,6 +325,8 @@ final class SearchArgumentImpl implement
             return PredicateLeaf.Type.FLOAT;
           case DATE:
             return PredicateLeaf.Type.DATE;
+          case TIMESTAMP:
+            return PredicateLeaf.Type.TIMESTAMP;
           case DECIMAL:
             return PredicateLeaf.Type.DECIMAL;
           default:
@@ -354,6 +364,7 @@ final class SearchArgumentImpl implement
         case FLOAT:
           return ((Number) lit.getValue()).doubleValue();
         case DATE:
+        case TIMESTAMP:
         case DECIMAL:
           return lit;
         default:
@@ -948,6 +959,7 @@ final class SearchArgumentImpl implement
           literal instanceof Long ||
           literal instanceof Double ||
           literal instanceof DateWritable ||
+          literal instanceof Timestamp ||
           literal instanceof HiveDecimal ||
           literal instanceof BigDecimal) {
         return literal;
@@ -981,7 +993,9 @@ final class SearchArgumentImpl implement
         return PredicateLeaf.Type.FLOAT;
       } else if (literal instanceof DateWritable) {
         return PredicateLeaf.Type.DATE;
-      } else if (literal instanceof HiveDecimal ||
+      } else if (literal instanceof Timestamp) {
+        return PredicateLeaf.Type.TIMESTAMP;
+      } else if (literal instanceof HiveDecimal ||
           literal instanceof BigDecimal) {
         return PredicateLeaf.Type.DECIMAL;
       }
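
For illustration only (this snippet is not part of the patch above): a minimal, self-contained sketch of the guard added to getLiteral(). Kryo 2.22 can deserialize a java.sql.Timestamp back as a plain java.util.Date (https://github.com/EsotericSoftware/kryo/issues/88), so the getter converts any Date it sees back into a Timestamp before returning it.

    import java.sql.Timestamp;
    import java.util.Date;

    public class TimestampLiteralGuard {
      // Mirrors the new getLiteral() logic on a standalone value.
      static Object getLiteral(Object literal) {
        if (literal instanceof Date) {
          return new Timestamp(((Date) literal).getTime());
        }
        return literal;
      }

      public static void main(String[] args) {
        Object fromKryo = new Date(1409195713000L);  // what kryo 2.22 may hand back
        System.out.println(getLiteral(fromKryo).getClass().getName());  // java.sql.Timestamp
      }
    }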

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java Thu Aug 28 03:15:13 2014
@@ -233,6 +233,10 @@ public class Partition implements Serial
     return ret;
   }
 
+  public Path getPartitionPath() {
+    return getDataLocation();
+  }
+
   public Path getDataLocation() {
     if (table.isPartitioned()) {
       return new Path(tPartition.getSd().getLocation());
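
For illustration only (not part of the patch above): getPartitionPath() is re-added purely as an alias for getDataLocation(), so callers that still use the older accessor name keep compiling. A minimal sketch of the same pattern, with illustrative types standing in for Hive's:

    import java.nio.file.Path;
    import java.nio.file.Paths;

    class PartitionLocation {
      Path getDataLocation() { return Paths.get("/warehouse/db.db/t/ds=2014-08-28"); }
      // Alias kept so existing callers of the old accessor name still work.
      Path getPartitionPath() { return getDataLocation(); }
    }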

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Thu Aug 28 03:15:13 2014
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.QueryPr
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
@@ -61,7 +62,6 @@ import org.apache.hadoop.hive.ql.optimiz
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -317,7 +317,7 @@ public abstract class BaseSemanticAnalyz
       return new String[] {dbName, tableName};
     }
     String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText());
-    return new String[]{SessionState.get().getCurrentDatabase(), tableName};
+    return Utilities.getDbTableName(tableName);
   }
 
   public static String getDotName(String[] qname) throws SemanticException {
@@ -646,6 +646,20 @@ public abstract class BaseSemanticAnalyz
       this(db, conf, ast, true, false);
     }
 
+    public tableSpec(Hive db, HiveConf conf, String tableName, Map<String, String> partSpec)
+        throws HiveException {
+      this.tableName = tableName;
+      this.partSpec = partSpec;
+      this.tableHandle = db.getTable(tableName);
+      if (partSpec != null) {
+        this.specType = SpecType.STATIC_PARTITION;
+        this.partHandle = db.getPartition(tableHandle, partSpec, false);
+        this.partitions = Arrays.asList(partHandle);
+      } else {
+        this.specType = SpecType.TABLE_ONLY;
+      }
+    }
+
     public tableSpec(Hive db, HiveConf conf, ASTNode ast, boolean allowDynamicPartitionsSpec,
         boolean allowPartialPartitionsSpec) throws SemanticException {
       assert (ast.getToken().getType() == HiveParser.TOK_TAB
@@ -1188,21 +1202,26 @@ public abstract class BaseSemanticAnalyz
   }
 
   protected Database getDatabase(String dbName, boolean throwException) throws SemanticException {
+    Database database;
     try {
-      Database database = db.getDatabase(dbName);
-      if (database == null && throwException) {
-        throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName));
-      }
-      return database;
-    } catch (HiveException e) {
+      database = db.getDatabase(dbName);
+    } catch (Exception e) {
       throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName), e);
     }
+    if (database == null && throwException) {
+      throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName));
+    }
+    return database;
   }
 
   protected Table getTable(String[] qualified) throws SemanticException {
     return getTable(qualified[0], qualified[1], true);
   }
 
+  protected Table getTable(String[] qualified, boolean throwException) throws SemanticException {
+    return getTable(qualified[0], qualified[1], throwException);
+  }
+
   protected Table getTable(String tblName) throws SemanticException {
     return getTable(null, tblName, true);
   }
@@ -1213,43 +1232,46 @@ public abstract class BaseSemanticAnalyz
 
   protected Table getTable(String database, String tblName, boolean throwException)
       throws SemanticException {
+    Table tab;
     try {
-      Table tab = database == null ? db.getTable(tblName, false)
+      tab = database == null ? db.getTable(tblName, false)
           : db.getTable(database, tblName, false);
-      if (tab == null && throwException) {
-        throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
-      }
-      return tab;
-    } catch (HiveException e) {
+    } catch (Exception e) {
       throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName), e);
     }
+    if (tab == null && throwException) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+    }
+    return tab;
   }
 
   protected Partition getPartition(Table table, Map<String, String> partSpec,
       boolean throwException) throws SemanticException {
+    Partition partition;
     try {
-      Partition partition = db.getPartition(table, partSpec, false);
-      if (partition == null && throwException) {
-        throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
-      }
-      return partition;
-    } catch (HiveException e) {
+      partition = db.getPartition(table, partSpec, false);
+    } catch (Exception e) {
       throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
     }
+    if (partition == null && throwException) {
+      throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
+    }
+    return partition;
   }
 
   protected List<Partition> getPartitions(Table table, Map<String, String> partSpec,
       boolean throwException) throws SemanticException {
+    List<Partition> partitions;
     try {
-      List<Partition> partitions = partSpec == null ? db.getPartitions(table) :
+      partitions = partSpec == null ? db.getPartitions(table) :
           db.getPartitions(table, partSpec);
-      if (partitions.isEmpty() && throwException) {
-        throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
-      }
-      return partitions;
-    } catch (HiveException e) {
+    } catch (Exception e) {
       throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
     }
+    if (partitions.isEmpty() && throwException) {
+      throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
+    }
+    return partitions;
   }
 
   protected String toMessage(ErrorMsg message, Object detail) {
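
For illustration only (not part of the patch above): the analyzer now resolves unqualified table names through Utilities.getDbTableName() instead of always prepending SessionState.get().getCurrentDatabase(). A minimal stand-in for that resolution, assuming the usual behavior of splitting "db.table" on the dot and falling back to the session's current database for bare names:

    public class QualifiedNames {
      // Stand-in for Utilities.getDbTableName(); "defaultDb" plays the role of
      // the session's current database.
      static String[] getDbTableName(String name, String defaultDb) {
        String[] parts = name.split("\\.");
        return parts.length == 1
            ? new String[] { defaultDb, parts[0] }
            : new String[] { parts[0], parts[1] };
      }

      // Same shape as BaseSemanticAnalyzer.getDotName().
      static String getDotName(String[] qname) {
        return qname[0] + "." + qname[1];
      }

      public static void main(String[] args) {
        System.out.println(getDotName(getDbTableName("t1", "default")));            // default.t1
        System.out.println(getDotName(getDbTableName("sales.orders", "default")));  // sales.orders
      }
    }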

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Thu Aug 28 03:15:13 2014
@@ -249,39 +249,67 @@ public class DDLSemanticAnalyzer extends
   }
 
   @Override
-  public void analyzeInternal(ASTNode ast) throws SemanticException {
+  public void analyzeInternal(ASTNode input) throws SemanticException {
 
-    switch (ast.getToken().getType()) {
-    case HiveParser.TOK_ALTERTABLE_PARTITION: {
-      ASTNode tablePart = (ASTNode) ast.getChild(0);
-      TablePartition tblPart = new TablePartition(tablePart);
-      String tableName = tblPart.tableName;
-      HashMap<String, String> partSpec = tblPart.partSpec;
-      ast = (ASTNode) ast.getChild(1);
-      if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
+    ASTNode ast = input;
+    switch (ast.getType()) {
+    case HiveParser.TOK_ALTERTABLE: {
+      ast = (ASTNode) input.getChild(1);
+      String[] qualified = getQualifiedTableName((ASTNode) input.getChild(0));
+      String tableName = getDotName(qualified);
+      HashMap<String, String> partSpec = DDLSemanticAnalyzer.getPartSpec((ASTNode) input.getChild(2));
+      if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAME) {
+        analyzeAlterTableRename(qualified, ast, false);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_TOUCH) {
+        analyzeAlterTableTouch(qualified, ast);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ARCHIVE) {
+        analyzeAlterTableArchive(qualified, ast, false);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_UNARCHIVE) {
+        analyzeAlterTableArchive(qualified, ast, true);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) {
+        analyzeAlterTableModifyCols(qualified, ast, AlterTableTypes.ADDCOLS);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) {
+        analyzeAlterTableModifyCols(qualified, ast, AlterTableTypes.REPLACECOLS);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) {
+        analyzeAlterTableRenameCol(qualified, ast);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) {
+        analyzeAlterTableAddParts(qualified, ast, false);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) {
+        analyzeAlterTableDropParts(qualified, ast, false);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_PARTCOLTYPE) {
+        analyzeAlterTablePartColType(qualified, ast);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_PROPERTIES) {
+        analyzeAlterTableProps(qualified, ast, false, false);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPROPERTIES) {
+        analyzeAlterTableProps(qualified, ast, false, true);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_SKEWED) {
+        analyzeAltertableSkewedby(qualified, ast);
+      } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION) {
+        analyzeExchangePartition(qualified, ast);
+      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
         analyzeAlterTableFileFormat(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_PROTECTMODE) {
         analyzeAlterTableProtectMode(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_LOCATION) {
         analyzeAlterTableLocation(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_MERGEFILES) {
-        analyzeAlterTablePartMergeFiles(tablePart, ast, tableName, partSpec);
+        analyzeAlterTablePartMergeFiles(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER) {
         analyzeAlterTableSerde(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES) {
         analyzeAlterTableSerdeProps(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAMEPART) {
         analyzeAlterTableRenamePart(ast, tableName, partSpec);
-      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTBLPART_SKEWED_LOCATION) {
+      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION) {
         analyzeAlterTableSkewedLocation(ast, tableName, partSpec);
-      } else if (ast.getToken().getType() == HiveParser.TOK_TABLEBUCKETS) {
+      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_BUCKETS) {
         analyzeAlterTableBucketNum(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_CLUSTER_SORT) {
         analyzeAlterTableClusterSort(ast, tableName, partSpec);
-      } else if (ast.getToken().getType() == HiveParser.TOK_COMPACT) {
+      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_COMPACT) {
         analyzeAlterTableCompact(ast, tableName, partSpec);
       } else if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS){
-        analyzeAlterTableUpdateStats(ast,tblPart);
+        analyzeAlterTableUpdateStats(ast, tableName, partSpec);
       }
       break;
     }
@@ -360,66 +388,22 @@ public class DDLSemanticAnalyzer extends
     case HiveParser.TOK_DROPVIEW:
       analyzeDropTable(ast, true);
       break;
-    case HiveParser.TOK_ALTERVIEW_PROPERTIES:
-      analyzeAlterTableProps(ast, true, false);
-      break;
-    case HiveParser.TOK_DROPVIEW_PROPERTIES:
-      analyzeAlterTableProps(ast, true, true);
-      break;
-    case HiveParser.TOK_ALTERVIEW_ADDPARTS:
-      // for ALTER VIEW ADD PARTITION, we wrapped the ADD to discriminate
-      // view from table; unwrap it now
-      analyzeAlterTableAddParts((ASTNode) ast.getChild(0), true);
-      break;
-    case HiveParser.TOK_ALTERVIEW_DROPPARTS:
-      // for ALTER VIEW DROP PARTITION, we wrapped the DROP to discriminate
-      // view from table; unwrap it now
-      analyzeAlterTableDropParts((ASTNode) ast.getChild(0), true);
-      break;
-    case HiveParser.TOK_ALTERVIEW_RENAME:
-      // for ALTER VIEW RENAME, we wrapped the RENAME to discriminate
-      // view from table; unwrap it now
-      analyzeAlterTableRename(((ASTNode) ast.getChild(0)), true);
-      break;
-    case HiveParser.TOK_ALTERTABLE_RENAME:
-      analyzeAlterTableRename(ast, false);
-      break;
-    case HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS:
-      analyzeAlterTableUpdateStats(ast, null);
-      break;
-    case HiveParser.TOK_ALTERTABLE_TOUCH:
-      analyzeAlterTableTouch(ast);
-      break;
-    case HiveParser.TOK_ALTERTABLE_ARCHIVE:
-      analyzeAlterTableArchive(ast, false);
-      break;
-    case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
-      analyzeAlterTableArchive(ast, true);
-      break;
-    case HiveParser.TOK_ALTERTABLE_ADDCOLS:
-      analyzeAlterTableModifyCols(ast, AlterTableTypes.ADDCOLS);
-      break;
-    case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
-      analyzeAlterTableModifyCols(ast, AlterTableTypes.REPLACECOLS);
-      break;
-    case HiveParser.TOK_ALTERTABLE_RENAMECOL:
-      analyzeAlterTableRenameCol(ast);
-      break;
-    case HiveParser.TOK_ALTERTABLE_ADDPARTS:
-      analyzeAlterTableAddParts(ast, false);
-      break;
-    case HiveParser.TOK_ALTERTABLE_DROPPARTS:
-      analyzeAlterTableDropParts(ast, false);
-      break;
-    case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE:
-      analyzeAlterTablePartColType(ast);
-      break;
-    case HiveParser.TOK_ALTERTABLE_PROPERTIES:
-      analyzeAlterTableProps(ast, false, false);
-      break;
-    case HiveParser.TOK_DROPTABLE_PROPERTIES:
-      analyzeAlterTableProps(ast, false, true);
+    case HiveParser.TOK_ALTERVIEW: {
+      String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
+      ast = (ASTNode) ast.getChild(1);
+      if (ast.getType() == HiveParser.TOK_ALTERVIEW_PROPERTIES) {
+        analyzeAlterTableProps(qualified, ast, true, false);
+      } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_DROPPROPERTIES) {
+        analyzeAlterTableProps(qualified, ast, true, true);
+      } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_ADDPARTS) {
+        analyzeAlterTableAddParts(qualified, ast, true);
+      } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_DROPPARTS) {
+        analyzeAlterTableDropParts(qualified, ast, true);
+      } else if (ast.getType() == HiveParser.TOK_ALTERVIEW_RENAME) {
+        analyzeAlterTableRename(qualified, ast, true);
+      }
       break;
+    }
     case HiveParser.TOK_ALTERINDEX_REBUILD:
       analyzeAlterIndexRebuild(ast);
       break;
@@ -499,12 +483,6 @@ public class DDLSemanticAnalyzer extends
     case HiveParser.TOK_REVOKE:
       analyzeRevoke(ast);
       break;
-    case HiveParser.TOK_ALTERTABLE_SKEWED:
-      analyzeAltertableSkewedby(ast);
-      break;
-   case HiveParser.TOK_EXCHANGEPARTITION:
-      analyzeExchangePartition(ast);
-      break;
    case HiveParser.TOK_SHOW_SET_ROLE:
      analyzeSetShowRole(ast);
      break;
@@ -516,20 +494,14 @@ public class DDLSemanticAnalyzer extends
     }
   }
 
-  private void analyzeAlterTableUpdateStats(ASTNode ast, TablePartition tblPart)
+  private void analyzeAlterTableUpdateStats(ASTNode ast, String tblName, Map<String, String> partSpec)
       throws SemanticException {
-    String tblName = null;
-    String colName = null;
-    Map<String, String> mapProp = null;
-    Map<String, String> partSpec = null;
+    String colName = getUnescapedName((ASTNode) ast.getChild(0));
+    Map<String, String> mapProp = getProps((ASTNode) (ast.getChild(1)).getChild(0));
+
+    Table tbl = getTable(tblName);
     String partName = null;
-    if (tblPart == null) {
-      tblName = getUnescapedName((ASTNode) ast.getChild(0));
-      colName = getUnescapedName((ASTNode) ast.getChild(1));
-      mapProp = getProps((ASTNode) (ast.getChild(2)).getChild(0));
-    } else {
-      tblName = tblPart.tableName;
-      partSpec = tblPart.partSpec;
+    if (partSpec != null) {
       try {
         partName = Warehouse.makePartName(partSpec, false);
       } catch (MetaException e) {
@@ -537,15 +509,6 @@ public class DDLSemanticAnalyzer extends
         throw new SemanticException("partition " + partSpec.toString()
             + " not found");
       }
-      colName = getUnescapedName((ASTNode) ast.getChild(0));
-      mapProp = getProps((ASTNode) (ast.getChild(1)).getChild(0));
-    }
-
-    Table tbl = null;
-    try {
-      tbl = db.getTable(tblName);
-    } catch (HiveException e) {
-      throw new SemanticException("table " + tbl + " not found");
     }
 
     String colType = null;
@@ -711,12 +674,12 @@ public class DDLSemanticAnalyzer extends
     addAlterDbDesc(alterDesc);
   }
 
-  private void analyzeExchangePartition(ASTNode ast) throws SemanticException {
-    Table destTable =  getTable(getUnescapedName((ASTNode)ast.getChild(0)));
-    Table sourceTable = getTable(getUnescapedName((ASTNode)ast.getChild(2)));
+  private void analyzeExchangePartition(String[] qualified, ASTNode ast) throws SemanticException {
+    Table destTable = getTable(qualified);
+    Table sourceTable = getTable(getUnescapedName((ASTNode)ast.getChild(1)));
 
     // Get the partition specs
-    Map<String, String> partSpecs = getPartSpec((ASTNode) ast.getChild(1));
+    Map<String, String> partSpecs = getPartSpec((ASTNode) ast.getChild(0));
     validatePartitionValues(partSpecs);
     boolean sameColumns = MetaStoreUtils.compareFieldColumns(
         destTable.getAllCols(), sourceTable.getAllCols());
@@ -1237,8 +1200,7 @@ public class DDLSemanticAnalyzer extends
       if (indexTableName != null) {
         indexTbl = getTable(Utilities.getDbTableName(index.getDbName(), indexTableName));
       }
-      String baseTblName = index.getOrigTableName();
-      Table baseTbl = getTable(baseTblName);
+      Table baseTbl = getTable(new String[] {index.getDbName(), index.getOrigTableName()});
 
       String handlerCls = index.getIndexHandlerClass();
       HiveIndexHandler handler = HiveUtils.getIndexHandler(conf, handlerCls);
@@ -1331,16 +1293,16 @@ public class DDLSemanticAnalyzer extends
     }
   }
 
-  private void analyzeAlterTableProps(ASTNode ast, boolean expectView, boolean isUnset)
+  private void analyzeAlterTableProps(String[] qualified, ASTNode ast, boolean expectView, boolean isUnset)
       throws SemanticException {
 
-    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
-    HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(1))
+    String tableName = getDotName(qualified);
+    HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(0))
         .getChild(0));
     AlterTableDesc alterTblDesc = null;
     if (isUnset == true) {
       alterTblDesc = new AlterTableDesc(AlterTableTypes.DROPPROPS, expectView);
-      if (ast.getChild(2) != null) {
+      if (ast.getChild(1) != null) {
         alterTblDesc.setDropIfExists(true);
       }
     } else {
@@ -1527,7 +1489,7 @@ public class DDLSemanticAnalyzer extends
         alterTblDesc), conf));
   }
 
-  private void analyzeAlterTablePartMergeFiles(ASTNode tablePartAST, ASTNode ast,
+  private void analyzeAlterTablePartMergeFiles(ASTNode ast,
       String tableName, HashMap<String, String> partSpec)
       throws SemanticException {
     AlterTablePartMergeFilesDesc mergeDesc = new AlterTablePartMergeFilesDesc(
@@ -1639,7 +1601,7 @@ public class DDLSemanticAnalyzer extends
         StatsWork statDesc;
         if (oldTblPartLoc.equals(newTblPartLoc)) {
           // If we're merging to the same location, we can avoid some metastore calls
-          tableSpec tablepart = new tableSpec(this.db, conf, tablePartAST);
+          tableSpec tablepart = new tableSpec(db, conf, tableName, partSpec);
           statDesc = new StatsWork(tablepart);
         } else {
           statDesc = new StatsWork(ltd);
@@ -1672,7 +1634,7 @@ public class DDLSemanticAnalyzer extends
       alterTblDesc = new AlterTableDesc(tableName, true, partSpec);
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc), conf));
       break;
-    case HiveParser.TOK_TABLEBUCKETS:
+    case HiveParser.TOK_ALTERTABLE_BUCKETS:
       ASTNode buckets = (ASTNode) ast.getChild(0);
       List<String> bucketCols = getColumnNames((ASTNode) buckets.getChild(0));
       List<Order> sortCols = new ArrayList<Order>();
@@ -2502,9 +2464,9 @@ public class DDLSemanticAnalyzer extends
   }
 
 
-  private void analyzeAlterTableRename(ASTNode ast, boolean expectView) throws SemanticException {
-    String[] source = getQualifiedTableName((ASTNode) ast.getChild(0));
-    String[] target = getQualifiedTableName((ASTNode) ast.getChild(1));
+  private void analyzeAlterTableRename(String[] source, ASTNode ast, boolean expectView)
+      throws SemanticException {
+    String[] target = getQualifiedTableName((ASTNode) ast.getChild(0));
 
     String sourceName = getDotName(source);
     String targetName = getDotName(target);
@@ -2515,22 +2477,21 @@ public class DDLSemanticAnalyzer extends
         alterTblDesc), conf));
   }
 
-  private void analyzeAlterTableRenameCol(ASTNode ast) throws SemanticException {
-    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
+  private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast) throws SemanticException {
     String newComment = null;
     String newType = null;
-    newType = getTypeStringFromAST((ASTNode) ast.getChild(3));
+    newType = getTypeStringFromAST((ASTNode) ast.getChild(2));
     boolean first = false;
     String flagCol = null;
     ASTNode positionNode = null;
-    if (ast.getChildCount() == 6) {
-      newComment = unescapeSQLString(ast.getChild(4).getText());
-      positionNode = (ASTNode) ast.getChild(5);
-    } else if (ast.getChildCount() == 5) {
-      if (ast.getChild(4).getType() == HiveParser.StringLiteral) {
-        newComment = unescapeSQLString(ast.getChild(4).getText());
+    if (ast.getChildCount() == 5) {
+      newComment = unescapeSQLString(ast.getChild(3).getText());
+      positionNode = (ASTNode) ast.getChild(4);
+    } else if (ast.getChildCount() == 4) {
+      if (ast.getChild(3).getType() == HiveParser.StringLiteral) {
+        newComment = unescapeSQLString(ast.getChild(3).getText());
       } else {
-        positionNode = (ASTNode) ast.getChild(4);
+        positionNode = (ASTNode) ast.getChild(3);
       }
     }
 
@@ -2542,8 +2503,8 @@ public class DDLSemanticAnalyzer extends
       }
     }
 
-    String oldColName = ast.getChild(1).getText();
-    String newColName = ast.getChild(2).getText();
+    String oldColName = ast.getChild(0).getText();
+    String newColName = ast.getChild(1).getText();
 
     /* Validate the operation of renaming a column name. */
     Table tab = getTable(qualified);
@@ -2603,12 +2564,11 @@ public class DDLSemanticAnalyzer extends
         alterBucketNum), conf));
   }
 
-  private void analyzeAlterTableModifyCols(ASTNode ast,
+  private void analyzeAlterTableModifyCols(String[] qualified, ASTNode ast,
       AlterTableTypes alterType) throws SemanticException {
-    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
 
     String tblName = getDotName(qualified);
-    List<FieldSchema> newCols = getColumns((ASTNode) ast.getChild(1));
+    List<FieldSchema> newCols = getColumns((ASTNode) ast.getChild(0));
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, newCols,
         alterType);
 
@@ -2617,7 +2577,7 @@ public class DDLSemanticAnalyzer extends
         alterTblDesc), conf));
   }
 
-  private void analyzeAlterTableDropParts(ASTNode ast, boolean expectView)
+  private void analyzeAlterTableDropParts(String[] qualified, ASTNode ast, boolean expectView)
       throws SemanticException {
 
     boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null)
@@ -2630,7 +2590,6 @@ public class DDLSemanticAnalyzer extends
     // popular case but that's kinda hacky. Let's not do it for now.
     boolean canGroupExprs = ifExists;
 
-    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
     Table tab = getTable(qualified);
     Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs =
         getFullPartitionSpecs(ast, tab, canGroupExprs);
@@ -2649,10 +2608,8 @@ public class DDLSemanticAnalyzer extends
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc), conf));
   }
 
-  private void analyzeAlterTablePartColType(ASTNode ast)
+  private void analyzeAlterTablePartColType(String[] qualified, ASTNode ast)
       throws SemanticException {
-    // get table name
-    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
 
 
     // check if table exists.
@@ -2664,7 +2621,7 @@ public class DDLSemanticAnalyzer extends
 
     // Alter table ... partition column ( column newtype) only takes one column at a time.
     // It must have a column name followed with type.
-    ASTNode colAst = (ASTNode) ast.getChild(1);
+    ASTNode colAst = (ASTNode) ast.getChild(0);
     assert(colAst.getChildCount() == 2);
 
     FieldSchema newCol = new FieldSchema();
@@ -2710,12 +2667,11 @@ public class DDLSemanticAnalyzer extends
    * @throws SemanticException
    *           Parsing failed
    */
-  private void analyzeAlterTableAddParts(CommonTree ast, boolean expectView)
+  private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boolean expectView)
       throws SemanticException {
 
     // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+)
-    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
-    boolean ifNotExists = ast.getChild(1).getType() == HiveParser.TOK_IFNOTEXISTS;
+    boolean ifNotExists = ast.getChild(0).getType() == HiveParser.TOK_IFNOTEXISTS;
 
     Table tab = getTable(qualified);
     boolean isView = tab.isView();
@@ -2723,7 +2679,7 @@ public class DDLSemanticAnalyzer extends
     outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED));
 
     int numCh = ast.getChildCount();
-    int start = ifNotExists ? 2 : 1;
+    int start = ifNotExists ? 1 : 0;
 
     String currentLocation = null;
     Map<String, String> currentPart = null;
@@ -2840,9 +2796,8 @@ public class DDLSemanticAnalyzer extends
    * @throws SemanticException
   *           Parsing failed
    */
-  private void analyzeAlterTableTouch(CommonTree ast)
+  private void analyzeAlterTableTouch(String[] qualified, CommonTree ast)
       throws SemanticException {
-    String[] qualified = getQualifiedTableName((ASTNode)ast.getChild(0));
 
     Table tab = getTable(qualified);
     validateAlterTableType(tab, AlterTableTypes.TOUCH);
@@ -2870,14 +2825,13 @@ public class DDLSemanticAnalyzer extends
     }
   }
 
-  private void analyzeAlterTableArchive(CommonTree ast, boolean isUnArchive)
+  private void analyzeAlterTableArchive(String[] qualified, CommonTree ast, boolean isUnArchive)
       throws SemanticException {
 
     if (!conf.getBoolVar(HiveConf.ConfVars.HIVEARCHIVEENABLED)) {
       throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg());
 
     }
-    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
     // partition name to value
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
 
@@ -2948,7 +2902,7 @@ public class DDLSemanticAnalyzer extends
     List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>();
     int childIndex = 0;
     // get partition metadata if partition specified
-    for (childIndex = 1; childIndex < ast.getChildCount(); childIndex++) {
+    for (childIndex = 0; childIndex < ast.getChildCount(); childIndex++) {
       Tree partspec = ast.getChild(childIndex);
       // sanity check
       if (partspec.getType() == HiveParser.TOK_PARTSPEC) {
@@ -2976,7 +2930,7 @@ public class DDLSemanticAnalyzer extends
 
     Map<Integer, List<ExprNodeGenericFuncDesc>> result =
         new HashMap<Integer, List<ExprNodeGenericFuncDesc>>();
-    for (int childIndex = 1; childIndex < ast.getChildCount(); childIndex++) {
+    for (int childIndex = 0; childIndex < ast.getChildCount(); childIndex++) {
       Tree partSpecTree = ast.getChild(childIndex);
       if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) continue;
       ExprNodeGenericFuncDesc expr = null;
@@ -3184,14 +3138,13 @@ public class DDLSemanticAnalyzer extends
    *          node
    * @throws SemanticException
    */
-  private void analyzeAltertableSkewedby(ASTNode ast) throws SemanticException {
+  private void analyzeAltertableSkewedby(String[] qualified, ASTNode ast) throws SemanticException {
     /**
      * Throw an error if the user tries to use the DDL with
      * hive.internal.ddl.list.bucketing.enable set to false.
      */
     HiveConf hiveConf = SessionState.get().getConf();
 
-    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0));
     Table tab = getTable(qualified);
 
     inputs.add(new ReadEntity(tab));
@@ -3200,7 +3153,7 @@ public class DDLSemanticAnalyzer extends
     validateAlterTableType(tab, AlterTableTypes.ADDSKEWEDBY);
 
     String tableName = getDotName(qualified);
-    if (ast.getChildCount() == 1) {
+    if (ast.getChildCount() == 0) {
       /* Convert a skewed table to non-skewed table. */
       AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, true,
           new ArrayList<String>(), new ArrayList<List<String>>());
@@ -3208,7 +3161,7 @@ public class DDLSemanticAnalyzer extends
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
           alterTblDesc), conf));
     } else {
-      switch (((ASTNode) ast.getChild(1)).getToken().getType()) {
+      switch (((ASTNode) ast.getChild(0)).getToken().getType()) {
       case HiveParser.TOK_TABLESKEWED:
         handleAlterTableSkewedBy(ast, tableName, tab);
         break;
@@ -3255,7 +3208,7 @@ public class DDLSemanticAnalyzer extends
     List<String> skewedColNames = new ArrayList<String>();
     List<List<String>> skewedValues = new ArrayList<List<String>>();
     /* skewed column names. */
-    ASTNode skewedNode = (ASTNode) ast.getChild(1);
+    ASTNode skewedNode = (ASTNode) ast.getChild(0);
     skewedColNames = analyzeSkewedTablDDLColNames(skewedColNames, skewedNode);
     /* skewed value. */
     analyzeDDLSkewedValues(skewedValues, skewedNode);
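
For illustration only (not part of the patch above): analyzeInternal() now receives a single TOK_ALTERTABLE (or TOK_ALTERVIEW) wrapper whose first child is the table name, second child is the concrete ALTER suffix, and optional third child is the partition spec. The name and spec are resolved once and passed into every analyze* helper, which is why each helper's child indexes shift down by one. A minimal, self-contained sketch of that dispatch shape, with a toy Node class standing in for Hive's ASTNode:

    import java.util.Arrays;
    import java.util.List;

    public class AlterDispatchShape {
      static class Node {
        final String type;
        final List<Node> children;
        Node(String type, Node... children) { this.type = type; this.children = Arrays.asList(children); }
        Node child(int i) { return i < children.size() ? children.get(i) : null; }
      }

      static void analyzeInternal(Node input) {
        Node tabName  = input.child(0);   // e.g. TOK_TABNAME
        Node suffix   = input.child(1);   // e.g. TOK_ALTERTABLE_ADDCOLS
        Node partSpec = input.child(2);   // optional TOK_PARTSPEC, may be null
        switch (suffix.type) {
          case "TOK_ALTERTABLE_ADDCOLS":
            // The column list is now suffix.child(0); before this change it sat at
            // child(1) because child(0) repeated the table name.
            System.out.println("ADDCOLS on " + tabName.type
                + ", cols=" + suffix.child(0).type + ", partSpec=" + partSpec);
            break;
          default:
            System.out.println("unhandled suffix: " + suffix.type);
        }
      }

      public static void main(String[] args) {
        Node ast = new Node("TOK_ALTERTABLE",
            new Node("TOK_TABNAME"),
            new Node("TOK_ALTERTABLE_ADDCOLS", new Node("TOK_TABCOLLIST")));
        analyzeInternal(ast);
      }
    }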

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Thu Aug 28 03:15:13 2014
@@ -126,11 +126,10 @@ TOK_CREATEINDEX;
 TOK_CREATEINDEX_INDEXTBLNAME;
 TOK_DEFERRED_REBUILDINDEX;
 TOK_DROPINDEX;
-TOK_DROPTABLE_PROPERTIES;
 TOK_LIKETABLE;
 TOK_DESCTABLE;
 TOK_DESCFUNCTION;
-TOK_ALTERTABLE_PARTITION;
+TOK_ALTERTABLE;
 TOK_ALTERTABLE_RENAME;
 TOK_ALTERTABLE_ADDCOLS;
 TOK_ALTERTABLE_RENAMECOL;
@@ -152,6 +151,13 @@ TOK_ALTERTABLE_FILEFORMAT;
 TOK_ALTERTABLE_LOCATION;
 TOK_ALTERTABLE_PROPERTIES;
 TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION;
+TOK_ALTERTABLE_DROPPROPERTIES;
+TOK_ALTERTABLE_SKEWED;
+TOK_ALTERTABLE_EXCHANGEPARTITION;
+TOK_ALTERTABLE_SKEWED_LOCATION;
+TOK_ALTERTABLE_BUCKETS;
+TOK_ALTERTABLE_CLUSTER_SORT;
+TOK_ALTERTABLE_COMPACT;
 TOK_ALTERINDEX_REBUILD;
 TOK_ALTERINDEX_PROPERTIES;
 TOK_MSCK;
@@ -177,7 +183,6 @@ TOK_TABCOLLIST;
 TOK_TABCOL;
 TOK_TABLECOMMENT;
 TOK_TABLEPARTCOLS;
-TOK_TABLEBUCKETS;
 TOK_TABLEROWFORMAT;
 TOK_TABLEROWFORMATFIELD;
 TOK_TABLEROWFORMATCOLLITEMS;
@@ -192,7 +197,6 @@ TOK_DISABLE;
 TOK_READONLY;
 TOK_NO_DROP;
 TOK_STORAGEHANDLER;
-TOK_ALTERTABLE_CLUSTER_SORT;
 TOK_NOT_CLUSTERED;
 TOK_NOT_SORTED;
 TOK_TABCOLNAME;
@@ -215,9 +219,9 @@ TOK_DROPMACRO;
 TOK_TEMPORARY;
 TOK_CREATEVIEW;
 TOK_DROPVIEW;
-TOK_ALTERVIEW_AS;
+TOK_ALTERVIEW;
 TOK_ALTERVIEW_PROPERTIES;
-TOK_DROPVIEW_PROPERTIES;
+TOK_ALTERVIEW_DROPPROPERTIES;
 TOK_ALTERVIEW_ADDPARTS;
 TOK_ALTERVIEW_DROPPARTS;
 TOK_ALTERVIEW_RENAME;
@@ -302,8 +306,6 @@ TOK_TABLESKEWED;
 TOK_TABCOLVALUE;
 TOK_TABCOLVALUE_PAIR;
 TOK_TABCOLVALUES;
-TOK_ALTERTABLE_SKEWED;
-TOK_ALTERTBLPART_SKEWED_LOCATION;
 TOK_SKEWED_LOCATIONS;
 TOK_SKEWED_LOCATION_LIST;
 TOK_SKEWED_LOCATION_MAP;
@@ -315,7 +317,6 @@ TOK_WINDOWSPEC;
 TOK_WINDOWVALUES;
 TOK_WINDOWRANGE;
 TOK_IGNOREPROTECTION;
-TOK_EXCHANGEPARTITION;
 TOK_SUBQUERY_EXPR;
 TOK_SUBQUERY_OP;
 TOK_SUBQUERY_OP_NOTIN;
@@ -328,7 +329,6 @@ TOK_FILE;
 TOK_JAR;
 TOK_RESOURCE_URI;
 TOK_RESOURCE_LIST;
-TOK_COMPACT;
 TOK_SHOW_COMPACTIONS;
 TOK_SHOW_TRANSACTIONS;
 TOK_DELETE_FROM;
@@ -935,56 +935,62 @@ dropTableStatement
 alterStatement
 @init { pushMsg("alter statement", state); }
 @after { popMsg(state); }
-    : KW_ALTER!
-        (
-            KW_TABLE! alterTableStatementSuffix
-        |
-            KW_VIEW! alterViewStatementSuffix
-        |
-            KW_INDEX! alterIndexStatementSuffix
-        |
-            (KW_DATABASE|KW_SCHEMA)! alterDatabaseStatementSuffix
-        )
+    : KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^(TOK_ALTERTABLE tableName alterTableStatementSuffix)
+    | KW_ALTER KW_VIEW tableName KW_AS? alterViewStatementSuffix -> ^(TOK_ALTERVIEW tableName alterViewStatementSuffix)
+    | KW_ALTER KW_INDEX alterIndexStatementSuffix -> alterIndexStatementSuffix
+    | KW_ALTER (KW_DATABASE|KW_SCHEMA) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix
     ;
 
 alterTableStatementSuffix
 @init { pushMsg("alter table statement", state); }
 @after { popMsg(state); }
-    : alterStatementSuffixRename
+    : alterStatementSuffixRename[true]
     | alterStatementSuffixAddCol
     | alterStatementSuffixRenameCol
     | alterStatementSuffixUpdateStatsCol
-    | alterStatementSuffixDropPartitions
-    | alterStatementSuffixAddPartitions
+    | alterStatementSuffixDropPartitions[true]
+    | alterStatementSuffixAddPartitions[true]
     | alterStatementSuffixTouch
     | alterStatementSuffixArchive
     | alterStatementSuffixUnArchive
     | alterStatementSuffixProperties
-    | alterTblPartitionStatement
     | alterStatementSuffixSkewedby
     | alterStatementSuffixExchangePartition
     | alterStatementPartitionKeyType
+    | partitionSpec? alterTblPartitionStatementSuffix -> alterTblPartitionStatementSuffix partitionSpec?
     ;
 
+alterTblPartitionStatementSuffix
+@init {pushMsg("alter table partition statement suffix", state);}
+@after {popMsg(state);}
+  : alterStatementSuffixFileFormat
+  | alterStatementSuffixLocation
+  | alterStatementSuffixProtectMode
+  | alterStatementSuffixMergeFiles
+  | alterStatementSuffixSerdeProperties
+  | alterStatementSuffixRenamePart
+  | alterStatementSuffixBucketNum
+  | alterTblPartitionStatementSuffixSkewedLocation
+  | alterStatementSuffixClusterbySortby
+  | alterStatementSuffixCompact
+  | alterStatementSuffixUpdateStatsCol
+  ;
+
 alterStatementPartitionKeyType
 @init {msgs.push("alter partition key type"); }
 @after {msgs.pop();}
-	: tableName KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
-	-> ^(TOK_ALTERTABLE_PARTCOLTYPE tableName columnNameType)
+	: KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
+	-> ^(TOK_ALTERTABLE_PARTCOLTYPE columnNameType)
 	;
 
 alterViewStatementSuffix
 @init { pushMsg("alter view statement", state); }
 @after { popMsg(state); }
     : alterViewSuffixProperties
-    | alterStatementSuffixRename
-        -> ^(TOK_ALTERVIEW_RENAME alterStatementSuffixRename)
-    | alterStatementSuffixAddPartitions
-        -> ^(TOK_ALTERVIEW_ADDPARTS alterStatementSuffixAddPartitions)
-    | alterStatementSuffixDropPartitions
-        -> ^(TOK_ALTERVIEW_DROPPARTS alterStatementSuffixDropPartitions)
-    | name=tableName KW_AS selectStatementWithCTE
-        -> ^(TOK_ALTERVIEW_AS $name selectStatementWithCTE)
+    | alterStatementSuffixRename[false]
+    | alterStatementSuffixAddPartitions[false]
+    | alterStatementSuffixDropPartitions[false]
+    | selectStatementWithCTE
     ;
 
 alterIndexStatementSuffix
@@ -1022,33 +1028,34 @@ alterDatabaseSuffixSetOwner
     -> ^(TOK_ALTERDATABASE_OWNER $dbName principalName)
     ;
 
-alterStatementSuffixRename
+alterStatementSuffixRename[boolean table]
 @init { pushMsg("rename statement", state); }
 @after { popMsg(state); }
-    : oldName=tableName KW_RENAME KW_TO newName=tableName
-    -> ^(TOK_ALTERTABLE_RENAME $oldName $newName)
+    : KW_RENAME KW_TO tableName
+    -> { table }? ^(TOK_ALTERTABLE_RENAME tableName)
+    ->            ^(TOK_ALTERVIEW_RENAME tableName)
     ;
 
 alterStatementSuffixAddCol
 @init { pushMsg("add column statement", state); }
 @after { popMsg(state); }
-    : tableName (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN
-    -> {$add != null}? ^(TOK_ALTERTABLE_ADDCOLS tableName columnNameTypeList)
-    ->                 ^(TOK_ALTERTABLE_REPLACECOLS tableName columnNameTypeList)
+    : (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN
+    -> {$add != null}? ^(TOK_ALTERTABLE_ADDCOLS columnNameTypeList)
+    ->                 ^(TOK_ALTERTABLE_REPLACECOLS columnNameTypeList)
     ;
 
 alterStatementSuffixRenameCol
 @init { pushMsg("rename column name", state); }
 @after { popMsg(state); }
-    : tableName KW_CHANGE KW_COLUMN? oldName=identifier newName=identifier colType (KW_COMMENT comment=StringLiteral)? alterStatementChangeColPosition?
-    ->^(TOK_ALTERTABLE_RENAMECOL tableName $oldName $newName colType $comment? alterStatementChangeColPosition?)
+    : KW_CHANGE KW_COLUMN? oldName=identifier newName=identifier colType (KW_COMMENT comment=StringLiteral)? alterStatementChangeColPosition?
+    ->^(TOK_ALTERTABLE_RENAMECOL $oldName $newName colType $comment? alterStatementChangeColPosition?)
     ;
 
 alterStatementSuffixUpdateStatsCol
 @init { pushMsg("update column statistics", state); }
 @after { popMsg(state); }
-    : identifier KW_UPDATE KW_STATISTICS KW_FOR KW_COLUMN? colName=identifier KW_SET tableProperties (KW_COMMENT comment=StringLiteral)?
-    ->^(TOK_ALTERTABLE_UPDATECOLSTATS identifier $colName tableProperties $comment?)
+    : KW_UPDATE KW_STATISTICS KW_FOR KW_COLUMN? colName=identifier KW_SET tableProperties (KW_COMMENT comment=StringLiteral)?
+    ->^(TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties $comment?)
     ;
 
 alterStatementChangeColPosition
@@ -1057,11 +1064,12 @@ alterStatementChangeColPosition
     -> ^(TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol)
     ;
 
-alterStatementSuffixAddPartitions
+alterStatementSuffixAddPartitions[boolean table]
 @init { pushMsg("add partition statement", state); }
 @after { popMsg(state); }
-    : tableName KW_ADD ifNotExists? alterStatementSuffixAddPartitionsElement+
-    -> ^(TOK_ALTERTABLE_ADDPARTS tableName ifNotExists? alterStatementSuffixAddPartitionsElement+)
+    : KW_ADD ifNotExists? alterStatementSuffixAddPartitionsElement+
+    -> { table }? ^(TOK_ALTERTABLE_ADDPARTS ifNotExists? alterStatementSuffixAddPartitionsElement+)
+    ->            ^(TOK_ALTERVIEW_ADDPARTS ifNotExists? alterStatementSuffixAddPartitionsElement+)
     ;
 
 alterStatementSuffixAddPartitionsElement
@@ -1071,22 +1079,22 @@ alterStatementSuffixAddPartitionsElement
 alterStatementSuffixTouch
 @init { pushMsg("touch statement", state); }
 @after { popMsg(state); }
-    : tableName KW_TOUCH (partitionSpec)*
-    -> ^(TOK_ALTERTABLE_TOUCH tableName (partitionSpec)*)
+    : KW_TOUCH (partitionSpec)*
+    -> ^(TOK_ALTERTABLE_TOUCH (partitionSpec)*)
     ;
 
 alterStatementSuffixArchive
 @init { pushMsg("archive statement", state); }
 @after { popMsg(state); }
-    : tableName KW_ARCHIVE (partitionSpec)*
-    -> ^(TOK_ALTERTABLE_ARCHIVE tableName (partitionSpec)*)
+    : KW_ARCHIVE (partitionSpec)*
+    -> ^(TOK_ALTERTABLE_ARCHIVE (partitionSpec)*)
     ;
 
 alterStatementSuffixUnArchive
 @init { pushMsg("unarchive statement", state); }
 @after { popMsg(state); }
-    : tableName KW_UNARCHIVE (partitionSpec)*
-    -> ^(TOK_ALTERTABLE_UNARCHIVE tableName (partitionSpec)*)
+    : KW_UNARCHIVE (partitionSpec)*
+    -> ^(TOK_ALTERTABLE_UNARCHIVE (partitionSpec)*)
     ;
 
 partitionLocation
@@ -1096,29 +1104,30 @@ partitionLocation
       KW_LOCATION locn=StringLiteral -> ^(TOK_PARTITIONLOCATION $locn)
     ;
 
-alterStatementSuffixDropPartitions
+alterStatementSuffixDropPartitions[boolean table]
 @init { pushMsg("drop partition statement", state); }
 @after { popMsg(state); }
-    : tableName KW_DROP ifExists? dropPartitionSpec (COMMA dropPartitionSpec)* ignoreProtection?
-    -> ^(TOK_ALTERTABLE_DROPPARTS tableName dropPartitionSpec+ ifExists? ignoreProtection?)
+    : KW_DROP ifExists? dropPartitionSpec (COMMA dropPartitionSpec)* ignoreProtection?
+    -> { table }? ^(TOK_ALTERTABLE_DROPPARTS dropPartitionSpec+ ifExists? ignoreProtection?)
+    ->            ^(TOK_ALTERVIEW_DROPPARTS dropPartitionSpec+ ifExists? ignoreProtection?)
     ;
 
 alterStatementSuffixProperties
 @init { pushMsg("alter properties statement", state); }
 @after { popMsg(state); }
-    : tableName KW_SET KW_TBLPROPERTIES tableProperties
-    -> ^(TOK_ALTERTABLE_PROPERTIES tableName tableProperties)
-    | tableName KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
-    -> ^(TOK_DROPTABLE_PROPERTIES tableName tableProperties ifExists?)
+    : KW_SET KW_TBLPROPERTIES tableProperties
+    -> ^(TOK_ALTERTABLE_PROPERTIES tableProperties)
+    | KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
+    -> ^(TOK_ALTERTABLE_DROPPROPERTIES tableProperties ifExists?)
     ;
 
 alterViewSuffixProperties
 @init { pushMsg("alter view properties statement", state); }
 @after { popMsg(state); }
-    : tableName KW_SET KW_TBLPROPERTIES tableProperties
-    -> ^(TOK_ALTERVIEW_PROPERTIES tableName tableProperties)
-    | tableName KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
-    -> ^(TOK_DROPVIEW_PROPERTIES tableName tableProperties ifExists?)
+    : KW_SET KW_TBLPROPERTIES tableProperties
+    -> ^(TOK_ALTERVIEW_PROPERTIES tableProperties)
+    | KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
+    -> ^(TOK_ALTERVIEW_DROPPROPERTIES tableProperties ifExists?)
     ;
 
 alterStatementSuffixSerdeProperties
@@ -1137,29 +1146,6 @@ tablePartitionPrefix
   ->^(TOK_TABLE_PARTITION tableName partitionSpec?)
   ;
 
-alterTblPartitionStatement
-@init {pushMsg("alter table partition statement", state);}
-@after {popMsg(state);}
-  : tablePartitionPrefix alterTblPartitionStatementSuffix
-  -> ^(TOK_ALTERTABLE_PARTITION tablePartitionPrefix alterTblPartitionStatementSuffix)
-  ;
-
-alterTblPartitionStatementSuffix
-@init {pushMsg("alter table partition statement suffix", state);}
-@after {popMsg(state);}
-  : alterStatementSuffixFileFormat
-  | alterStatementSuffixLocation
-  | alterStatementSuffixProtectMode
-  | alterStatementSuffixMergeFiles
-  | alterStatementSuffixSerdeProperties
-  | alterStatementSuffixRenamePart
-  | alterStatementSuffixStatsPart
-  | alterStatementSuffixBucketNum
-  | alterTblPartitionStatementSuffixSkewedLocation
-  | alterStatementSuffixClusterbySortby
-  | alterStatementSuffixCompact
-  ;
-
 alterStatementSuffixFileFormat
 @init {pushMsg("alter fileformat statement", state); }
 @after {popMsg(state);}
@@ -1179,7 +1165,7 @@ alterTblPartitionStatementSuffixSkewedLo
 @init {pushMsg("alter partition skewed location", state);}
 @after {popMsg(state);}
   : KW_SET KW_SKEWED KW_LOCATION skewedLocations
-  -> ^(TOK_ALTERTBLPART_SKEWED_LOCATION skewedLocations)
+  -> ^(TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations)
   ;
   
 skewedLocations
@@ -1214,21 +1200,21 @@ alterStatementSuffixLocation
 alterStatementSuffixSkewedby
 @init {pushMsg("alter skewed by statement", state);}
 @after{popMsg(state);}
-	: tableName tableSkewed
-	->^(TOK_ALTERTABLE_SKEWED tableName tableSkewed)
+	: tableSkewed
+	->^(TOK_ALTERTABLE_SKEWED tableSkewed)
 	|
-	 tableName KW_NOT KW_SKEWED
-	->^(TOK_ALTERTABLE_SKEWED tableName)
+	 KW_NOT KW_SKEWED
+	->^(TOK_ALTERTABLE_SKEWED)
 	|
-	 tableName KW_NOT storedAsDirs
-	->^(TOK_ALTERTABLE_SKEWED tableName storedAsDirs)
+	 KW_NOT storedAsDirs
+	->^(TOK_ALTERTABLE_SKEWED storedAsDirs)
 	;
 
 alterStatementSuffixExchangePartition
 @init {pushMsg("alter exchange partition", state);}
 @after{popMsg(state);}
-    : tableName KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename=tableName
-    -> ^(TOK_EXCHANGEPARTITION tableName partitionSpec $exchangename)
+    : KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename=tableName
+    -> ^(TOK_ALTERTABLE_EXCHANGEPARTITION partitionSpec $exchangename)
     ;
 
 alterStatementSuffixProtectMode
@@ -1278,14 +1264,14 @@ alterStatementSuffixBucketNum
 @init { pushMsg("", state); }
 @after { popMsg(state); }
     : KW_INTO num=Number KW_BUCKETS
-    -> ^(TOK_TABLEBUCKETS $num)
+    -> ^(TOK_ALTERTABLE_BUCKETS $num)
     ;
 
 alterStatementSuffixCompact
 @init { msgs.push("compaction request"); }
 @after { msgs.pop(); }
     : KW_COMPACT compactType=StringLiteral
-    -> ^(TOK_COMPACT $compactType)
+    -> ^(TOK_ALTERTABLE_COMPACT $compactType)
     ;
 
 
@@ -1707,7 +1693,7 @@ tableBuckets
 @after { popMsg(state); }
     :
       KW_CLUSTERED KW_BY LPAREN bucketCols=columnNameList RPAREN (KW_SORTED KW_BY LPAREN sortCols=columnNameOrderList RPAREN)? KW_INTO num=Number KW_BUCKETS
-    -> ^(TOK_TABLEBUCKETS $bucketCols $sortCols? $num)
+    -> ^(TOK_ALTERTABLE_BUCKETS $bucketCols $sortCols? $num)
     ;
 
 tableSkewed
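
For reference (this example is not part of the patch above): with the rewritten rules, the table name is captured once by the TOK_ALTERTABLE / TOK_ALTERVIEW wrapper instead of being repeated inside every suffix rewrite, and an optional partitionSpec trails the suffix. Assuming Hive's usual TOK_TABNAME and TOK_PARTSPEC nodes, a statement such as

    ALTER TABLE db1.t1 PARTITION (ds='2014-08-28') SET FILEFORMAT ORC

now parses roughly as

    ^(TOK_ALTERTABLE ^(TOK_TABNAME db1 t1)
                     ^(TOK_ALTERTABLE_FILEFORMAT ...)
                     ^(TOK_PARTSPEC ...))

which matches the child(0)/child(1)/child(2) layout that DDLSemanticAnalyzer now reads.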

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java Thu Aug 28 03:15:13 2014
@@ -88,6 +88,7 @@ public class IndexUpdater {
       sb.append("ALTER INDEX ");
       sb.append(idx.getIndexName());
       sb.append(" ON ");
+      sb.append(idx.getDbName()).append('.');
       sb.append(idx.getOrigTableName());
       sb.append(" REBUILD");
       driver.compile(sb.toString(), false);
@@ -125,6 +126,7 @@ public class IndexUpdater {
     sb.append("ALTER INDEX ");
     sb.append(index.getIndexName());
     sb.append(" ON ");
+    sb.append(index.getDbName()).append('.');
     sb.append(index.getOrigTableName());
     sb.append(" PARTITION ");
     sb.append(ps.toString());
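
For illustration only (not part of the patch above): the only change here is that the generated ALTER INDEX ... REBUILD statements now qualify the base table with the index's database, so the rebuild resolves correctly regardless of the session's current database. A minimal sketch of the statement being assembled, with illustrative values standing in for the Index accessors:

    public class IndexRebuildStatement {
      public static void main(String[] args) {
        String indexName = "orders_idx";   // idx.getIndexName()
        String dbName = "sales";           // idx.getDbName()
        String origTableName = "orders";   // idx.getOrigTableName()

        StringBuilder sb = new StringBuilder();
        sb.append("ALTER INDEX ");
        sb.append(indexName);
        sb.append(" ON ");
        sb.append(dbName).append('.');     // the newly added qualification
        sb.append(origTableName);
        sb.append(" REBUILD");

        System.out.println(sb);            // ALTER INDEX orders_idx ON sales.orders REBUILD
      }
    }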

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Aug 28 03:15:13 2014
@@ -9537,7 +9537,7 @@ public class SemanticAnalyzer extends Ba
 
     // analyze create view command
     if (ast.getToken().getType() == HiveParser.TOK_CREATEVIEW ||
-        ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_AS) {
+        (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW && ast.getChild(1).getType() == HiveParser.TOK_QUERY)) {
       child = analyzeCreateView(ast, qb);
       SessionState.get().setCommandType(HiveOperation.CREATEVIEW);
       if (child == null) {
@@ -9545,7 +9545,7 @@ public class SemanticAnalyzer extends Ba
       }
       viewSelect = child;
       // prevent view from referencing itself
-      viewsExpanded.add(SessionState.get().getCurrentDatabase() + "." + createVwDesc.getViewName());
+      viewsExpanded.add(createVwDesc.getViewName());
     }
 
     // continue analyzing from the child ASTNode.
@@ -10193,7 +10193,9 @@ public class SemanticAnalyzer extends Ba
    */
   private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
       throws SemanticException {
-    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
+    String[] qualifiedTabName = getQualifiedTableName((ASTNode) ast.getChild(0));
+    String dbDotTab = getDotName(qualifiedTabName);
+
     String likeTableName = null;
     List<FieldSchema> cols = new ArrayList<FieldSchema>();
     List<FieldSchema> partCols = new ArrayList<FieldSchema>();
@@ -10219,7 +10221,7 @@ public class SemanticAnalyzer extends Ba
     RowFormatParams rowFormatParams = new RowFormatParams();
     StorageFormat storageFormat = new StorageFormat(conf);
 
-    LOG.info("Creating table " + tableName + " position="
+    LOG.info("Creating table " + dbDotTab + " position="
         + ast.getCharPositionInLine());
     int numCh = ast.getChildCount();
 
@@ -10291,7 +10293,7 @@ public class SemanticAnalyzer extends Ba
       case HiveParser.TOK_TABLEPARTCOLS:
         partCols = getColumns((ASTNode) child.getChild(0), false);
         break;
-      case HiveParser.TOK_TABLEBUCKETS:
+      case HiveParser.TOK_ALTERTABLE_BUCKETS:
         bucketCols = getColumnNames((ASTNode) child.getChild(0));
         if (child.getChildCount() == 2) {
           numBuckets = (Integer.valueOf(child.getChild(1).getText()))
@@ -10350,7 +10352,7 @@ public class SemanticAnalyzer extends Ba
     // check for existence of table
     if (ifNotExists) {
       try {
-        Table table = getTable(tableName, false);
+        Table table = getTable(qualifiedTabName, false);
         if (table != null) { // table exists
           return null;
         }
@@ -10360,11 +10362,7 @@ public class SemanticAnalyzer extends Ba
       }
     }
 
-    String[] qualified = Hive.getQualifiedNames(tableName);
-    String dbName = qualified.length == 1 ? SessionState.get().getCurrentDatabase() : qualified[0];
-    Database database  = getDatabase(dbName);
-    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
-    outputs.add(new WriteEntity(new Table(dbName, tableName), WriteEntity.WriteType.DDL_NO_LOCK));
+    addDbAndTabToOutputs(qualifiedTabName);
 
     if (isTemporary) {
       if (partCols.size() > 0) {
@@ -10393,7 +10391,7 @@ public class SemanticAnalyzer extends Ba
     case CREATE_TABLE: // REGULAR CREATE TABLE DDL
       tblProps = addDefaultProperties(tblProps);
 
-      crtTblDesc = new CreateTableDesc(tableName, isExt, isTemporary, cols, partCols,
+      crtTblDesc = new CreateTableDesc(dbDotTab, isExt, isTemporary, cols, partCols,
           bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
           rowFormatParams.fieldEscape,
           rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
@@ -10422,7 +10420,7 @@ public class SemanticAnalyzer extends Ba
               + "and source table in CREATE TABLE LIKE is partitioned.");
         }
       }
-      CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(tableName, isExt, isTemporary,
+      CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(dbDotTab, isExt, isTemporary,
           storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location,
           storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists,
           likeTableName);
@@ -10435,9 +10433,9 @@ public class SemanticAnalyzer extends Ba
 
       // Verify that the table does not already exist
       try {
-        Table dumpTable = db.newTable(tableName);
+        Table dumpTable = db.newTable(dbDotTab);
         if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false)) {
-          throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(tableName));
+          throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(dbDotTab));
         }
       } catch (HiveException e) {
         throw new SemanticException(e);
@@ -10445,11 +10443,10 @@ public class SemanticAnalyzer extends Ba
 
       tblProps = addDefaultProperties(tblProps);
 
-      crtTblDesc = new CreateTableDesc(dbName, tableName, isExt, isTemporary, cols, partCols,
-          bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
-          rowFormatParams.fieldEscape,
-          rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
-          comment, storageFormat.getInputFormat(),
+      crtTblDesc = new CreateTableDesc(qualifiedTabName[0], dbDotTab, isExt, isTemporary, cols,
+          partCols, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
+          rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim,
+          rowFormatParams.lineDelim, comment, storageFormat.getInputFormat(),
           storageFormat.getOutputFormat(), location, storageFormat.getSerde(),
           storageFormat.getStorageHandler(), storageFormat.getSerdeProps(), tblProps, ifNotExists,
           skewedColNames, skewedValues);
@@ -10466,9 +10463,17 @@ public class SemanticAnalyzer extends Ba
     return null;
   }
 
+  private void addDbAndTabToOutputs(String[] qualifiedTabName) throws SemanticException {
+    Database database  = getDatabase(qualifiedTabName[0]);
+    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
+    outputs.add(new WriteEntity(new Table(qualifiedTabName[0], qualifiedTabName[1]),
+        WriteEntity.WriteType.DDL_NO_LOCK));
+  }
+
   private ASTNode analyzeCreateView(ASTNode ast, QB qb)
       throws SemanticException {
-    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
+    String[] qualTabName = getQualifiedTableName((ASTNode) ast.getChild(0));
+    String dbDotTable = getDotName(qualTabName);
     List<FieldSchema> cols = null;
     boolean ifNotExists = false;
     boolean orReplace = false;
@@ -10478,7 +10483,7 @@ public class SemanticAnalyzer extends Ba
     Map<String, String> tblProps = null;
     List<String> partColNames = null;
 
-    LOG.info("Creating view " + tableName + " position="
+    LOG.info("Creating view " + dbDotTable + " position="
         + ast.getCharPositionInLine());
     int numCh = ast.getChildCount();
     for (int num = 1; num < numCh; num++) {
@@ -10514,19 +10519,21 @@ public class SemanticAnalyzer extends Ba
       throw new SemanticException("Can't combine IF NOT EXISTS and OR REPLACE.");
     }
 
-    if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_AS) {
+    if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW &&
+        ast.getChild(1).getType() == HiveParser.TOK_QUERY) {
       isAlterViewAs = true;
       orReplace = true;
     }
 
     createVwDesc = new CreateViewDesc(
-      tableName, cols, comment, tblProps, partColNames,
+      dbDotTable, cols, comment, tblProps, partColNames,
       ifNotExists, orReplace, isAlterViewAs);
 
     unparseTranslator.enable();
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         createVwDesc), conf));
 
+    addDbAndTabToOutputs(qualTabName);
     return selectStmt;
   }
 

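The two hunks above move CREATE TABLE and CREATE VIEW analysis from a single unescaped name to a qualified {database, table} pair plus its dotted "db.tab" form, and register the target database as a shared-DDL write entity. A minimal standalone sketch of that name resolution, assuming a hypothetical resolveQualifiedName helper and a caller-supplied current database (the patch itself relies on the existing getQualifiedTableName/getDotName helpers, not this code):

    import java.util.Objects;

    final class QualifiedNameSketch {
      /** Split an optionally qualified "db.tab" name, falling back to the current database. */
      static String[] resolveQualifiedName(String name, String currentDb) {
        Objects.requireNonNull(name, "name");
        int dot = name.indexOf('.');
        if (dot < 0) {
          return new String[] { currentDb, name };
        }
        return new String[] { name.substring(0, dot), name.substring(dot + 1) };
      }

      /** Re-join the pair into the dotted form that the patch passes to CreateTableDesc. */
      static String dotName(String[] qualified) {
        return qualified[0] + "." + qualified[1];
      }

      public static void main(String[] args) {
        String[] q = resolveQualifiedName("add_part_test_db.add_part_test", "default");
        System.out.println(dotName(q));                                      // add_part_test_db.add_part_test
        System.out.println(dotName(resolveQualifiedName("t1", "default")));  // default.t1
      }
    }
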
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Thu Aug 28 03:15:13 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.parse;
 
 import java.util.HashMap;
 
+import org.antlr.runtime.tree.Tree;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -57,7 +58,7 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_ALTERTABLE_ARCHIVE, HiveOperation.ALTERTABLE_ARCHIVE);
     commandType.put(HiveParser.TOK_ALTERTABLE_UNARCHIVE, HiveOperation.ALTERTABLE_UNARCHIVE);
     commandType.put(HiveParser.TOK_ALTERTABLE_PROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES);
-    commandType.put(HiveParser.TOK_DROPTABLE_PROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES);
+    commandType.put(HiveParser.TOK_ALTERTABLE_DROPPROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES);
     commandType.put(HiveParser.TOK_SHOWDATABASES, HiveOperation.SHOWDATABASES);
     commandType.put(HiveParser.TOK_SHOWTABLES, HiveOperation.SHOWTABLES);
     commandType.put(HiveParser.TOK_SHOWCOLUMNS, HiveOperation.SHOWCOLUMNS);
@@ -81,9 +82,11 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_ALTERINDEX_REBUILD, HiveOperation.ALTERINDEX_REBUILD);
     commandType.put(HiveParser.TOK_ALTERINDEX_PROPERTIES, HiveOperation.ALTERINDEX_PROPS);
     commandType.put(HiveParser.TOK_ALTERVIEW_PROPERTIES, HiveOperation.ALTERVIEW_PROPERTIES);
-    commandType.put(HiveParser.TOK_DROPVIEW_PROPERTIES, HiveOperation.ALTERVIEW_PROPERTIES);
+    commandType.put(HiveParser.TOK_ALTERVIEW_DROPPROPERTIES, HiveOperation.ALTERVIEW_PROPERTIES);
     commandType.put(HiveParser.TOK_ALTERVIEW_ADDPARTS, HiveOperation.ALTERTABLE_ADDPARTS);
     commandType.put(HiveParser.TOK_ALTERVIEW_DROPPARTS, HiveOperation.ALTERTABLE_DROPPARTS);
+    commandType.put(HiveParser.TOK_ALTERVIEW_RENAME, HiveOperation.ALTERVIEW_RENAME);
+    commandType.put(HiveParser.TOK_ALTERVIEW, HiveOperation.ALTERVIEW_AS);
     commandType.put(HiveParser.TOK_QUERY, HiveOperation.QUERY);
     commandType.put(HiveParser.TOK_LOCKTABLE, HiveOperation.LOCKTABLE);
     commandType.put(HiveParser.TOK_UNLOCKTABLE, HiveOperation.UNLOCKTABLE);
@@ -105,11 +108,9 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_DESCDATABASE, HiveOperation.DESCDATABASE);
     commandType.put(HiveParser.TOK_ALTERTABLE_SKEWED, HiveOperation.ALTERTABLE_SKEWED);
     commandType.put(HiveParser.TOK_ANALYZE, HiveOperation.ANALYZE_TABLE);
-    commandType.put(HiveParser.TOK_ALTERVIEW_RENAME, HiveOperation.ALTERVIEW_RENAME);
     commandType.put(HiveParser.TOK_ALTERTABLE_PARTCOLTYPE, HiveOperation.ALTERTABLE_PARTCOLTYPE);
     commandType.put(HiveParser.TOK_SHOW_COMPACTIONS, HiveOperation.SHOW_COMPACTIONS);
     commandType.put(HiveParser.TOK_SHOW_TRANSACTIONS, HiveOperation.SHOW_TRANSACTIONS);
-    commandType.put(HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS, HiveOperation.ALTERTABLE_UPDATETABLESTATS);
   }
 
   static {
@@ -134,17 +135,20 @@ public final class SemanticAnalyzerFacto
             HiveOperation.ALTERPARTITION_SERDEPROPERTIES });
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_RENAMEPART,
         new HiveOperation[] {null, HiveOperation.ALTERTABLE_RENAMEPART});
-    tablePartitionCommandType.put(HiveParser.TOK_COMPACT,
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_COMPACT,
         new HiveOperation[] {HiveOperation.ALTERTABLE_COMPACT, HiveOperation.ALTERTABLE_COMPACT});
-    tablePartitionCommandType.put(HiveParser.TOK_ALTERTBLPART_SKEWED_LOCATION,
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION,
         new HiveOperation[] {HiveOperation.ALTERTBLPART_SKEWED_LOCATION,
             HiveOperation.ALTERTBLPART_SKEWED_LOCATION });
-    tablePartitionCommandType.put(HiveParser.TOK_TABLEBUCKETS,
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_BUCKETS,
         new HiveOperation[] {HiveOperation.ALTERTABLE_BUCKETNUM,
             HiveOperation.ALTERPARTITION_BUCKETNUM});
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_CLUSTER_SORT,
         new HiveOperation[] {HiveOperation.ALTERTABLE_CLUSTER_SORT,
             HiveOperation.ALTERTABLE_CLUSTER_SORT});
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS,
+        new HiveOperation[] {HiveOperation.ALTERTABLE_UPDATETABLESTATS,
+            HiveOperation.ALTERTABLE_UPDATEPARTSTATS});
   }
 
   public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
@@ -152,9 +156,9 @@ public final class SemanticAnalyzerFacto
     if (tree.getToken() == null) {
       throw new RuntimeException("Empty Syntax Tree");
     } else {
-      setSessionCommandType(commandType.get(tree.getToken().getType()));
+      setSessionCommandType(commandType.get(tree.getType()));
 
-      switch (tree.getToken().getType()) {
+      switch (tree.getType()) {
       case HiveParser.TOK_EXPLAIN:
         return new ExplainSemanticAnalyzer(conf);
       case HiveParser.TOK_EXPLAIN_SQ_REWRITE:
@@ -165,6 +169,47 @@ public final class SemanticAnalyzerFacto
         return new ExportSemanticAnalyzer(conf);
       case HiveParser.TOK_IMPORT:
         return new ImportSemanticAnalyzer(conf);
+      case HiveParser.TOK_ALTERTABLE: {
+        Tree child = tree.getChild(1);
+        switch (child.getType()) {
+          case HiveParser.TOK_ALTERTABLE_RENAME:
+          case HiveParser.TOK_ALTERTABLE_TOUCH:
+          case HiveParser.TOK_ALTERTABLE_ARCHIVE:
+          case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
+          case HiveParser.TOK_ALTERTABLE_ADDCOLS:
+          case HiveParser.TOK_ALTERTABLE_RENAMECOL:
+          case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
+          case HiveParser.TOK_ALTERTABLE_DROPPARTS:
+          case HiveParser.TOK_ALTERTABLE_ADDPARTS:
+          case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE:
+          case HiveParser.TOK_ALTERTABLE_PROPERTIES:
+          case HiveParser.TOK_ALTERTABLE_DROPPROPERTIES:
+          case HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
+          case HiveParser.TOK_ALTERTABLE_SKEWED:
+          setSessionCommandType(commandType.get(child.getType()));
+          return new DDLSemanticAnalyzer(conf);
+        }
+        HiveOperation commandType =
+            tablePartitionCommandType.get(child.getType())[tree.getChildCount() > 2 ? 1 : 0];
+        setSessionCommandType(commandType);
+        return new DDLSemanticAnalyzer(conf);
+      }
+      case HiveParser.TOK_ALTERVIEW: {
+        Tree child = tree.getChild(1);
+        switch (child.getType()) {
+          case HiveParser.TOK_ALTERVIEW_PROPERTIES:
+          case HiveParser.TOK_ALTERVIEW_DROPPROPERTIES:
+          case HiveParser.TOK_ALTERVIEW_ADDPARTS:
+          case HiveParser.TOK_ALTERVIEW_DROPPARTS:
+          case HiveParser.TOK_ALTERVIEW_RENAME:
+            setSessionCommandType(commandType.get(child.getType()));
+            return new DDLSemanticAnalyzer(conf);
+        }
+        // TOK_ALTERVIEW_AS
+        assert child.getType() == HiveParser.TOK_QUERY;
+        setSessionCommandType(HiveOperation.ALTERVIEW_AS);
+        return new SemanticAnalyzer(conf);
+      }
       case HiveParser.TOK_CREATEDATABASE:
       case HiveParser.TOK_DROPDATABASE:
       case HiveParser.TOK_SWITCHDATABASE:
@@ -174,24 +219,8 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_DESCTABLE:
       case HiveParser.TOK_DESCFUNCTION:
       case HiveParser.TOK_MSCK:
-      case HiveParser.TOK_ALTERTABLE_ADDCOLS:
-      case HiveParser.TOK_ALTERTABLE_RENAMECOL:
-      case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
-      case HiveParser.TOK_ALTERTABLE_RENAME:
-      case HiveParser.TOK_ALTERTABLE_DROPPARTS:
-      case HiveParser.TOK_ALTERTABLE_ADDPARTS:
-      case HiveParser.TOK_ALTERTABLE_PROPERTIES:
-      case HiveParser.TOK_DROPTABLE_PROPERTIES:
-      case HiveParser.TOK_ALTERTABLE_SERIALIZER:
-      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
-      case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE:
       case HiveParser.TOK_ALTERINDEX_REBUILD:
       case HiveParser.TOK_ALTERINDEX_PROPERTIES:
-      case HiveParser.TOK_ALTERVIEW_PROPERTIES:
-      case HiveParser.TOK_DROPVIEW_PROPERTIES:
-      case HiveParser.TOK_ALTERVIEW_ADDPARTS:
-      case HiveParser.TOK_ALTERVIEW_DROPPARTS:
-      case HiveParser.TOK_ALTERVIEW_RENAME:
       case HiveParser.TOK_SHOWDATABASES:
       case HiveParser.TOK_SHOWTABLES:
       case HiveParser.TOK_SHOWCOLUMNS:
@@ -209,9 +238,6 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_CREATEINDEX:
       case HiveParser.TOK_DROPINDEX:
       case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
-      case HiveParser.TOK_ALTERTABLE_TOUCH:
-      case HiveParser.TOK_ALTERTABLE_ARCHIVE:
-      case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
       case HiveParser.TOK_LOCKTABLE:
       case HiveParser.TOK_UNLOCKTABLE:
       case HiveParser.TOK_LOCKDB:
@@ -228,23 +254,8 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_SHOW_ROLES:
       case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
       case HiveParser.TOK_ALTERDATABASE_OWNER:
-      case HiveParser.TOK_ALTERTABLE_SKEWED:
       case HiveParser.TOK_TRUNCATETABLE:
-      case HiveParser.TOK_EXCHANGEPARTITION:
       case HiveParser.TOK_SHOW_SET_ROLE:
-      case HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS:
-        return new DDLSemanticAnalyzer(conf);
-      case HiveParser.TOK_ALTERTABLE_PARTITION:
-        HiveOperation commandType = null;
-        Integer type = ((ASTNode) tree.getChild(1)).getToken().getType();
-        if (type == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS) {
-          commandType = HiveOperation.ALTERTABLE_UPDATEPARTSTATS;
-        } else if (tree.getChild(0).getChildCount() > 1) {
-          commandType = tablePartitionCommandType.get(type)[1];
-        } else {
-          commandType = tablePartitionCommandType.get(type)[0];
-        }
-        setSessionCommandType(commandType);
         return new DDLSemanticAnalyzer(conf);
 
       case HiveParser.TOK_CREATEFUNCTION:

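After the grammar rework, ALTER TABLE and ALTER VIEW statements arrive as a single TOK_ALTERTABLE / TOK_ALTERVIEW root whose child names the concrete operation, so the factory above dispatches on the child token, falling back to a table-vs-partition lookup keyed by the root's child count. A minimal sketch of that dispatch shape, with made-up token ids and operation strings standing in for the real HiveParser tokens and HiveOperation values:

    import java.util.HashMap;
    import java.util.Map;

    final class AlterDispatchSketch {
      // Made-up token ids; the real constants live in HiveParser.
      static final int TOK_ALTERTABLE_RENAME = 101;
      static final int TOK_ALTERTABLE_COMPACT = 102;

      private static final Map<Integer, String> SIMPLE_DDL = new HashMap<>();
      private static final Map<Integer, String[]> TABLE_OR_PARTITION = new HashMap<>();
      static {
        SIMPLE_DDL.put(TOK_ALTERTABLE_RENAME, "ALTERTABLE_RENAME");
        TABLE_OR_PARTITION.put(TOK_ALTERTABLE_COMPACT,
            new String[] { "ALTERTABLE_COMPACT (table)", "ALTERTABLE_COMPACT (partition)" });
      }

      /** Resolve the operation from the child token under the combined ALTER root. */
      static String dispatch(int childType, int rootChildCount) {
        String op = SIMPLE_DDL.get(childType);
        if (op != null) {
          return op;
        }
        String[] variants = TABLE_OR_PARTITION.get(childType);
        // index 1 when a partition spec makes the root carry an extra child
        return variants[rootChildCount > 2 ? 1 : 0];
      }

      public static void main(String[] args) {
        System.out.println(dispatch(TOK_ALTERTABLE_RENAME, 2));   // ALTERTABLE_RENAME
        System.out.println(dispatch(TOK_ALTERTABLE_COMPACT, 3));  // ALTERTABLE_COMPACT (partition)
      }
    }
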
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java Thu Aug 28 03:15:13 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.parse;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
@@ -227,11 +228,19 @@ public abstract class TaskCompiler {
 
       crtTblDesc.validate(conf);
 
-      // Clear the output for CTAS since we don't need the output from the
-      // mapredWork, the
+      // clear the mapredWork output file from outputs for CTAS
       // DDLWork at the tail of the chain will have the output
-      outputs.clear();
-
+      Iterator<WriteEntity> outIter = outputs.iterator();
+      while (outIter.hasNext()) {
+        switch (outIter.next().getType()) {
+        case DFS_DIR:
+        case LOCAL_DIR:
+          outIter.remove();
+          break;
+        default:
+          break;
+        }
+      }
       Task<? extends Serializable> crtTblTask = TaskFactory.get(new DDLWork(
           inputs, outputs, crtTblDesc), conf);
 

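Instead of clearing every CTAS output, the hunk above walks the output set and drops only the DFS_DIR / LOCAL_DIR entries produced for the mapredWork, keeping the table and database write entities for the trailing DDLWork. The same remove-while-iterating pattern in isolation (Iterator.remove is what keeps this safe on a HashSet; removing from the set directly inside the loop would throw ConcurrentModificationException). Entity names below are placeholders, not Hive's WriteEntity types:

    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.Set;

    final class OutputPruneSketch {
      enum EntityType { DFS_DIR, LOCAL_DIR, TABLE, DATABASE }

      public static void main(String[] args) {
        Set<EntityType> outputs = new HashSet<>();
        outputs.add(EntityType.DFS_DIR);
        outputs.add(EntityType.TABLE);
        outputs.add(EntityType.DATABASE);

        // Remove only directory outputs; Iterator.remove avoids
        // ConcurrentModificationException while the set is being traversed.
        Iterator<EntityType> it = outputs.iterator();
        while (it.hasNext()) {
          switch (it.next()) {
          case DFS_DIR:
          case LOCAL_DIR:
            it.remove();
            break;
          default:
            break;
          }
        }
        System.out.println(outputs); // [TABLE, DATABASE] in some order
      }
    }
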
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java Thu Aug 28 03:15:13 2014
@@ -44,11 +44,19 @@ public class AlterTableDesc extends DDLD
    *
    */
   public static enum AlterTableTypes {
-    RENAME, ADDCOLS, REPLACECOLS, ADDPROPS, DROPPROPS, ADDSERDE, ADDSERDEPROPS,
-    ADDFILEFORMAT, ADDCLUSTERSORTCOLUMN, RENAMECOLUMN, ADDPARTITION,
-    TOUCH, ARCHIVE, UNARCHIVE, ALTERPROTECTMODE, ALTERPARTITIONPROTECTMODE,
-    ALTERLOCATION, DROPPARTITION, RENAMEPARTITION, ADDSKEWEDBY, ALTERSKEWEDLOCATION,
-    ALTERBUCKETNUM, ALTERPARTITION, COMPACT
+    RENAME("rename"), ADDCOLS("add columns"), REPLACECOLS("replace columns"),
+    ADDPROPS("add props"), DROPPROPS("drop props"), ADDSERDE("add serde"), ADDSERDEPROPS("add serde props"),
+    ADDFILEFORMAT("add fileformat"), ADDCLUSTERSORTCOLUMN("add cluster sort column"),
+    RENAMECOLUMN("rename column"), ADDPARTITION("add partition"), TOUCH("touch"), ARCHIVE("archive"),
+    UNARCHIVE("unarchive"), ALTERPROTECTMODE("alter protect mode"),
+    ALTERPARTITIONPROTECTMODE("alter partition protect mode"), ALTERLOCATION("alter location"),
+    DROPPARTITION("drop partition"), RENAMEPARTITION("rename partition"), ADDSKEWEDBY("add skew column"),
+    ALTERSKEWEDLOCATION("alter skew location"), ALTERBUCKETNUM("alter bucket number"),
+    ALTERPARTITION("alter partition"), COMPACT("compact");
+
+    private final String name;
+    private AlterTableTypes(String name) { this.name = name; }
+    public String getName() { return name; }
   }
 
   public static enum ProtectModeType {
@@ -236,16 +244,7 @@ public class AlterTableDesc extends DDLD
 
   @Explain(displayName = "type")
   public String getAlterTableTypeString() {
-    switch (op) {
-    case RENAME:
-      return "rename";
-    case ADDCOLS:
-      return "add columns";
-    case REPLACECOLS:
-      return "replace columns";
-    }
-
-    return "unknown";
+    return op.getName();
   }
 
   /**

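The enum rewrite above attaches a display name to every AlterTableTypes constant so getAlterTableTypeString() can simply return op.getName(), replacing a switch that covered only three operations and fell through to "unknown" for the rest. The pattern on its own, with placeholder constants rather than the full AlterTableTypes list:

    final class EnumDisplayNameSketch {
      // Placeholder enum; only the shape mirrors AlterTableTypes in the patch.
      enum AlterType {
        RENAME("rename"),
        ADDCOLS("add columns"),
        COMPACT("compact");

        private final String name;
        AlterType(String name) { this.name = name; }
        String getName() { return name; }
      }

      public static void main(String[] args) {
        for (AlterType t : AlterType.values()) {
          System.out.println(t + " -> " + t.getName()); // e.g. ADDCOLS -> add columns
        }
      }
    }
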
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Thu Aug 28 03:15:13 2014
@@ -114,8 +114,8 @@ public enum HiveOperation {
       new Privilege[] {Privilege.ALTER_DATA}, null),
   ALTERTABLE_PARTCOLTYPE("ALTERTABLE_PARTCOLTYPE", new Privilege[] { Privilege.SELECT }, new Privilege[] { Privilege.ALTER_DATA }),
   ALTERVIEW_RENAME("ALTERVIEW_RENAME", new Privilege[] {Privilege.ALTER_METADATA}, null),
-  ALTERTABLE_COMPACT("ALTERTABLE_COMPACT", new Privilege[]{Privilege.SELECT},
-      new Privilege[]{Privilege.ALTER_DATA}),
+  ALTERVIEW_AS("ALTERVIEW_AS", new Privilege[] {Privilege.ALTER_METADATA}, null),
+  ALTERTABLE_COMPACT("ALTERTABLE_COMPACT", new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.ALTER_DATA}),
   SHOW_COMPACTIONS("SHOW COMPACTIONS", null, null),
   SHOW_TRANSACTIONS("SHOW TRANSACTIONS", null, null);
   ;

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Thu Aug 28 03:15:13 2014
@@ -347,7 +347,7 @@ public final class PlanUtils {
 
       if (crtTblDesc.getTableName() != null && crtTblDesc.getDatabaseName() != null) {
         properties.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME,
-            crtTblDesc.getDatabaseName() + "." + crtTblDesc.getTableName());
+            crtTblDesc.getTableName());
       }
 
       if (crtTblDesc.getTblProps() != null) {

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java Thu Aug 28 03:15:13 2014
@@ -114,6 +114,7 @@ public enum HiveOperationType {
   ALTERTABLE_SKEWED,
   ALTERTBLPART_SKEWED_LOCATION,
   ALTERVIEW_RENAME,
+  ALTERVIEW_AS,
   ALTERTABLE_COMPACT,
   SHOW_COMPACTIONS,
   SHOW_TRANSACTIONS,

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java Thu Aug 28 03:15:13 2014
@@ -246,6 +246,8 @@ public class Operation2Privilege {
 (OWNER_PRIV_AR, OWNER_PRIV_AR));
     op2Priv.put(HiveOperationType.ALTERVIEW_RENAME, PrivRequirement.newIOPrivRequirement
 (OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERVIEW_AS, PrivRequirement.newIOPrivRequirement
+(OWNER_PRIV_AR, OWNER_PRIV_AR));
     op2Priv.put(HiveOperationType.DROPVIEW, PrivRequirement.newIOPrivRequirement
 (OWNER_PRIV_AR, OWNER_PRIV_AR));
 
@@ -276,8 +278,9 @@ public class Operation2Privilege {
 (SEL_NOGRANT_AR, null));
     op2Priv.put(HiveOperationType.SHOW_TBLPROPERTIES, PrivRequirement.newIOPrivRequirement
 (SEL_NOGRANT_AR, null));
-    op2Priv.put(HiveOperationType.CREATETABLE_AS_SELECT, PrivRequirement.newIOPrivRequirement
-(SEL_NOGRANT_AR, null));
+    op2Priv.put(HiveOperationType.CREATETABLE_AS_SELECT, PrivRequirement.newPrivRequirementList(
+        new PrivRequirement(SEL_NOGRANT_AR, IOType.INPUT),
+        new PrivRequirement(OWNER_PRIV_AR, HivePrivilegeObjectType.DATABASE)));
 
     // QUERY,LOAD op can contain an insert & overwrite,
     // require delete privilege if this is an insert-overwrite
@@ -300,8 +303,9 @@ public class Operation2Privilege {
 
     // for now allow only create-view with 'select with grant'
     // the owner will also have select with grant privileges on new view
-    op2Priv.put(HiveOperationType.CREATEVIEW, PrivRequirement.newIOPrivRequirement
-(SEL_GRANT_AR, null));
+    op2Priv.put(HiveOperationType.CREATEVIEW, PrivRequirement.newPrivRequirementList(
+        new PrivRequirement(SEL_GRANT_AR, IOType.INPUT),
+        new PrivRequirement(OWNER_PRIV_AR, HivePrivilegeObjectType.DATABASE)));
 
     op2Priv.put(HiveOperationType.SHOWFUNCTIONS, PrivRequirement.newIOPrivRequirement
 (null, null));

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java Thu Aug 28 03:15:13 2014
@@ -240,7 +240,7 @@ public class StatsUtils {
           if (aggrStats.getPartsFound() != partNames.size() && colState != State.NONE) {
             LOG.debug("Column stats requested for : " + partNames.size() +" partitions. "
               + "Able to retrieve for " + aggrStats.getPartsFound() + " partitions");
-            stats.updateColumnStatsState(State.PARTIAL);
+            colState = State.PARTIAL;
           }
           stats.setColumnStatsState(colState);
         }

Modified: hive/branches/cbo/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto (original)
+++ hive/branches/cbo/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto Thu Aug 28 03:15:13 2014
@@ -53,6 +53,12 @@ message DateStatistics {
   optional sint32 maximum = 2;
 }
 
+message TimestampStatistics {
+  // min,max values saved as milliseconds since epoch
+  optional sint64 minimum = 1;
+  optional sint64 maximum = 2;
+}
+
 message BinaryStatistics {
   // sum will store the total binary blob length in a stripe
   optional sint64 sum = 1;
@@ -67,6 +73,7 @@ message ColumnStatistics {
   optional DecimalStatistics decimalStatistics = 6;
   optional DateStatistics dateStatistics = 7;
   optional BinaryStatistics binaryStatistics = 8;
+  optional TimestampStatistics timestampStatistics = 9;
 }
 
 message RowIndexEntry {

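The new TimestampStatistics message keeps its minimum and maximum as sint64 milliseconds since the epoch. A small sketch of the round trip a writer and reader of such fields would perform on java.sql.Timestamp values (plain JDK code, not the ORC writer itself; sub-millisecond nanos are not carried by the millisecond representation):

    import java.sql.Timestamp;

    final class TimestampStatsSketch {
      public static void main(String[] args) {
        Timestamp min = Timestamp.valueOf("2014-08-28 03:15:13");
        Timestamp max = Timestamp.valueOf("2014-08-28 05:15:19");

        // What would land in the protobuf fields: epoch millis.
        long minMillis = min.getTime();
        long maxMillis = max.getTime();

        // Reading back: millis -> Timestamp.
        Timestamp restoredMin = new Timestamp(minMillis);
        System.out.println(restoredMin.equals(min)); // true for whole-millisecond values
        System.out.println(maxMillis - minMillis);   // range width in millis
      }
    }
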
Modified: hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java (original)
+++ hive/branches/cbo/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java Thu Aug 28 03:15:13 2014
@@ -92,7 +92,7 @@ public class TestFileDump {
     }
     conf.set(HiveConf.ConfVars.HIVE_ORC_ENCODING_STRATEGY.varname, "COMPRESSION");
     Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.ZLIB, 10000, 10000);
+        100000, CompressionKind.ZLIB, 10000, 1000);
     Random r1 = new Random(1);
     String[] words = new String[]{"It", "was", "the", "best", "of", "times,",
         "it", "was", "the", "worst", "of", "times,", "it", "was", "the", "age",
@@ -116,7 +116,7 @@ public class TestFileDump {
 
     // replace stdout and run command
     System.setOut(new PrintStream(myOut));
-    FileDump.main(new String[]{testFilePath.toString()});
+    FileDump.main(new String[]{testFilePath.toString(), "--rowindex=1,2,3"});
     System.out.flush();
     System.setOut(origOut);
 
@@ -138,7 +138,7 @@ public class TestFileDump {
     conf.set(HiveConf.ConfVars.HIVE_ORC_ENCODING_STRATEGY.varname, "COMPRESSION");
     conf.setFloat(HiveConf.ConfVars.HIVE_ORC_DICTIONARY_KEY_SIZE_THRESHOLD.varname, 0.49f);
     Writer writer = OrcFile.createWriter(fs, testFilePath, conf, inspector,
-        100000, CompressionKind.ZLIB, 10000, 10000);
+        100000, CompressionKind.ZLIB, 10000, 1000);
     Random r1 = new Random(1);
     String[] words = new String[]{"It", "was", "the", "best", "of", "times,",
         "it", "was", "the", "worst", "of", "times,", "it", "was", "the", "age",
@@ -171,7 +171,7 @@ public class TestFileDump {
 
     // replace stdout and run command
     System.setOut(new PrintStream(myOut));
-    FileDump.main(new String[]{testFilePath.toString()});
+    FileDump.main(new String[]{testFilePath.toString(), "--rowindex=1,2,3"});
     System.out.flush();
     System.setOut(origOut);
 

Modified: hive/branches/cbo/ql/src/test/queries/clientpositive/add_part_exist.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientpositive/add_part_exist.q?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientpositive/add_part_exist.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientpositive/add_part_exist.q Thu Aug 28 03:15:13 2014
@@ -18,20 +18,21 @@ SHOW TABLES;
 
 -- Test ALTER TABLE ADD PARTITION in non-default Database
 CREATE DATABASE add_part_test_db;
-USE add_part_test_db;
-SHOW TABLES;
 
-CREATE TABLE add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING);
-SHOW PARTITIONS add_part_test;
+CREATE TABLE add_part_test_db.add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING);
+SHOW PARTITIONS add_part_test_db.add_part_test;
 
-ALTER TABLE add_part_test ADD PARTITION (ds='2010-01-01');
-SHOW PARTITIONS add_part_test;
+ALTER TABLE add_part_test_db.add_part_test ADD PARTITION (ds='2010-01-01');
+SHOW PARTITIONS add_part_test_db.add_part_test;
 
-ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01');
-SHOW PARTITIONS add_part_test;
+ALTER TABLE add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01');
+SHOW PARTITIONS add_part_test_db.add_part_test;
 
-ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02');
-SHOW PARTITIONS add_part_test;
+ALTER TABLE add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-02');
+SHOW PARTITIONS add_part_test_db.add_part_test;
 
-ALTER TABLE add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03');
-SHOW PARTITIONS add_part_test;
+ALTER TABLE add_part_test_db.add_part_test ADD IF NOT EXISTS PARTITION (ds='2010-01-01') PARTITION (ds='2010-01-02') PARTITION (ds='2010-01-03');
+SHOW PARTITIONS add_part_test_db.add_part_test;
+
+DROP TABLE add_part_test_db.add_part_test;
+DROP DATABASE add_part_test_db;

Modified: hive/branches/cbo/ql/src/test/queries/clientpositive/alter1.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientpositive/alter1.q?rev=1621031&r1=1621030&r2=1621031&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientpositive/alter1.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientpositive/alter1.q Thu Aug 28 03:15:13 2014
@@ -32,40 +32,38 @@ SHOW TABLES;
 -- With non-default Database
 
 CREATE DATABASE alter1_db;
-USE alter1_db;
-SHOW TABLES;
+SHOW TABLES alter1_db;
 
-CREATE TABLE alter1(a INT, b INT);
-DESCRIBE EXTENDED alter1;
+CREATE TABLE alter1_db.alter1(a INT, b INT);
+DESCRIBE EXTENDED alter1_db.alter1;
 
-ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='3');
-DESCRIBE EXTENDED alter1;
+ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('a'='1', 'c'='3');
+DESCRIBE EXTENDED alter1_db.alter1;
 
-ALTER TABLE alter1 SET TBLPROPERTIES ('a'='1', 'c'='4', 'd'='3');
-DESCRIBE EXTENDED alter1;
+ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('a'='1', 'c'='4', 'd'='3');
+DESCRIBE EXTENDED alter1_db.alter1;
 
-ALTER TABLE alter1 SET TBLPROPERTIES ('EXTERNAL'='TRUE');
-DESCRIBE EXTENDED alter1;
+ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('EXTERNAL'='TRUE');
+DESCRIBE EXTENDED alter1_db.alter1;
 
-ALTER TABLE alter1 SET TBLPROPERTIES ('EXTERNAL'='FALSE');
-DESCRIBE EXTENDED alter1;
+ALTER TABLE alter1_db.alter1 SET TBLPROPERTIES ('EXTERNAL'='FALSE');
+DESCRIBE EXTENDED alter1_db.alter1;
 
-ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='9');
-DESCRIBE EXTENDED alter1;
+ALTER TABLE alter1_db.alter1 SET SERDEPROPERTIES('s1'='9');
+DESCRIBE EXTENDED alter1_db.alter1;
 
-ALTER TABLE alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20');
-DESCRIBE EXTENDED alter1;
+ALTER TABLE alter1_db.alter1 SET SERDEPROPERTIES('s1'='10', 's2' ='20');
+DESCRIBE EXTENDED alter1_db.alter1;
 
 add jar ${system:maven.local.repository}/org/apache/hive/hive-it-test-serde/${system:hive.version}/hive-it-test-serde-${system:hive.version}.jar;
-ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9');
-DESCRIBE EXTENDED alter1;
+ALTER TABLE alter1_db.alter1 SET SERDE 'org.apache.hadoop.hive.serde2.TestSerDe' WITH SERDEPROPERTIES ('s1'='9');
+DESCRIBE EXTENDED alter1_db.alter1;
 
-ALTER TABLE alter1 SET SERDE 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe';
-DESCRIBE EXTENDED alter1;
+ALTER TABLE alter1_db.alter1 SET SERDE 'org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe';
+DESCRIBE EXTENDED alter1_db.alter1;
 
-ALTER TABLE alter1 REPLACE COLUMNS (a int, b int, c string);
-DESCRIBE alter1;
+ALTER TABLE alter1_db.alter1 REPLACE COLUMNS (a int, b int, c string);
+DESCRIBE alter1_db.alter1;
 
-DROP TABLE alter1;
-USE default;
+DROP TABLE alter1_db.alter1;
 DROP DATABASE alter1_db;