Posted to commits@hive.apache.org by na...@apache.org on 2012/11/02 12:20:29 UTC

svn commit: r1404924 [2/6] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ conf/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/had...

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1404924&r1=1404923&r2=1404924&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Fri Nov  2 11:20:26 2012
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.ql.hooks.W
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
+import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
@@ -900,4 +901,48 @@ public abstract class BaseSemanticAnalyz
   public QueryProperties getQueryProperties() {
     return queryProperties;
   }
+
+  /**
+   * Given an ASTNode, return the list of values it holds.
+   *
+   * Use case:
+   *   create table xyz list bucketed (col1) with skew (1,2,5)
+   *   The AST node passed in is the one for (1,2,5).
+   * @param ast node whose children are the skewed values
+   * @return list of unquoted, lower-cased values
+   */
+  protected List<String> getSkewedValueFromASTNode(ASTNode ast) {
+    List<String> colList = new ArrayList<String>();
+    int numCh = ast.getChildCount();
+    for (int i = 0; i < numCh; i++) {
+      ASTNode child = (ASTNode) ast.getChild(i);
+      colList.add(stripQuotes(child.getText()).toLowerCase());
+    }
+    return colList;
+  }
+
+  /**
+   * Retrieve skewed values from an ASTNode.
+   *
+   * @param node TOK_TABCOLVALUES node from the parser
+   * @return the list of skewed values
+   * @throws SemanticException if the node carries no column values
+   */
+  protected List<String> getSkewedValuesFromASTNode(Node node) throws SemanticException {
+    List<String> result = null;
+    Tree leafVNode = ((ASTNode) node).getChild(0);
+    if (leafVNode == null) {
+      throw new SemanticException(
+          ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
+    } else {
+      ASTNode lVAstNode = (ASTNode) leafVNode;
+      if (lVAstNode.getToken().getType() != HiveParser.TOK_TABCOLVALUE) {
+        throw new SemanticException(
+            ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
+      } else {
+        result = new ArrayList<String>(getSkewedValueFromASTNode(lVAstNode));
+      }
+    }
+    return result;
+  }
 }
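
For illustration, the two helpers above flatten the value part of a skewed-table clause: getSkewedValueFromASTNode collects the literal children of a TOK_TABCOLVALUE node, and getSkewedValuesFromASTNode first unwraps the TOK_TABCOLVALUES wrapper and rejects anything else. A minimal, self-contained analogue of that flattening (plain Java; SimpleNode is a hypothetical stand-in for Hive's ASTNode, and the quote-stripping only roughly sketches stripQuotes):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Hypothetical stand-in for ASTNode: just text plus children.
    class SimpleNode {
      final String text;
      final List<SimpleNode> children;

      SimpleNode(String text, SimpleNode... children) {
        this.text = text;
        this.children = Arrays.asList(children);
      }
    }

    public class SkewedValueDemo {
      // Roughly mirrors stripQuotes(child.getText()).toLowerCase() per child.
      static List<String> valuesOf(SimpleNode valueNode) {
        List<String> vals = new ArrayList<String>();
        for (SimpleNode child : valueNode.children) {
          vals.add(child.text.replaceAll("^['\"]|['\"]$", "").toLowerCase());
        }
        return vals;
      }

      public static void main(String[] args) {
        // AST for the value list (1,2,5) in: ... skewed by (col1) on (1,2,5)
        SimpleNode tabColValue = new SimpleNode("TOK_TABCOLVALUE",
            new SimpleNode("1"), new SimpleNode("2"), new SimpleNode("5"));
        System.out.println(valuesOf(tabColValue)); // prints [1, 2, 5]
      }
    }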

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1404924&r1=1404923&r2=1404924&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Fri Nov  2 11:20:26 2012
@@ -25,11 +25,15 @@ import static org.apache.hadoop.hive.ql.
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_SHOWDATABASES;
 
 import java.io.Serializable;
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -64,6 +68,7 @@ import org.apache.hadoop.hive.ql.index.H
 import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
+import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
@@ -93,6 +98,7 @@ import org.apache.hadoop.hive.ql.plan.Lo
 import org.apache.hadoop.hive.ql.plan.MoveWork;
 import org.apache.hadoop.hive.ql.plan.MsckDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionSpec;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
 import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
@@ -158,15 +164,15 @@ public class DDLSemanticAnalyzer extends
     String tableName;
     HashMap<String, String> partSpec = null;
 
-    public TablePartition(){
+    public TablePartition() {
     }
 
-    public TablePartition (ASTNode tblPart) throws SemanticException {
+    public TablePartition(ASTNode tblPart) throws SemanticException {
       tableName = unescapeIdentifier(tblPart.getChild(0).getText());
       if (tblPart.getChildCount() > 1) {
         ASTNode part = (ASTNode) tblPart.getChild(1);
         if (part.getToken().getType() == HiveParser.TOK_PARTSPEC) {
-         this.partSpec = DDLSemanticAnalyzer.getPartSpec(part);
+          this.partSpec = DDLSemanticAnalyzer.getPartSpec(part);
         }
       }
     }
@@ -187,13 +193,13 @@ public class DDLSemanticAnalyzer extends
   @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
 
-    switch(ast.getToken().getType()) {
+    switch (ast.getToken().getType()) {
     case HiveParser.TOK_ALTERTABLE_PARTITION: {
-      ASTNode tablePart = (ASTNode)ast.getChild(0);
+      ASTNode tablePart = (ASTNode) ast.getChild(0);
       TablePartition tblPart = new TablePartition(tablePart);
       String tableName = tblPart.tableName;
       HashMap<String, String> partSpec = tblPart.partSpec;
-      ast = (ASTNode)ast.getChild(1);
+      ast = (ASTNode) ast.getChild(1);
       if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
         analyzeAlterTableFileFormat(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE) {
@@ -208,6 +214,8 @@ public class DDLSemanticAnalyzer extends
         analyzeAlterTableSerdeProps(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAMEPART) {
         analyzeAlterTableRenamePart(ast, tableName, partSpec);
+      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTBLPART_SKEWED_LOCATION) {
+        analyzeAlterTableSkewedLocation(ast, tableName, partSpec);
       }
       break;
     }
@@ -380,6 +388,9 @@ public class DDLSemanticAnalyzer extends
     case HiveParser.TOK_REVOKE:
       analyzeRevoke(ast);
       break;
+    case HiveParser.TOK_ALTERTABLE_SKEWED:
+      analyzeAltertableSkewedby(ast);
+      break;
     default:
       throw new SemanticException("Unsupported command.");
     }
@@ -507,7 +518,7 @@ public class DDLSemanticAnalyzer extends
     PrivilegeObjectDesc subject = new PrivilegeObjectDesc();
     subject.setObject(unescapeIdentifier(ast.getChild(0).getText()));
     if (ast.getChildCount() > 1) {
-      for (int i =0;i< ast.getChildCount();i++) {
+      for (int i = 0; i < ast.getChildCount(); i++) {
         ASTNode astChild = (ASTNode) ast.getChild(i);
         if (astChild.getToken().getType() == HiveParser.TOK_PARTSPEC) {
           subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(astChild));
@@ -707,12 +718,12 @@ public class DDLSemanticAnalyzer extends
 
   private void analyzeDropTable(ASTNode ast, boolean expectView)
       throws SemanticException {
-    String tableName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
     boolean ifExists = (ast.getFirstChildWithType(TOK_IFEXISTS) != null);
     // we want to signal an error if the table/view doesn't exist and we're
     // configured not to fail silently
     boolean throwException =
-      !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
+        !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
     try {
       Table tab = db.getTable(db.getCurrentDatabase(), tableName, throwException);
       if (tab != null) {
@@ -724,7 +735,7 @@ public class DDLSemanticAnalyzer extends
     }
 
     DropTableDesc dropTblDesc = new DropTableDesc(
-      tableName, expectView, ifExists, true);
+        tableName, expectView, ifExists, true);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         dropTblDesc), conf));
   }
@@ -732,7 +743,7 @@ public class DDLSemanticAnalyzer extends
   private void analyzeCreateIndex(ASTNode ast) throws SemanticException {
     String indexName = unescapeIdentifier(ast.getChild(0).getText());
     String typeName = unescapeSQLString(ast.getChild(1).getText());
-    String tableName = getUnescapedName((ASTNode)ast.getChild(2));
+    String tableName = getUnescapedName((ASTNode) ast.getChild(2));
     List<String> indexedCols = getColumnNames((ASTNode) ast.getChild(3));
 
     IndexType indexType = HiveIndex.getIndexType(typeName);
@@ -768,7 +779,7 @@ public class DDLSemanticAnalyzer extends
         break;
       case HiveParser.TOK_CREATEINDEX_INDEXTBLNAME:
         ASTNode ch = (ASTNode) child.getChild(0);
-        indexTableName = getUnescapedName((ASTNode)ch);
+        indexTableName = getUnescapedName((ASTNode) ch);
         break;
       case HiveParser.TOK_DEFERRED_REBUILDINDEX:
         deferredRebuild = true;
@@ -800,7 +811,8 @@ public class DDLSemanticAnalyzer extends
 
 
     CreateIndexDesc crtIndexDesc = new CreateIndexDesc(tableName, indexName,
-        indexedCols, indexTableName, deferredRebuild, storageFormat.inputFormat, storageFormat.outputFormat,
+        indexedCols, indexTableName, deferredRebuild, storageFormat.inputFormat,
+        storageFormat.outputFormat,
         storageFormat.storageHandler, typeName, location, idxProps, tblProps,
         shared.serde, shared.serdeProps, rowFormatParams.collItemDelim,
         rowFormatParams.fieldDelim, rowFormatParams.fieldEscape,
@@ -812,12 +824,12 @@ public class DDLSemanticAnalyzer extends
 
   private void analyzeDropIndex(ASTNode ast) throws SemanticException {
     String indexName = unescapeIdentifier(ast.getChild(0).getText());
-    String tableName = getUnescapedName((ASTNode)ast.getChild(1));
+    String tableName = getUnescapedName((ASTNode) ast.getChild(1));
     boolean ifExists = (ast.getFirstChildWithType(TOK_IFEXISTS) != null);
     // we want to signal an error if the index doesn't exist and we're
     // configured not to ignore this
     boolean throwException =
-      !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
+        !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
     if (throwException) {
       try {
         Index idx = db.getIndex(tableName, indexName);
@@ -856,15 +868,15 @@ public class DDLSemanticAnalyzer extends
   }
 
   private void analyzeAlterIndexProps(ASTNode ast)
-    throws SemanticException {
+      throws SemanticException {
 
-    String baseTableName = getUnescapedName((ASTNode)ast.getChild(0));
+    String baseTableName = getUnescapedName((ASTNode) ast.getChild(0));
     String indexName = unescapeIdentifier(ast.getChild(1).getText());
     HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(2))
         .getChild(0));
 
     AlterIndexDesc alterIdxDesc =
-      new AlterIndexDesc(AlterIndexTypes.ADDPROPS);
+        new AlterIndexDesc(AlterIndexTypes.ADDPROPS);
     alterIdxDesc.setProps(mapProp);
     alterIdxDesc.setIndexName(indexName);
     alterIdxDesc.setBaseTableName(baseTableName);
@@ -887,7 +899,7 @@ public class DDLSemanticAnalyzer extends
 
       List<Partition> indexTblPartitions = null;
       List<Partition> baseTblPartitions = null;
-      if(indexTbl != null) {
+      if (indexTbl != null) {
         indexTblPartitions = new ArrayList<Partition>();
         baseTblPartitions = preparePartitions(baseTbl, partSpec,
             indexTbl, db, indexTblPartitions);
@@ -952,15 +964,15 @@ public class DDLSemanticAnalyzer extends
       }
 
       switch (op) {
-        case ADDPARTITION:
-        case DROPPARTITION:
-        case RENAMEPARTITION:
-        case ADDPROPS:
-        case RENAME:
-          // allow this form
-          break;
-        default:
-          throw new SemanticException(ErrorMsg.ALTER_VIEW_DISALLOWED_OP.getMsg(op.toString()));
+      case ADDPARTITION:
+      case DROPPARTITION:
+      case RENAMEPARTITION:
+      case ADDPROPS:
+      case RENAME:
+        // allow this form
+        break;
+      default:
+        throw new SemanticException(ErrorMsg.ALTER_VIEW_DISALLOWED_OP.getMsg(op.toString()));
       }
     } else {
       if (expectView) {
@@ -973,13 +985,13 @@ public class DDLSemanticAnalyzer extends
   }
 
   private void analyzeAlterTableProps(ASTNode ast, boolean expectView)
-    throws SemanticException {
+      throws SemanticException {
 
-    String tableName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
     HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(1))
         .getChild(0));
     AlterTableDesc alterTblDesc =
-      new AlterTableDesc(AlterTableTypes.ADDPROPS, expectView);
+        new AlterTableDesc(AlterTableTypes.ADDPROPS, expectView);
     alterTblDesc.setProps(mapProp);
     alterTblDesc.setOldName(tableName);
 
@@ -1050,7 +1062,7 @@ public class DDLSemanticAnalyzer extends
       break;
     case HiveParser.TOK_STORAGEHANDLER:
       storageHandler =
-        unescapeSQLString(((ASTNode) child.getChild(1)).getToken().getText());
+          unescapeSQLString(((ASTNode) child.getChild(1)).getToken().getText());
       try {
         Class.forName(storageHandler);
       } catch (ClassNotFoundException e) {
@@ -1105,7 +1117,7 @@ public class DDLSemanticAnalyzer extends
     else {
       List<Partition> allPartitions = null;
       try {
-        if (desc == null || desc.getOp() != AlterTableDesc.AlterTableTypes.ALTERPROTECTMODE){
+        if (desc == null || desc.getOp() != AlterTableDesc.AlterTableTypes.ALTERPROTECTMODE) {
           Partition part = db.getPartition(tab, partSpec, false);
           allPartitions = new ArrayList<Partition>(1);
           allPartitions.add(part);
@@ -1116,13 +1128,12 @@ public class DDLSemanticAnalyzer extends
             throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()));
           }
         }
-      }
-      catch (HiveException e) {
+      } catch (HiveException e) {
         throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
       }
 
-      if (allPartitions != null ){
-        for (Partition part: allPartitions) {
+      if (allPartitions != null) {
+        for (Partition part : allPartitions) {
           outputs.add(new WriteEntity(part));
         }
       }
@@ -1138,7 +1149,7 @@ public class DDLSemanticAnalyzer extends
 
     String newLocation = unescapeSQLString(ast.getChild(0).getText());
 
-    AlterTableDesc alterTblDesc = new AlterTableDesc (tableName, newLocation, partSpec);
+    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, newLocation, partSpec);
 
     addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
@@ -1150,7 +1161,7 @@ public class DDLSemanticAnalyzer extends
       throws SemanticException {
 
     AlterTableDesc alterTblDesc =
-      new AlterTableDesc(AlterTableTypes.ALTERPROTECTMODE);
+        new AlterTableDesc(AlterTableTypes.ALTERPROTECTMODE);
 
     alterTblDesc.setOldName(tableName);
     alterTblDesc.setPartSpec(partSpec);
@@ -1314,7 +1325,7 @@ public class DDLSemanticAnalyzer extends
 
   private void analyzeAlterTableClusterSort(ASTNode ast)
       throws SemanticException {
-    String tableName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
     Table tab = null;
 
     try {
@@ -1467,6 +1478,7 @@ public class DDLSemanticAnalyzer extends
 
   /**
    * Describe database.
+   *
    * @param ast
    * @throws SemanticException
    */
@@ -1504,12 +1516,12 @@ public class DDLSemanticAnalyzer extends
 
   private void analyzeShowPartitions(ASTNode ast) throws SemanticException {
     ShowPartitionsDesc showPartsDesc;
-    String tableName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
     // We only can have a single partition spec
-    assert(partSpecs.size() <= 1);
+    assert (partSpecs.size() <= 1);
     Map<String, String> partSpec = null;
-    if(partSpecs.size() > 0) {
+    if (partSpecs.size() > 0) {
       partSpec = partSpecs.get(0);
     }
 
@@ -1569,13 +1581,13 @@ public class DDLSemanticAnalyzer extends
       showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName, tableNames);
       break;
     case 2: // Specifies a DB
-      assert(ast.getChild(0).getType() == HiveParser.TOK_FROM);
+      assert (ast.getChild(0).getType() == HiveParser.TOK_FROM);
       dbName = unescapeIdentifier(ast.getChild(1).getText());
       validateDatabase(dbName);
       showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName);
       break;
     case 3: // Uses a pattern and specifies a DB
-      assert(ast.getChild(0).getType() == HiveParser.TOK_FROM);
+      assert (ast.getChild(0).getType() == HiveParser.TOK_FROM);
       dbName = unescapeIdentifier(ast.getChild(1).getText());
       tableNames = unescapeSQLString(ast.getChild(2).getText());
       validateDatabase(dbName);
@@ -1596,15 +1608,15 @@ public class DDLSemanticAnalyzer extends
     String dbName = null;
     String tableName = null;
     switch (ast.getChildCount()) {
-      case 1:
-        tableName = getUnescapedName((ASTNode)ast.getChild(0));
-        break;
-      case 2:
-        dbName = getUnescapedName((ASTNode)ast.getChild(0));
-        tableName = getUnescapedName((ASTNode)ast.getChild(1));
-        break;
-      default:
-        break;
+    case 1:
+      tableName = getUnescapedName((ASTNode) ast.getChild(0));
+      break;
+    case 2:
+      dbName = getUnescapedName((ASTNode) ast.getChild(0));
+      tableName = getUnescapedName((ASTNode) ast.getChild(1));
+      break;
+    default:
+      break;
     }
 
     try {
@@ -1622,13 +1634,13 @@ public class DDLSemanticAnalyzer extends
 
     showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), dbName, tableName);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-                                              showColumnsDesc), conf));
+        showColumnsDesc), conf));
     setFetchTask(createFetchTask(showColumnsDesc.getSchema()));
   }
 
   private void analyzeShowTableStatus(ASTNode ast) throws SemanticException {
     ShowTableStatusDesc showTblStatusDesc;
-    String tableNames = getUnescapedName((ASTNode)ast.getChild(0));
+    String tableNames = getUnescapedName((ASTNode) ast.getChild(0));
     String dbName = db.getCurrentDatabase();
     int children = ast.getChildCount();
     HashMap<String, String> partSpec = null;
@@ -1661,7 +1673,7 @@ public class DDLSemanticAnalyzer extends
 
   private void analyzeShowTableProperties(ASTNode ast) throws SemanticException {
     ShowTblPropertiesDesc showTblPropertiesDesc;
-    String tableNames = getUnescapedName((ASTNode)ast.getChild(0));
+    String tableNames = getUnescapedName((ASTNode) ast.getChild(0));
     String dbName = db.getCurrentDatabase();
     String propertyName = null;
     if (ast.getChildCount() > 1) {
@@ -1679,7 +1691,7 @@ public class DDLSemanticAnalyzer extends
 
   private void analyzeShowIndexes(ASTNode ast) throws SemanticException {
     ShowIndexesDesc showIndexesDesc;
-    String tableName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
     showIndexesDesc = new ShowIndexesDesc(tableName, ctx.getResFile());
 
     if (ast.getChildCount() == 2) {
@@ -1747,7 +1759,7 @@ public class DDLSemanticAnalyzer extends
     }
 
     ShowLocksDesc showLocksDesc = new ShowLocksDesc(ctx.getResFile(), tableName,
-                                                    partSpec, isExtended);
+        partSpec, isExtended);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         showLocksDesc), conf));
     setFetchTask(createFetchTask(showLocksDesc.getSchema()));
@@ -1767,22 +1779,22 @@ public class DDLSemanticAnalyzer extends
    */
   private void analyzeLockTable(ASTNode ast)
       throws SemanticException {
-    String tableName = getUnescapedName((ASTNode)ast.getChild(0)).toLowerCase();
-    String mode      = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());
+    String tableName = getUnescapedName((ASTNode) ast.getChild(0)).toLowerCase();
+    String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
 
     // We only can have a single partition spec
-    assert(partSpecs.size() <= 1);
+    assert (partSpecs.size() <= 1);
     Map<String, String> partSpec = null;
     if (partSpecs.size() > 0) {
       partSpec = partSpecs.get(0);
     }
 
     LockTableDesc lockTblDesc = new LockTableDesc(tableName, mode, partSpec,
-                                                  HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
+        HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
     lockTblDesc.setQueryStr(this.ctx.getCmd());
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-                                              lockTblDesc), conf));
+        lockTblDesc), conf));
 
     // Need to initialize the lock manager
     ctx.setNeedLockMgr(true);
@@ -1799,11 +1811,11 @@ public class DDLSemanticAnalyzer extends
    */
   private void analyzeUnlockTable(ASTNode ast)
       throws SemanticException {
-    String tableName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
 
     // We only can have a single partition spec
-    assert(partSpecs.size() <= 1);
+    assert (partSpecs.size() <= 1);
     Map<String, String> partSpec = null;
     if (partSpecs.size() > 0) {
       partSpec = partSpecs.get(0);
@@ -1811,7 +1823,7 @@ public class DDLSemanticAnalyzer extends
 
     UnlockTableDesc unlockTblDesc = new UnlockTableDesc(tableName, partSpec);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-                                              unlockTblDesc), conf));
+        unlockTblDesc), conf));
 
     // Need to initialize the lock manager
     ctx.setNeedLockMgr(true);
@@ -1849,9 +1861,9 @@ public class DDLSemanticAnalyzer extends
 
 
   private void analyzeAlterTableRename(ASTNode ast, boolean expectView) throws SemanticException {
-    String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName,
-        getUnescapedName((ASTNode)ast.getChild(1)), expectView);
+        getUnescapedName((ASTNode) ast.getChild(1)), expectView);
 
     addInputsOutputsAlterTable(tblName, null, alterTblDesc);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
@@ -1859,7 +1871,7 @@ public class DDLSemanticAnalyzer extends
   }
 
   private void analyzeAlterTableRenameCol(ASTNode ast) throws SemanticException {
-    String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
     String newComment = null;
     String newType = null;
     newType = getTypeStringFromAST((ASTNode) ast.getChild(3));
@@ -1888,7 +1900,7 @@ public class DDLSemanticAnalyzer extends
     String oldColName = ast.getChild(1).getText();
     String newColName = ast.getChild(2).getText();
 
-    /*Validate the operation of renaming a column name.*/
+    /* Validate the operation of renaming a column name. */
     Table tab = null;
     try {
       tab = db.getTable(tblName);
@@ -1900,7 +1912,7 @@ public class DDLSemanticAnalyzer extends
         && (null != skewInfo.getSkewedColNames())
         && skewInfo.getSkewedColNames().contains(oldColName)) {
       throw new SemanticException(oldColName
-        + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
+          + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
     }
 
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName,
@@ -1914,7 +1926,7 @@ public class DDLSemanticAnalyzer extends
 
   private void analyzeAlterTableRenamePart(ASTNode ast, String tblName,
       HashMap<String, String> oldPartSpec) throws SemanticException {
-    Map<String, String> newPartSpec = extractPartitionSpecs((ASTNode)ast.getChild(0));
+    Map<String, String> newPartSpec = extractPartitionSpecs((ASTNode) ast.getChild(0));
     if (newPartSpec == null) {
       throw new SemanticException("RENAME PARTITION Missing Destination" + ast);
     }
@@ -1943,7 +1955,7 @@ public class DDLSemanticAnalyzer extends
 
   private void analyzeAlterTableModifyCols(ASTNode ast,
       AlterTableTypes alterType) throws SemanticException {
-    String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
     List<FieldSchema> newCols = getColumns((ASTNode) ast.getChild(1));
     AlterTableDesc alterTblDesc = new AlterTableDesc(tblName, newCols,
         alterType);
@@ -1954,9 +1966,9 @@ public class DDLSemanticAnalyzer extends
   }
 
   private void analyzeAlterTableDropParts(ASTNode ast, boolean expectView)
-    throws SemanticException {
+      throws SemanticException {
 
-    String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
     // get table metadata
     List<PartitionSpec> partSpecs = getFullPartitionSpecs(ast);
     Table tab = null;
@@ -1987,7 +1999,7 @@ public class DDLSemanticAnalyzer extends
       for (PartitionSpec partSpec : partSpecs) {
         if (partSpec.isNonEqualityOperator()) {
           throw new SemanticException(
-            ErrorMsg.DROP_PARTITION_NON_STRING_PARTCOLS_NONEQUALITY.getMsg());
+              ErrorMsg.DROP_PARTITION_NON_STRING_PARTCOLS_NONEQUALITY.getMsg());
         }
       }
     }
@@ -1997,7 +2009,7 @@ public class DDLSemanticAnalyzer extends
       // we want to signal an error if the partition doesn't exist and we're
       // configured not to fail silently
       boolean throwException =
-        !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
+          !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
       addTableDropPartsOutputs(tblName, partSpecs, throwException, stringPartitionColumns);
     }
 
@@ -2024,7 +2036,7 @@ public class DDLSemanticAnalyzer extends
   private void analyzeAlterTableAddParts(CommonTree ast, boolean expectView)
       throws SemanticException {
 
-    String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
     boolean isView = false;
     Table tab = null;
     try {
@@ -2088,7 +2100,7 @@ public class DDLSemanticAnalyzer extends
 
     for (AddPartitionDesc addPartitionDesc : partitionDescs) {
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-            addPartitionDesc), conf));
+          addPartitionDesc), conf));
     }
 
     if (isView) {
@@ -2115,8 +2127,7 @@ public class DDLSemanticAnalyzer extends
         }
         boolean firstAnd = true;
         cmd.append("(");
-        for (Map.Entry<String, String> entry
-               : partitionDesc.getPartSpec().entrySet())
+        for (Map.Entry<String, String> entry : partitionDesc.getPartSpec().entrySet())
         {
           if (firstAnd) {
             firstAnd = false;
@@ -2152,7 +2163,7 @@ public class DDLSemanticAnalyzer extends
   private void analyzeAlterTableTouch(CommonTree ast)
       throws SemanticException {
 
-    String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
     Table tab;
 
     try {
@@ -2174,7 +2185,7 @@ public class DDLSemanticAnalyzer extends
           AlterTableDesc.AlterTableTypes.TOUCH);
       outputs.add(new WriteEntity(tab));
       rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-                                                touchDesc), conf));
+          touchDesc), conf));
     } else {
       addTablePartsOutputs(tblName, partSpecs);
       for (Map<String, String> partSpec : partSpecs) {
@@ -2182,7 +2193,7 @@ public class DDLSemanticAnalyzer extends
             db.getCurrentDatabase(), tblName, partSpec,
             AlterTableDesc.AlterTableTypes.TOUCH);
         rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-                                                  touchDesc), conf));
+            touchDesc), conf));
       }
     }
   }
@@ -2194,7 +2205,7 @@ public class DDLSemanticAnalyzer extends
       throw new SemanticException(ErrorMsg.ARCHIVE_METHODS_DISABLED.getMsg());
 
     }
-    String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+    String tblName = getUnescapedName((ASTNode) ast.getChild(0));
     // partition name to value
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
 
@@ -2210,7 +2221,7 @@ public class DDLSemanticAnalyzer extends
     addTablePartsOutputs(tblName, partSpecs, true);
     validateAlterTableType(tab, AlterTableTypes.ARCHIVE);
 
-    if (partSpecs.size() > 1 ) {
+    if (partSpecs.size() > 1) {
       throw new SemanticException(isUnArchive ?
           ErrorMsg.UNARCHIVE_ON_MULI_PARTS.getMsg() :
           ErrorMsg.ARCHIVE_ON_MULI_PARTS.getMsg());
@@ -2219,17 +2230,17 @@ public class DDLSemanticAnalyzer extends
       throw new SemanticException(ErrorMsg.ARCHIVE_ON_TABLE.getMsg());
     }
 
-    Map<String,String> partSpec = partSpecs.get(0);
+    Map<String, String> partSpec = partSpecs.get(0);
     try {
       isValidPrefixSpec(tab, partSpec);
-    } catch(HiveException e) {
+    } catch (HiveException e) {
       throw new SemanticException(e.getMessage(), e);
     }
-      AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc(
-          db.getCurrentDatabase(), tblName, partSpec,
-          (isUnArchive ? AlterTableTypes.UNARCHIVE : AlterTableTypes.ARCHIVE));
-      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-          archiveDesc), conf));
+    AlterTableSimpleDesc archiveDesc = new AlterTableSimpleDesc(
+        db.getCurrentDatabase(), tblName, partSpec,
+        (isUnArchive ? AlterTableTypes.UNARCHIVE : AlterTableTypes.ARCHIVE));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+        archiveDesc), conf));
 
   }
 
@@ -2247,9 +2258,9 @@ public class DDLSemanticAnalyzer extends
     if (ast.getChildCount() > 0) {
       repair = ast.getChild(0).getType() == HiveParser.KW_REPAIR;
       if (!repair) {
-        tableName = getUnescapedName((ASTNode)ast.getChild(0));
+        tableName = getUnescapedName((ASTNode) ast.getChild(0));
       } else if (ast.getChildCount() > 1) {
-        tableName = getUnescapedName((ASTNode)ast.getChild(1));
+        tableName = getUnescapedName((ASTNode) ast.getChild(1));
       }
     }
     List<Map<String, String>> specs = getPartitionSpecs(ast);
@@ -2309,7 +2320,7 @@ public class DDLSemanticAnalyzer extends
 
         for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
           CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
-          assert(partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
+          assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
           String key = partSpecSingleKey.getChild(0).getText().toLowerCase();
           String operator = partSpecSingleKey.getChild(1).getText();
           String val = partSpecSingleKey.getChild(2).getText();
@@ -2321,6 +2332,7 @@ public class DDLSemanticAnalyzer extends
     }
     return partSpecList;
   }
+
   /**
    * Certain partition values are used by Hive, e.g. the default partition
    * in dynamic partitioning and the intermediate partition values used in the
@@ -2347,7 +2359,7 @@ public class DDLSemanticAnalyzer extends
    * pre-execution hook. If the partition does not exist, no error is thrown.
    */
   private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs)
-    throws SemanticException {
+      throws SemanticException {
     addTablePartsOutputs(tblName, partSpecs, false, false, null);
   }
 
@@ -2355,8 +2367,9 @@ public class DDLSemanticAnalyzer extends
    * Add the table partitions to be modified in the output, so that it is available for the
    * pre-execution hook. If the partition does not exist, no error is thrown.
    */
-  private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs, boolean allowMany)
-    throws SemanticException {
+  private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs,
+      boolean allowMany)
+      throws SemanticException {
     addTablePartsOutputs(tblName, partSpecs, false, allowMany, null);
   }
 
@@ -2366,8 +2379,8 @@ public class DDLSemanticAnalyzer extends
    * throwIfNonExistent is true, otherwise ignore it.
    */
   private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs,
-            boolean throwIfNonExistent, boolean allowMany, ASTNode ast)
-    throws SemanticException {
+      boolean throwIfNonExistent, boolean allowMany, ASTNode ast)
+      throws SemanticException {
     Table tab;
     try {
       tab = db.getTable(tblName);
@@ -2390,19 +2403,19 @@ public class DDLSemanticAnalyzer extends
         parts = new ArrayList<Partition>();
         try {
           Partition p = db.getPartition(tab, partSpec, false);
-          if(p != null) {
+          if (p != null) {
             parts.add(p);
           }
-        } catch(HiveException e) {
+        } catch (HiveException e) {
           LOG.debug("Wrong specification");
         }
       }
       if (parts.isEmpty()) {
-        if(throwIfNonExistent) {
+        if (throwIfNonExistent) {
           throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(ast.getChild(index)));
         }
       }
-      for(Partition p: parts) {
+      for (Partition p : parts) {
         outputs.add(new WriteEntity(p));
       }
     }
@@ -2414,8 +2427,8 @@ public class DDLSemanticAnalyzer extends
    * throwIfNonExistent is true, otherwise ignore it.
    */
   private void addTableDropPartsOutputs(String tblName, List<PartitionSpec> partSpecs,
-            boolean throwIfNonExistent, boolean stringPartitionColumns)
-    throws SemanticException {
+      boolean throwIfNonExistent, boolean stringPartitionColumns)
+      throws SemanticException {
     Table tab;
     try {
       tab = db.getTable(tblName);
@@ -2432,25 +2445,294 @@ public class DDLSemanticAnalyzer extends
         try {
           parts = db.getPartitionsByFilter(tab, partSpec.toString());
         } catch (Exception e) {
-            throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
+          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
         }
       }
       else {
         try {
           parts = db.getPartitions(tab, partSpec.getPartSpecWithoutOperator());
         } catch (Exception e) {
-            throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
+          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
         }
       }
 
       if (parts.isEmpty()) {
-        if(throwIfNonExistent) {
+        if (throwIfNonExistent) {
           throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()));
         }
       }
-      for(Partition p: parts) {
+      for (Partition p : parts) {
         outputs.add(new WriteEntity(p));
       }
     }
   }
+
+  /**
+   * Analyze ALTER TABLE ... SKEWED BY ... (or ALTER TABLE ... NOT SKEWED).
+   *
+   * @param ast
+   *          node
+   * @throws SemanticException
+   */
+  private void analyzeAltertableSkewedby(ASTNode ast) throws SemanticException {
+    /**
+     * Throw an error if the user tries to use the DDL with
+     * hive.internal.ddl.list.bucketing.enable set to false.
+     */
+    HiveConf hiveConf = SessionState.get().getConf();
+    if (!(hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_INTERNAL_DDL_LIST_BUCKETING_ENABLE))) {
+      throw new SemanticException(ErrorMsg.HIVE_INTERNAL_DDL_LIST_BUCKETING_DISABLED.getMsg());
+    }
+
+    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
+    Table tab = null;
+
+    try {
+      tab = db.getTable(db.getCurrentDatabase(), tableName, true);
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+    }
+    inputs.add(new ReadEntity(tab));
+    outputs.add(new WriteEntity(tab));
+
+    validateAlterTableType(tab, AlterTableTypes.ADDSKEWEDBY);
+
+    if (ast.getChildCount() == 1) {
+      /* Convert a skewed table to non-skewed table. */
+      AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, true,
+          new ArrayList<String>(), new ArrayList<List<String>>());
+      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+          alterTblDesc), conf));
+    } else {
+      List<String> skewedColNames = new ArrayList<String>();
+      List<List<String>> skewedValues = new ArrayList<List<String>>();
+      /* skewed column names. */
+      ASTNode skewedNode = (ASTNode) ast.getChild(1);
+      skewedColNames = analyzeAlterTableSkewedColNames(skewedColNames, skewedNode);
+      /* skewed value. */
+      Tree vNode = skewedNode.getChild(1);
+      if (vNode == null) {
+        throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
+      } else {
+        ASTNode vAstNode = (ASTNode) vNode;
+        switch (vAstNode.getToken().getType()) {
+        case HiveParser.TOK_TABCOLVALUE:
+          for (String str : getColumnValues(vAstNode)) {
+            List<String> sList = new ArrayList<String>(Arrays.asList(str));
+            skewedValues.add(sList);
+          }
+          break;
+        case HiveParser.TOK_TABCOLVALUE_PAIR:
+          List<Node> vLNodes = vAstNode.getChildren();
+          for (Node node : vLNodes) {
+            if (((ASTNode) node).getToken().getType() != HiveParser.TOK_TABCOLVALUES) {
+              throw new SemanticException(
+                  ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
+            } else {
+              Tree leafVNode = ((ASTNode) node).getChild(0);
+              if (leafVNode == null) {
+                throw new SemanticException(
+                    ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
+              } else {
+                ASTNode lVAstNode = (ASTNode) leafVNode;
+                if (lVAstNode.getToken().getType() != HiveParser.TOK_TABCOLVALUE) {
+                  throw new SemanticException(
+                      ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
+                } else {
+                  skewedValues.add(new ArrayList<String>(getColumnValues(lVAstNode)));
+                }
+              }
+            }
+          }
+          break;
+        default:
+          break;
+        }
+      }
+
+      AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, false,
+          skewedColNames, skewedValues);
+      /**
+       * Validate information about skewed table
+       */
+      alterTblDesc.setTable(tab);
+      alterTblDesc.validate();
+      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+          alterTblDesc), conf));
+    }
+  }
+
+  /**
+   * Analyze skewed column names.
+   *
+   * @param skewedColNames
+   * @param child
+   * @return the list of skewed column names
+   * @throws SemanticException
+   */
+  private List<String> analyzeAlterTableSkewedColNames(List<String> skewedColNames,
+      ASTNode child) throws SemanticException {
+    Tree nNode = child.getChild(0);
+    if (nNode == null) {
+      throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_NAME.getMsg());
+    } else {
+      ASTNode nAstNode = (ASTNode) nNode;
+      if (nAstNode.getToken().getType() != HiveParser.TOK_TABCOLNAME) {
+        throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_NAME.getMsg());
+      } else {
+        skewedColNames = getColumnNames(nAstNode);
+      }
+    }
+    return skewedColNames;
+  }
+
+  /**
+   * Given an ASTNode, return the list of values it holds.
+   *
+   * Use case:
+   * create table xyz list bucketed (col1) with skew (1,2,5)
+   * The AST node passed in is the one for (1,2,5).
+   *
+   * @param ast node whose children are the values
+   * @return list of unquoted, lower-cased values
+   */
+  private List<String> getColumnValues(ASTNode ast) {
+    List<String> colList = new ArrayList<String>();
+    int numCh = ast.getChildCount();
+    for (int i = 0; i < numCh; i++) {
+      ASTNode child = (ASTNode) ast.getChild(i);
+      colList.add(stripQuotes(child.getText()).toLowerCase());
+    }
+    return colList;
+  }
+
+
+  /**
+   * Analyze ALTER TABLE ... [PARTITION ...] SET SKEWED LOCATION.
+   *
+   * @param ast
+   * @param tableName
+   * @param partSpec
+   * @throws SemanticException
+   */
+  private void analyzeAlterTableSkewedLocation(ASTNode ast, String tableName,
+      HashMap<String, String> partSpec) throws SemanticException {
+    /**
+     * Throw an error if the user tries to use the DDL with
+     * hive.internal.ddl.list.bucketing.enable set to false.
+     */
+    HiveConf hiveConf = SessionState.get().getConf();
+    if (!(hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_INTERNAL_DDL_LIST_BUCKETING_ENABLE))) {
+      throw new SemanticException(ErrorMsg.HIVE_INTERNAL_DDL_LIST_BUCKETING_DISABLED.getMsg());
+    }
+    /**
+     * Retrieve mappings from parser
+     */
+    Map<List<String>, String> locations = new HashMap<List<String>, String>();
+    ArrayList<Node> locNodes = ast.getChildren();
+    if (null == locNodes) {
+      throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
+    } else {
+      for (Node locNode : locNodes) {
+        // TOK_SKEWED_LOCATIONS
+        ASTNode locAstNode = (ASTNode) locNode;
+        ArrayList<Node> locListNodes = locAstNode.getChildren();
+        if (null == locListNodes) {
+          throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
+        } else {
+          for (Node locListNode : locListNodes) {
+            // TOK_SKEWED_LOCATION_LIST
+            ASTNode locListAstNode = (ASTNode) locListNode;
+            ArrayList<Node> locMapNodes = locListAstNode.getChildren();
+            if (null == locMapNodes) {
+              throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
+            } else {
+              for (Node locMapNode : locMapNodes) {
+                // TOK_SKEWED_LOCATION_MAP
+                ASTNode locMapAstNode = (ASTNode) locMapNode;
+                ArrayList<Node> locMapAstNodeMaps = locMapAstNode.getChildren();
+                if ((null == locMapAstNodeMaps) || (locMapAstNodeMaps.size() != 2)) {
+                  throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_MAP.getMsg());
+                } else {
+                  List<String> keyList = new LinkedList<String>();
+                  ASTNode node = (ASTNode) locMapAstNodeMaps.get(0);
+                  if (node.getToken().getType() == HiveParser.TOK_TABCOLVALUES) {
+                    keyList = getSkewedValuesFromASTNode(node);
+                  } else if (isConstant(node)) {
+                    keyList.add(PlanUtils
+                        .stripQuotes(node.getText()));
+                  } else {
+                    throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
+                  }
+                  String newLocation = PlanUtils
+                      .stripQuotes(unescapeSQLString(((ASTNode) locMapAstNodeMaps.get(1))
+                          .getText()));
+                  validateSkewedLocationString(newLocation);
+                  locations.put(keyList, newLocation);
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, locations, partSpec);
+    addInputsOutputsAlterTable(tableName, partSpec);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+        alterTblDesc), conf));
+  }
+
+  /**
+   * Check if the node is a constant literal.
+   *
+   * @param node
+   * @return true if the node is a constant, false otherwise
+   */
+  private boolean isConstant(ASTNode node) {
+    boolean result = false;
+    switch (node.getToken().getType()) {
+    case HiveParser.Number:
+    case HiveParser.StringLiteral:
+    case HiveParser.BigintLiteral:
+    case HiveParser.SmallintLiteral:
+    case HiveParser.TinyintLiteral:
+    case HiveParser.CharSetName:
+    case HiveParser.KW_TRUE:
+    case HiveParser.KW_FALSE:
+      result = true;
+      break;
+    default:
+      break;
+    }
+    return result;
+  }
+
+  private void validateSkewedLocationString(String newLocation) throws SemanticException {
+    /* Validate location string. */
+    try {
+      URI locUri = new URI(newLocation);
+      if (!locUri.isAbsolute() || locUri.getScheme() == null
+          || locUri.getScheme().trim().equals("")) {
+        throw new SemanticException(
+            newLocation
+                + " is not absolute or has no scheme information. "
+                + "Please specify a complete absolute URI with scheme information.");
+      }
+    } catch (URISyntaxException e) {
+      throw new SemanticException(e);
+    }
+  }
+
 }
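
The skewed-location validation at the end of this file relies on java.net.URI: a location is accepted only if it parses as an absolute URI with a non-blank scheme, so bare or relative paths are rejected. A small sketch of the same acceptance rule (class and method names here are illustrative, not Hive API):

    import java.net.URI;
    import java.net.URISyntaxException;

    public class SkewedLocationCheck {
      // Same rule as validateSkewedLocationString: absolute URI with a scheme.
      static boolean isValidLocation(String location) {
        try {
          URI uri = new URI(location);
          return uri.isAbsolute() && uri.getScheme() != null
              && !uri.getScheme().trim().equals("");
        } catch (URISyntaxException e) {
          return false;
        }
      }

      public static void main(String[] args) {
        System.out.println(isValidLocation("hdfs://nn:8020/warehouse/skew1")); // true
        System.out.println(isValidLocation("/warehouse/skew1")); // false: no scheme
        System.out.println(isValidLocation("skew1"));            // false: relative
      }
    }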

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=1404924&r1=1404923&r2=1404924&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Fri Nov  2 11:20:26 2012
@@ -266,6 +266,11 @@ TOK_TABLESKEWED;
 TOK_TABCOLVALUE;
 TOK_TABCOLVALUE_PAIR;
 TOK_TABCOLVALUES;
+TOK_ALTERTABLE_SKEWED;
+TOK_ALTERTBLPART_SKEWED_LOCATION;
+TOK_SKEWED_LOCATIONS;
+TOK_SKEWED_LOCATION_LIST;
+TOK_SKEWED_LOCATION_MAP;
 }
 
 
@@ -589,6 +594,7 @@ alterTableStatementSuffix
     | alterStatementSuffixProperties
     | alterTblPartitionStatement
     | alterStatementSuffixClusterbySortby
+    | alterStatementSuffixSkewedby
     ;
 
 alterViewStatementSuffix
@@ -748,6 +754,7 @@ alterTblPartitionStatementSuffix
   | alterStatementSuffixMergeFiles
   | alterStatementSuffixSerdeProperties
   | alterStatementSuffixRenamePart
+  | alterTblPartitionStatementSuffixSkewedLocation
   ;
 
 alterStatementSuffixFileFormat
@@ -757,6 +764,34 @@ alterStatementSuffixFileFormat
 	-> ^(TOK_ALTERTABLE_FILEFORMAT fileFormat)
 	;
 
+alterTblPartitionStatementSuffixSkewedLocation
+@init {msgs.push("alter partition skewed location");}
+@after {msgs.pop();}
+  : KW_SET KW_SKEWED KW_LOCATION skewedLocations
+  -> ^(TOK_ALTERTBLPART_SKEWED_LOCATION skewedLocations)
+  ;
+  
+skewedLocations
+@init { msgs.push("skewed locations"); }
+@after { msgs.pop(); }
+    :
+      LPAREN skewedLocationsList RPAREN -> ^(TOK_SKEWED_LOCATIONS skewedLocationsList)
+    ;
+
+skewedLocationsList
+@init { msgs.push("skewed locations list"); }
+@after { msgs.pop(); }
+    :
+      skewedLocationMap (COMMA skewedLocationMap)* -> ^(TOK_SKEWED_LOCATION_LIST skewedLocationMap+)
+    ;
+
+skewedLocationMap
+@init { msgs.push("specifying skewed location map"); }
+@after { msgs.pop(); }
+    :
+      key=skewedValueLocationElement EQUAL value=StringLiteral -> ^(TOK_SKEWED_LOCATION_MAP $key $value)
+    ;
+
 alterStatementSuffixLocation
 @init {msgs.push("alter location");}
 @after {msgs.pop();}
@@ -764,6 +799,17 @@ alterStatementSuffixLocation
   -> ^(TOK_ALTERTABLE_LOCATION $newLoc)
   ;
 
+	
+alterStatementSuffixSkewedby
+@init {msgs.push("alter skewed by statement");}
+@after{msgs.pop();}
+	:name=Identifier tableSkewed
+	->^(TOK_ALTERTABLE_SKEWED $name tableSkewed)
+	|
+	name=Identifier KW_NOT KW_SKEWED
+	->^(TOK_ALTERTABLE_SKEWED $name)
+	;
+
 alterStatementSuffixProtectMode
 @init { msgs.push("alter partition protect mode statement"); }
 @after { msgs.pop(); }
@@ -1294,6 +1340,14 @@ skewedColumnValue
       constant
     ;
 
+skewedValueLocationElement
+@init { msgs.push("skewed value location element"); }
+@after { msgs.pop(); }
+    : 
+      skewedColumnValue
+     | skewedColumnValuePair
+    ;
+    
 columnNameOrder
 @init { msgs.push("column name order"); }
 @after { msgs.pop(); }
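
Taken together, the new rules accept statements such as ALTER TABLE t SET SKEWED LOCATION (1='hdfs://nn/skew/1', 2='hdfs://nn/skew/2'): each skewedLocationMap pairs a constant (or a value pair) with a StringLiteral and becomes a TOK_SKEWED_LOCATION_MAP node under TOK_SKEWED_LOCATION_LIST. One way to inspect the tree these rules produce (a sketch; ParseDriver is Hive's parser entry point and ASTNode.dump() pretty-prints the tree):

    import org.apache.hadoop.hive.ql.parse.ASTNode;
    import org.apache.hadoop.hive.ql.parse.ParseDriver;

    public class SkewedLocationParseDemo {
      public static void main(String[] args) throws Exception {
        ParseDriver pd = new ParseDriver();
        // Exercises alterTblPartitionStatementSuffixSkewedLocation and the
        // skewedLocations / skewedLocationsList / skewedLocationMap rules.
        ASTNode tree = pd.parse(
            "ALTER TABLE t SET SKEWED LOCATION (1='hdfs://nn/skew/1')");
        // Expect: TOK_ALTERTBLPART_SKEWED_LOCATION -> TOK_SKEWED_LOCATIONS ->
        //   TOK_SKEWED_LOCATION_LIST -> TOK_SKEWED_LOCATION_MAP (1 'hdfs://...')
        System.out.println(tree.dump());
      }
    }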

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=1404924&r1=1404923&r2=1404924&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Fri Nov  2 11:20:26 2012
@@ -65,7 +65,7 @@ public class ParseContext {
   private HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner;
   private HashMap<TableScanOperator, PrunedPartitionList> opToPartList;
   private HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
-  private Map<TableScanOperator, ExprNodeDesc> opToSkewedPruner;
+  private Map<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner;
   private HashMap<String, Operator<? extends OperatorDesc>> topOps;
   private HashMap<String, Operator<? extends OperatorDesc>> topSelOps;
   private LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx;
@@ -169,7 +169,7 @@ public class ParseContext {
       GlobalLimitCtx globalLimitCtx,
       HashMap<String, SplitSample> nameToSplitSample,
       HashSet<ReadEntity> semanticInputs, List<Task<? extends Serializable>> rootTasks,
-      Map<TableScanOperator, ExprNodeDesc> opToSkewedPruner) {
+      Map<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner) {
     this.conf = conf;
     this.qb = qb;
     this.ast = ast;
@@ -195,7 +195,7 @@ public class ParseContext {
     this.globalLimitCtx = globalLimitCtx;
     this.semanticInputs = semanticInputs;
     this.rootTasks = rootTasks;
-    this.opToSkewedPruner = opToSkewedPruner;
+    this.opToPartToSkewedPruner = opToPartToSkewedPruner;
   }
 
   /**
@@ -563,17 +563,19 @@ public class ParseContext {
   }
 
   /**
-   * @return the opToSkewedPruner
+   * @return the opToPartToSkewedPruner
    */
-  public Map<TableScanOperator, ExprNodeDesc> getOpToSkewedPruner() {
-    return opToSkewedPruner;
+  public Map<TableScanOperator, Map<String, ExprNodeDesc>> getOpToPartToSkewedPruner() {
+    return opToPartToSkewedPruner;
   }
 
   /**
-   * @param opToSkewedPruner the opToSkewedPruner to set
+   * @param opToPartToSkewedPruner
+   *          the opToPartToSkewedPruner to set
    */
-  public void setOpToSkewedPruner(HashMap<TableScanOperator, ExprNodeDesc> opToSkewedPruner) {
-    this.opToSkewedPruner = opToSkewedPruner;
+  public void setOpPartToSkewedPruner(
+      HashMap<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner) {
+    this.opToPartToSkewedPruner = opToPartToSkewedPruner;
   }
 
 }
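
The pruner map gains a level here: instead of one ExprNodeDesc per table scan, the context now keys skew-pruning expressions by partition name, so each partition of a scanned table can carry its own pruner. A minimal analogue of the per-partition registration pattern (placeholder Scan/Pruner types stand in for TableScanOperator and ExprNodeDesc):

    import java.util.HashMap;
    import java.util.Map;

    public class TwoLevelPrunerDemo {
      static class Scan { final String alias; Scan(String a) { alias = a; } }
      static class Pruner { final String expr; Pruner(String e) { expr = e; } }

      private final Map<Scan, Map<String, Pruner>> opToPartToPruner =
          new HashMap<Scan, Map<String, Pruner>>();

      // Register a pruning expression for one partition of one table scan.
      void register(Scan scan, String partName, Pruner pruner) {
        Map<String, Pruner> byPart = opToPartToPruner.get(scan);
        if (byPart == null) {
          byPart = new HashMap<String, Pruner>();
          opToPartToPruner.put(scan, byPart);
        }
        byPart.put(partName, pruner);
      }

      public static void main(String[] args) {
        TwoLevelPrunerDemo ctx = new TwoLevelPrunerDemo();
        Scan ts = new Scan("t");
        ctx.register(ts, "ds=2012-11-01", new Pruner("(col1 = 1)"));
        ctx.register(ts, "ds=2012-11-02", new Pruner("(col1 = 2)"));
        System.out.println(ctx.opToPartToPruner.get(ts).size()); // prints 2
      }
    }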

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1404924&r1=1404923&r2=1404924&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Nov  2 11:20:26 2012
@@ -197,7 +197,7 @@ public class SemanticAnalyzer extends Ba
   private UnionProcContext uCtx;
   List<AbstractMapJoinOperator<? extends MapJoinDesc>> listMapJoinOpsNoReducer;
   private HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
-  private final Map<TableScanOperator, ExprNodeDesc> opToSkewedPruner;
+  private final Map<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner;
   /**
   * a map for the split sampling, from alias to an instance of SplitSample
    * that describes percentage and number.
@@ -251,7 +251,7 @@ public class SemanticAnalyzer extends Ba
     autogenColAliasPrfxIncludeFuncName = HiveConf.getBoolVar(conf,
                          HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_INCLUDEFUNCNAME);
     queryProperties = new QueryProperties();
-    opToSkewedPruner = new HashMap<TableScanOperator, ExprNodeDesc>();
+    opToPartToSkewedPruner = new HashMap<TableScanOperator, Map<String, ExprNodeDesc>>();
   }
 
   @Override
@@ -299,7 +299,8 @@ public class SemanticAnalyzer extends Ba
         topSelOps, opParseCtx, joinContext, topToTable, loadTableWork,
         loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
         listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
-        opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks, opToSkewedPruner);
+        opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
+        opToPartToSkewedPruner);
   }
 
   @SuppressWarnings("nls")
@@ -8019,7 +8020,8 @@ public class SemanticAnalyzer extends Ba
         opToPartList, topOps, topSelOps, opParseCtx, joinContext, topToTable,
         loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
         listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
-        opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks, opToSkewedPruner);
+        opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
+        opToPartToSkewedPruner);
 
     // Generate table access stats if required
     if (HiveConf.getBoolVar(this.conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_TABLEKEYS) == true) {
@@ -8581,43 +8583,9 @@ public class SemanticAnalyzer extends Ba
         // skewed value
         Tree vNode = child.getChild(1);
         if (vNode == null) {
-          throw new SemanticException(ErrorMsg.CREATE_SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
+          throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
         } else {
-          ASTNode vAstNode = (ASTNode) vNode;
-          switch (vAstNode.getToken().getType()) {
-            case HiveParser.TOK_TABCOLVALUE:
-              for (String str : getSkewedColumnValuesFromASTNode(vAstNode)) {
-                List<String> sList = new ArrayList<String>(Arrays.asList(str));
-                skewedValues.add(sList);
-              }
-              break;
-            case HiveParser.TOK_TABCOLVALUE_PAIR:
-              ArrayList<Node> vLNodes = vAstNode.getChildren();
-              for (Node node : vLNodes) {
-                if ( ((ASTNode) node).getToken().getType() != HiveParser.TOK_TABCOLVALUES) {
-                  throw new SemanticException(
-                      ErrorMsg.CREATE_SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
-                } else {
-                  Tree leafVNode = ((ASTNode) node).getChild(0);
-                  if (leafVNode == null) {
-                    throw new SemanticException(
-                        ErrorMsg.CREATE_SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
-                  } else {
-                    ASTNode lVAstNode = (ASTNode) leafVNode;
-                    if (lVAstNode.getToken().getType() != HiveParser.TOK_TABCOLVALUE) {
-                      throw new SemanticException(
-                          ErrorMsg.CREATE_SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
-                    } else {
-                      skewedValues.add(new ArrayList<String>(
-                          getSkewedColumnValuesFromASTNode(lVAstNode)));
-                    }
-                  }
-                }
-              }
-              break;
-            default:
-              break;
-          }
+          analyzeDDLSkewedValues(skewedValues, vNode);
         }
         break;
       default:
@@ -8710,6 +8678,41 @@ public class SemanticAnalyzer extends Ba
   }
 
   /**
+   * Handle skewed values in DDL.
+   *
+   * It is used by both SKEWED BY ... ON () and SET SKEWED LOCATION ().
+   *
+   * @param skewedValues
+   * @param vNode
+   * @throws SemanticException
+   */
+  private void analyzeDDLSkewedValues(List<List<String>> skewedValues, Tree vNode)
+      throws SemanticException {
+    ASTNode vAstNode = (ASTNode) vNode;
+    switch (vAstNode.getToken().getType()) {
+      case HiveParser.TOK_TABCOLVALUE:
+        for (String str : getSkewedValueFromASTNode(vAstNode)) {
+          List<String> sList = new ArrayList<String>(Arrays.asList(str));
+          skewedValues.add(sList);
+        }
+        break;
+      case HiveParser.TOK_TABCOLVALUE_PAIR:
+        ArrayList<Node> vLNodes = vAstNode.getChildren();
+        for (Node node : vLNodes) {
+          if (((ASTNode) node).getToken().getType() != HiveParser.TOK_TABCOLVALUES) {
+            throw new SemanticException(
+                ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
+          } else {
+            skewedValues.add(getSkewedValuesFromASTNode(node));
+          }
+        }
+        break;
+      default:
+        break;
+    }
+  }
+
+  /**
    * Analyze list bucket column names
    *
    * @param skewedColNames
@@ -8721,11 +8724,11 @@ public class SemanticAnalyzer extends Ba
       ASTNode child) throws SemanticException {
     Tree nNode = child.getChild(0);
     if (nNode == null) {
-      throw new SemanticException(ErrorMsg.CREATE_SKEWED_TABLE_NO_COLUMN_NAME.getMsg());
+      throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_NAME.getMsg());
     } else {
       ASTNode nAstNode = (ASTNode) nNode;
       if (nAstNode.getToken().getType() != HiveParser.TOK_TABCOLNAME) {
-        throw new SemanticException(ErrorMsg.CREATE_SKEWED_TABLE_NO_COLUMN_NAME.getMsg());
+        throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_NAME.getMsg());
       } else {
         skewedColNames = getColumnNames(nAstNode);
       }
@@ -8733,26 +8736,6 @@ public class SemanticAnalyzer extends Ba
     return skewedColNames;
   }
 
-  /**
-   * Given a ASTNode, return list of values.
-   *
-   * use case:
-   *   create table xyz list bucketed (col1) with skew (1,2,5)
-   *   AST Node is for (1,2,5)
-   * @param ast
-   * @return
-   */
-  protected List<String> getSkewedColumnValuesFromASTNode(ASTNode ast) {
-    List<String> colList = new ArrayList<String>();
-    int numCh = ast.getChildCount();
-    for (int i = 0; i < numCh; i++) {
-      ASTNode child = (ASTNode) ast.getChild(i);
-      colList.add(stripQuotes(child.getText()).toLowerCase());
-    }
-    return colList;
-  }
-
-
   private ASTNode analyzeCreateView(ASTNode ast, QB qb)
       throws SemanticException {
     String tableName = getUnescapedName((ASTNode)ast.getChild(0));

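For reference, the two token shapes that analyzeDDLSkewedValues distinguishes
correspond to single-column and multi-column skew specifications. A minimal,
illustrative sketch of the nested-list shape it builds into skewedValues
(table and values are hypothetical, not taken from this commit):

    import java.util.Arrays;
    import java.util.List;

    // Illustrative only: the skewedValues nesting for the two grammar forms.
    public class SkewedValuesShapeDemo {
      public static void main(String[] args) {
        // SKEWED BY (key) ON (1, 5, 6) parses to TOK_TABCOLVALUE:
        // each scalar becomes a singleton inner list.
        List<List<String>> single = Arrays.<List<String>>asList(
            Arrays.asList("1"), Arrays.asList("5"), Arrays.asList("6"));

        // SKEWED BY (c1, c2) ON ((1,'a'), (2,'b')) parses to
        // TOK_TABCOLVALUE_PAIR: each tuple becomes an inner list with one
        // entry per skewed column.
        List<List<String>> pairs = Arrays.<List<String>>asList(
            Arrays.asList("1", "a"), Arrays.asList("2", "b"));

        System.out.println(single);
        System.out.println(pairs);
      }
    }
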
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1404924&r1=1404923&r2=1404924&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Fri Nov  2 11:20:26 2012
@@ -91,6 +91,7 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_SHOW_ROLE_GRANT, HiveOperation.SHOW_ROLE_GRANT);
     commandType.put(HiveParser.TOK_ALTERDATABASE_PROPERTIES, HiveOperation.ALTERDATABASE);
     commandType.put(HiveParser.TOK_DESCDATABASE, HiveOperation.DESCDATABASE);
+    commandType.put(HiveParser.TOK_ALTERTABLE_SKEWED, HiveOperation.ALTERTABLE_SKEWED);
   }
 
   static {
@@ -115,6 +116,9 @@ public final class SemanticAnalyzerFacto
             HiveOperation.ALTERPARTITION_SERDEPROPERTIES });
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_RENAMEPART,
         new HiveOperation[] {null, HiveOperation.ALTERTABLE_RENAMEPART});
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTBLPART_SKEWED_LOCATION,
+        new HiveOperation[] {HiveOperation.ALTERTBLPART_SKEWED_LOCATION,
+            HiveOperation.ALTERTBLPART_SKEWED_LOCATION });
   }
 
   public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
@@ -184,6 +188,7 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_REVOKE_ROLE:
       case HiveParser.TOK_SHOW_ROLE_GRANT:
       case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
+      case HiveParser.TOK_ALTERTABLE_SKEWED:
         return new DDLSemanticAnalyzer(conf);
       case HiveParser.TOK_ALTERTABLE_PARTITION:
         HiveOperation commandType = null;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=1404924&r1=1404923&r2=1404924&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Fri Nov  2 11:20:26 2012
@@ -444,9 +444,11 @@ public final class TypeCheckProcFactory 
           }
         } else {
           // It's a column.
-          return new ExprNodeColumnDesc(colInfo.getType(), colInfo
+          ExprNodeColumnDesc exprNodeColDesc = new ExprNodeColumnDesc(colInfo.getType(), colInfo
              .getInternalName(), colInfo.getTabAlias(), colInfo
              .getIsVirtualCol());
+          exprNodeColDesc.setSkewedCol(colInfo.isSkewedCol());
+          return exprNodeColDesc;
         }
       }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java?rev=1404924&r1=1404923&r2=1404924&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java Fri Nov  2 11:20:26 2012
@@ -22,10 +22,14 @@ import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ParseUtils;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 /**
  * AlterTableDesc.
@@ -43,7 +47,7 @@ public class AlterTableDesc extends DDLD
     RENAME, ADDCOLS, REPLACECOLS, ADDPROPS, ADDSERDE, ADDSERDEPROPS,
     ADDFILEFORMAT, ADDCLUSTERSORTCOLUMN, RENAMECOLUMN, ADDPARTITION,
     TOUCH, ARCHIVE, UNARCHIVE, ALTERPROTECTMODE, ALTERPARTITIONPROTECTMODE,
-    ALTERLOCATION, DROPPARTITION, RENAMEPARTITION
+    ALTERLOCATION, DROPPARTITION, RENAMEPARTITION, ADDSKEWEDBY, ALTERSKEWEDLOCATION
   };
 
   public static enum ProtectModeType {
@@ -75,6 +79,11 @@ public class AlterTableDesc extends DDLD
   private String newLocation;
   boolean protectModeEnable;
   ProtectModeType protectModeType;
+  Map<List<String>, String> skewedLocations;
+  boolean turnOffSkewed = false;
+  List<String> skewedColNames;
+  List<List<String>> skewedColValues;
+  Table table;
 
   public AlterTableDesc() {
   }
@@ -184,6 +193,23 @@ public class AlterTableDesc extends DDLD
     this.partSpec = partSpec;
   }
 
+  public AlterTableDesc(String tableName, Map<List<String>, String> locations,
+      HashMap<String, String> partSpec) {
+    op = AlterTableTypes.ALTERSKEWEDLOCATION;
+    this.oldName = tableName;
+    this.skewedLocations = locations;
+    this.partSpec = partSpec;
+  }
+
+  public AlterTableDesc(String tableName, boolean turnOffSkewed,
+      List<String> skewedColNames, List<List<String>> skewedColValues) {
+    oldName = tableName;
+    op = AlterTableTypes.ADDSKEWEDBY;
+    this.turnOffSkewed = turnOffSkewed;
+    this.skewedColNames = new ArrayList<String>(skewedColNames);
+    this.skewedColValues = new ArrayList<List<String>>(skewedColValues);
+  }
+
   @Explain(displayName = "new columns")
   public List<String> getNewColsString() {
     return Utilities.getFieldSchemaString(getNewCols());
@@ -538,4 +564,88 @@ public class AlterTableDesc extends DDLD
   public void setProtectModeType(ProtectModeType protectModeType) {
     this.protectModeType = protectModeType;
   }
+  /**
+   * @return the skewedLocations
+   */
+  public Map<List<String>, String> getSkewedLocations() {
+    return skewedLocations;
+  }
+
+  /**
+   * @param skewedLocations the skewedLocations to set
+   */
+  public void setSkewedLocations(Map<List<String>, String> skewedLocations) {
+    this.skewedLocations = skewedLocations;
+  }
+
+  /**
+   * @return the turnOffSkewed
+   */
+  public boolean isTurnOffSkewed() {
+    return turnOffSkewed;
+  }
+
+  /**
+   * @param turnOffSkewed the turnOffSkewed to set
+   */
+  public void setTurnOffSkewed(boolean turnOffSkewed) {
+    this.turnOffSkewed = turnOffSkewed;
+  }
+
+  /**
+   * @return the skewedColNames
+   */
+  public List<String> getSkewedColNames() {
+    return skewedColNames;
+  }
+
+  /**
+   * @param skewedColNames the skewedColNames to set
+   */
+  public void setSkewedColNames(List<String> skewedColNames) {
+    this.skewedColNames = skewedColNames;
+  }
+
+  /**
+   * @return the skewedColValues
+   */
+  public List<List<String>> getSkewedColValues() {
+    return skewedColValues;
+  }
+
+  /**
+   * @param skewedColValues the skewedColValues to set
+   */
+  public void setSkewedColValues(List<List<String>> skewedColValues) {
+    this.skewedColValues = skewedColValues;
+  }
+
+  /**
+   * Validate alter table description.
+   *
+   * @throws SemanticException
+   */
+  public void validate() throws SemanticException {
+    if (null != table) {
+      /* Validate skewed information. */
+      ValidationUtility.validateSkewedInformation(
+          ParseUtils.validateColumnNameUniqueness(table.getCols()), this.getSkewedColNames(),
+          this.getSkewedColValues());
+    }
+  }
+
+  /**
+   * @return the table
+   */
+  public Table getTable() {
+    return table;
+  }
+
+  /**
+   * @param table the table to set
+   */
+  public void setTable(Table table) {
+    this.table = table;
+  }
+
 }

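A hedged usage sketch of the two constructors added above; the table name,
column names, and location URI are hypothetical:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

    public class AlterSkewDescSketch {
      public static void main(String[] args) {
        // ALTER TABLE t SKEWED BY (key) ON (1, 5, 6)
        AlterTableDesc addSkew = new AlterTableDesc("t", false,
            Arrays.asList("key"),
            Arrays.<List<String>>asList(Arrays.asList("1"),
                Arrays.asList("5"), Arrays.asList("6")));

        // ALTER TABLE t SET SKEWED LOCATION ((1)='hdfs://...')
        Map<List<String>, String> locations =
            new HashMap<List<String>, String>();
        locations.put(Arrays.asList("1"),
            "hdfs://nn/warehouse/t/key=1");  // hypothetical URI
        AlterTableDesc setLoc = new AlterTableDesc("t", locations,
            new HashMap<String, String>());  // no partition spec
      }
    }
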
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java?rev=1404924&r1=1404923&r2=1404924&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java Fri Nov  2 11:20:26 2012
@@ -20,11 +20,9 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.common.JavaUtils;
@@ -480,98 +478,8 @@ public class CreateTableDesc extends DDL
       }
     }
 
-    validateSkewedInformation(colNames);
-  }
-
-
-  /**
-   * Validate skewed table creation
-
-   * @param colNames
-   * @throws SemanticException
-   */
-  private void validateSkewedInformation(List<String> colNames)
-      throws SemanticException {
-    if (this.getSkewedColNames().size() > 0) {
-      /**
-       * all columns in skewed column name are valid columns
-       */
-      validateSkewedColNames(colNames);
-
-      /**
-       * find out duplicate skewed column name
-       */
-      validateSkewedColumnNameUniqueness(this.getSkewedColNames());
-
-      if (this.getSkewedColValues() == null || this.getSkewedColValues().size() == 0) {
-        /**
-         * skewed column value is empty but skewed col name is not empty. something is wrong
-         */
-        throw new SemanticException(
-            ErrorMsg.CREATE_SKEWED_TABLE_SKEWED_COL_NAME_VALUE_MISMATCH_2.getMsg());
-
-      } else {
-        /**
-         * each skewed col value should have the same number as number of skewed column names
-         */
-        validateSkewedColNameValueNumberMatch();
-
-      }
-    } else if (this.getSkewedColValues().size() > 0) {
-      /**
-       * skewed column name is empty but skewed col value is not empty. something is wrong
-       */
-      throw new SemanticException(
-          ErrorMsg.CREATE_SKEWED_TABLE_SKEWED_COL_NAME_VALUE_MISMATCH_1.getMsg());
-    }
-  }
-
-  private void validateSkewedColNameValueNumberMatch()
-      throws SemanticException {
-    for (List<String> colValue : this.getSkewedColValues()) {
-      if (colValue.size() != this.getSkewedColNames().size()) {
-        throw new SemanticException(
-            ErrorMsg.CREATE_SKEWED_TABLE_SKEWED_COL_NAME_VALUE_MISMATCH_3.getMsg()
-                + this.getSkewedColNames().size() + " : "
-                + colValue.size());
-      }
-    }
-  }
-
-  private void validateSkewedColNames(List<String> colNames)
-      throws SemanticException {
-    // make a copy
-    List<String> copySkewedColNames = new ArrayList<String>(this.getSkewedColNames());
-    // remove valid columns
-    copySkewedColNames.removeAll(colNames);
-    if (copySkewedColNames.size() > 0) {
-      StringBuilder invalidColNames = new StringBuilder();
-      for (String name : copySkewedColNames) {
-        invalidColNames.append(name);
-        invalidColNames.append(" ");
-      }
-      throw new SemanticException(
-          ErrorMsg.CREATE_SKEWED_TABLE_INVALID_COLUMN.getMsg(invalidColNames.toString()));
-    }
-  }
-
-
-  /**
-   * Find out duplicate name
-   * @param names
-   * @throws SemanticException
-   */
-  private void validateSkewedColumnNameUniqueness(
-      List<String> names) throws SemanticException {
-
-    Set<String> lookup = new HashSet<String>();
-    for (String name : names) {
-      if (lookup.contains(name)) {
-        throw new SemanticException(ErrorMsg.CREATE_SKEWED_TABLE_DUPLICATE_COLUMN_NAMES
-            .getMsg(name));
-      } else {
-        lookup.add(name);
-      }
-    }
+    /* Validate skewed information. */
+    ValidationUtility.validateSkewedInformation(colNames, this.getSkewedColNames(),
+        this.getSkewedColValues());
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1404924&r1=1404923&r2=1404924&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Fri Nov  2 11:20:26 2012
@@ -91,6 +91,9 @@ public enum HiveOperation {
   DESCDATABASE("DESCDATABASE", null, null),
   ALTERTABLE_MERGEFILES("ALTER_TABLE_MERGE", new Privilege[] { Privilege.SELECT }, new Privilege[] { Privilege.ALTER_DATA }),
   ALTERPARTITION_MERGEFILES("ALTER_PARTITION_MERGE", new Privilege[] { Privilege.SELECT }, new Privilege[] { Privilege.ALTER_DATA }),
+  ALTERTABLE_SKEWED("ALTERTABLE_SKEWED", new Privilege[] {Privilege.ALTER_METADATA}, null),
+  ALTERTBLPART_SKEWED_LOCATION("ALTERTBLPART_SKEWED_LOCATION",
+      new Privilege[] {Privilege.ALTER_DATA}, null),
   ;
 
   private String operationName;

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ValidationUtility.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ValidationUtility.java?rev=1404924&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ValidationUtility.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ValidationUtility.java Fri Nov  2 11:20:26 2012
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.plan;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Common utilities for validation.
+ *
+ * ValidationUtility.
+ *
+ */
+public final class ValidationUtility {
+
+  /**
+   * Utility class; not meant to be instantiated.
+   */
+  private ValidationUtility() {
+  }
+
+  /**
+   * Validate skewed table information.
+   * @param colNames column names
+   * @param skewedColNames skewed column names
+   * @param skewedColValues skewed column values
+   * @throws SemanticException
+   */
+  public static void validateSkewedInformation(List<String> colNames, List<String> skewedColNames,
+      List<List<String>> skewedColValues) throws SemanticException {
+    if (skewedColNames.size() > 0) {
+      /**
+       * all columns in skewed column name are valid columns
+       */
+      validateSkewedColNames(colNames, skewedColNames);
+
+      /**
+       * find out duplicate skewed column name
+       */
+      validateSkewedColumnNameUniqueness(skewedColNames);
+
+      if ((skewedColValues == null) || (skewedColValues.size() == 0)) {
+        /**
+         * skewed column value is empty but skewed col name is not empty. something is wrong
+         */
+        throw new SemanticException(
+            ErrorMsg.SKEWED_TABLE_SKEWED_COL_NAME_VALUE_MISMATCH_2.getMsg());
+      } else {
+        /**
+         * each skewed col value should have the same number as number of skewed column names
+         */
+        validateSkewedColNameValueNumberMatch(skewedColNames, skewedColValues);
+      }
+    } else if ((skewedColValues != null) && (skewedColValues.size() > 0)) {
+      /**
+       * skewed column name is empty but skewed col value is not empty. something is wrong
+       */
+      throw new SemanticException(
+          ErrorMsg.SKEWED_TABLE_SKEWED_COL_NAME_VALUE_MISMATCH_1.getMsg());
+    }
+  }
+
+  /**
+   * Each skewed value tuple must have as many entries as there are skewed column names.
+   *
+   * @param skewedColNames
+   * @param skewedColValues
+   * @throws SemanticException
+   */
+  public static void validateSkewedColNameValueNumberMatch(List<String> skewedColNames,
+      List<List<String>> skewedColValues) throws SemanticException {
+    for (List<String> colValue : skewedColValues) {
+      if (colValue.size() != skewedColNames.size()) {
+        throw new SemanticException(
+            ErrorMsg.SKEWED_TABLE_SKEWED_COL_NAME_VALUE_MISMATCH_3.getMsg()
+                + skewedColNames.size() + " : "
+                + colValue.size());
+      }
+    }
+  }
+
+  /**
+   * Every skewed column name must be one of the table's defined columns.
+   *
+   * @param colNames
+   * @param skewedColNames
+   * @throws SemanticException
+   */
+  public static void validateSkewedColNames(List<String> colNames, List<String> skewedColNames)
+      throws SemanticException {
+    // make a copy
+    List<String> copySkewedColNames = new ArrayList<String>(skewedColNames);
+    // remove valid columns
+    copySkewedColNames.removeAll(colNames);
+    if (copySkewedColNames.size() > 0) {
+      StringBuilder invalidColNames = new StringBuilder();
+      for (String name : copySkewedColNames) {
+        invalidColNames.append(name);
+        invalidColNames.append(" ");
+      }
+      throw new SemanticException(
+          ErrorMsg.SKEWED_TABLE_INVALID_COLUMN.getMsg(invalidColNames.toString()));
+    }
+  }
+
+  /**
+   * Detect duplicate skewed column names.
+   *
+   * @param names
+   * @throws SemanticException
+   */
+  public static void validateSkewedColumnNameUniqueness(List<String> names)
+      throws SemanticException {
+    Set<String> lookup = new HashSet<String>();
+    for (String name : names) {
+      if (lookup.contains(name)) {
+        throw new SemanticException(ErrorMsg.SKEWED_TABLE_DUPLICATE_COLUMN_NAMES
+            .getMsg(name));
+      } else {
+        lookup.add(name);
+      }
+    }
+  }
+}

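A short driver illustrating the checks validateSkewedInformation enforces;
column names and values here are made up, and the second call is expected to
be rejected because "ts" is not a declared column:

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.hive.ql.parse.SemanticException;
    import org.apache.hadoop.hive.ql.plan.ValidationUtility;

    public class SkewValidationDemo {
      public static void main(String[] args) {
        List<String> cols = Arrays.asList("key", "value");
        try {
          // Passes: one skewed column, every value tuple has one entry.
          ValidationUtility.validateSkewedInformation(cols,
              Arrays.asList("key"),
              Arrays.<List<String>>asList(Arrays.asList("1"),
                  Arrays.asList("5")));

          // Rejected: "ts" is not among the table's columns.
          ValidationUtility.validateSkewedInformation(cols,
              Arrays.asList("ts"),
              Arrays.<List<String>>asList(Arrays.asList("1")));
        } catch (SemanticException e) {
          System.out.println("validation failed: " + e.getMessage());
        }
      }
    }
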
Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/DynamicMultiDimeCollectionTest.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/DynamicMultiDimeCollectionTest.java?rev=1404924&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/DynamicMultiDimeCollectionTest.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/DynamicMultiDimeCollectionTest.java Fri Nov  2 11:20:26 2012
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.optimizer.listbucketingpruner;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ *
+ * Test {@link ListBucketingPruner.DynamicMultiDimensionalCollection}
+ *
+ */
+public class DynamicMultiDimeCollectionTest extends TestCase {
+  private static final String DEF_DIR = "default";
+
+  @Test
+  public void testUniqueElementsList1() {
+    // (1,a,x), (2,b,x), (1,c,x), (2,a,y)
+    List<List<String>> values = new ArrayList<List<String>>();
+    List<String> v1 = Arrays.asList("1", "a", "x");
+    List<String> v2 = Arrays.asList("2", "b", "x");
+    List<String> v3 = Arrays.asList("1", "c", "x");
+    List<String> v4 = Arrays.asList("2", "a", "y");
+    values.add(v1);
+    values.add(v2);
+    values.add(v3);
+    values.add(v4);
+    List<List<String>> actuals = ListBucketingPruner.DynamicMultiDimensionalCollection
+        .uniqueElementsList(values, DEF_DIR);
+
+    List<List<String>> expecteds = new ArrayList<List<String>>();
+    v1 = Arrays.asList("1", "2", "default");
+    v2 = Arrays.asList("a", "b", "c", "default");
+    v3 = Arrays.asList("x", "y", "default");
+    expecteds.add(v1);
+    expecteds.add(v2);
+    expecteds.add(v3);
+
+    Assert.assertEquals(expecteds, actuals);
+  }
+
+  @Test
+  public void testUniqueElementsList2() {
+    // (1,a,x)
+    List<List<String>> values = new ArrayList<List<String>>();
+    List<String> v1 = Arrays.asList("1", "a", "x");
+    values.add(v1);
+    List<List<String>> actuals = ListBucketingPruner.DynamicMultiDimensionalCollection
+        .uniqueElementsList(values, DEF_DIR);
+    List<List<String>> expecteds = new ArrayList<List<String>>();
+    v1 = Arrays.asList("1", "default");
+    List<String> v2 = Arrays.asList("a", "default");
+    List<String> v3 = Arrays.asList("x", "default");
+    expecteds.add(v1);
+    expecteds.add(v2);
+    expecteds.add(v3);
+
+    Assert.assertEquals(expecteds, actuals);
+  }
+
+  @Test
+  public void testUniqueElementsList3() {
+    // (1), (2), (3), (4)
+    List<List<String>> values = new ArrayList<List<String>>();
+    List<String> v1 = Arrays.asList("1");
+    List<String> v2 = Arrays.asList("2");
+    List<String> v3 = Arrays.asList("3");
+    List<String> v4 = Arrays.asList("4");
+    values.add(v1);
+    values.add(v2);
+    values.add(v3);
+    values.add(v4);
+    List<List<String>> actuals = ListBucketingPruner.DynamicMultiDimensionalCollection
+        .uniqueElementsList(values, DEF_DIR);
+    List<List<String>> expecteds = new ArrayList<List<String>>();
+    v1 = Arrays.asList("1", "2", "3", "4", "default");
+    expecteds.add(v1);
+
+    Assert.assertEquals(expecteds, actuals);
+  }
+
+  @Test
+  public void testFlat3() throws SemanticException {
+    List<List<String>> uniqSkewedElements = new ArrayList<List<String>>();
+    List<String> v1 = Arrays.asList("1", "2", "default");
+    List<String> v2 = Arrays.asList("a", "b", "c", "default");
+    List<String> v3 = Arrays.asList("x", "y", "default");
+    uniqSkewedElements.add(v1);
+    uniqSkewedElements.add(v2);
+    uniqSkewedElements.add(v3);
+    List<List<String>> actuals = ListBucketingPruner.DynamicMultiDimensionalCollection
+        .flat(uniqSkewedElements);
+    Assert.assertEquals(36, actuals.size());
+  }
+
+  @Test
+  public void testFlat2() throws SemanticException {
+    List<List<String>> uniqSkewedElements = new ArrayList<List<String>>();
+    List<String> v1 = Arrays.asList("1", "2");
+    uniqSkewedElements.add(v1);
+    List<List<String>> actual = ListBucketingPruner.DynamicMultiDimensionalCollection
+        .flat(uniqSkewedElements);
+    List<List<String>> expected = new ArrayList<List<String>>();
+    v1 = Arrays.asList("1");
+    List<String> v2 = Arrays.asList("2");
+    expected.add(v1);
+    expected.add(v2);
+    Assert.assertEquals(expected, actual);
+  }
+
+  @Test
+  public void testFlat1() throws SemanticException {
+    List<List<String>> uniqSkewedElements = new ArrayList<List<String>>();
+    List<String> v1 = Arrays.asList("1", "2");
+    List<String> v2 = Arrays.asList("3", "4");
+    uniqSkewedElements.add(v1);
+    uniqSkewedElements.add(v2);
+    List<List<String>> actual = ListBucketingPruner.DynamicMultiDimensionalCollection
+        .flat(uniqSkewedElements);
+    List<List<String>> expected = new ArrayList<List<String>>();
+    v1 = Arrays.asList("1", "3");
+    v2 = Arrays.asList("1", "4");
+    List<String> v3 = Arrays.asList("2", "3");
+    List<String> v4 = Arrays.asList("2", "4");
+    expected.add(v1);
+    expected.add(v2);
+    expected.add(v3);
+    expected.add(v4);
+    Assert.assertEquals(expected, actual);
+  }
+
+}

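The flat() tests above pin down cartesian-product semantics: [1,2] x [3,4]
expands to four rows in lexicographic order, and dimensions of sizes
3 x 4 x 3 expand to 36 rows. A standalone sketch of that expansion (not
Hive's implementation):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class CartesianSketch {
      // Expands per-dimension value lists into all combinations, preserving
      // the row order the tests above expect.
      static List<List<String>> flat(List<List<String>> dims) {
        List<List<String>> out = new ArrayList<List<String>>();
        out.add(new ArrayList<String>());
        for (List<String> dim : dims) {
          List<List<String>> next = new ArrayList<List<String>>();
          for (List<String> prefix : out) {
            for (String v : dim) {
              List<String> row = new ArrayList<String>(prefix);
              row.add(v);
              next.add(row);
            }
          }
          out = next;
        }
        return out;
      }

      public static void main(String[] args) {
        // Prints [[1, 3], [1, 4], [2, 3], [2, 4]]
        System.out.println(flat(Arrays.<List<String>>asList(
            Arrays.asList("1", "2"), Arrays.asList("3", "4"))));
      }
    }
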
Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPrunnerTest.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPrunnerTest.java?rev=1404924&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPrunnerTest.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/optimizer/listbucketingpruner/ListBucketingPrunnerTest.java Fri Nov  2 11:20:26 2012
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.optimizer.listbucketingpruner;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.junit.Test;
+
+/**
+ *
+ * Test {@link ListBucketingPrunerUtils}
+ *
+ */
+public class ListBucketingPrunnerTest extends TestCase {
+
+  @Test
+  public void testSkipSkewedDirectory1() {
+    Assert.assertFalse(ListBucketingPrunerUtils.skipSkewedDirectory(null));
+  }
+
+  @Test
+  public void testSkipSkewedDirectory2() {
+    Assert.assertTrue(ListBucketingPrunerUtils.skipSkewedDirectory(Boolean.FALSE));
+  }
+
+  @Test
+  public void testSkipSkewedDirectory3() {
+    Assert.assertFalse(ListBucketingPrunerUtils.skipSkewedDirectory(Boolean.TRUE));
+  }
+
+  @Test
+  public void testAndBoolOperand() {
+    /**
+     * Operand one|Operand another | And result
+     */
+    // unknown | T | unknown
+    Assert.assertNull(ListBucketingPrunerUtils.andBoolOperand(null, Boolean.TRUE));
+    // unknown | F | F
+    Assert.assertFalse(ListBucketingPrunerUtils.andBoolOperand(null, Boolean.FALSE));
+    // unknown | unknown | unknown
+    Assert.assertNull(ListBucketingPrunerUtils.andBoolOperand(null, null));
+    // T | T | T
+    Assert.assertTrue(ListBucketingPrunerUtils.andBoolOperand(Boolean.TRUE, Boolean.TRUE));
+    // T | F | F
+    Assert.assertFalse(ListBucketingPrunerUtils.andBoolOperand(Boolean.TRUE, Boolean.FALSE));
+    // T | unknown | unknown
+    Assert.assertNull(ListBucketingPrunerUtils.andBoolOperand(Boolean.TRUE, null));
+    // F | T | F
+    Assert.assertFalse(ListBucketingPrunerUtils.andBoolOperand(Boolean.FALSE, Boolean.TRUE));
+    // F | F | F
+    Assert.assertFalse(ListBucketingPrunerUtils.andBoolOperand(Boolean.FALSE, Boolean.FALSE));
+    // F | unknown | F
+    Assert.assertFalse(ListBucketingPrunerUtils.andBoolOperand(Boolean.FALSE, null));
+  }
+
+  @Test
+  public void testOrBoolOperand() {
+    // Operand one|Operand another | or result
+    // unknown | T | T
+    Assert.assertTrue(ListBucketingPrunerUtils.orBoolOperand(null, Boolean.TRUE));
+    // unknown | F | unknown
+    Assert.assertNull(ListBucketingPrunerUtils.orBoolOperand(null, Boolean.FALSE));
+    // unknown | unknown | unknown
+    Assert.assertNull(ListBucketingPrunerUtils.orBoolOperand(null, null));
+    // T | T | T
+    Assert.assertTrue(ListBucketingPrunerUtils.orBoolOperand(Boolean.TRUE, Boolean.TRUE));
+    // T | F | T
+    Assert.assertTrue(ListBucketingPrunerUtils.orBoolOperand(Boolean.TRUE, Boolean.FALSE));
+    // T | unknown | unknown
+    Assert.assertNull(ListBucketingPrunerUtils.orBoolOperand(Boolean.TRUE, null));
+    // F | T | T
+    Assert.assertTrue(ListBucketingPrunerUtils.orBoolOperand(Boolean.FALSE, Boolean.TRUE));
+    // F | F | F
+    Assert.assertFalse(ListBucketingPrunerUtils.orBoolOperand(Boolean.FALSE, Boolean.FALSE));
+    // F | unknown | unknown
+    Assert.assertNull(ListBucketingPrunerUtils.orBoolOperand(Boolean.FALSE, null));
+  }
+
+  @Test
+  public void testNotBoolOperand() {
+    // Operand | Not
+    // T | F
+    Assert.assertFalse(ListBucketingPrunerUtils.notBoolOperand(Boolean.TRUE));
+    // F | T
+    Assert.assertTrue(ListBucketingPrunerUtils.notBoolOperand(Boolean.FALSE));
+    // unknown | unknown
+    Assert.assertNull(ListBucketingPrunerUtils.notBoolOperand(null));
+  }
+}

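Restated compactly, the truth tables these tests pin down; null stands for
"unknown". AND follows standard three-valued (Kleene) logic, while OR, per
the assertions above, returns unknown whenever the right operand is unknown.
A sketch that mirrors the assertions, not Hive's own code:

    public class BoolOperandSketch {
      static Boolean and(Boolean a, Boolean b) {
        if (Boolean.FALSE.equals(a) || Boolean.FALSE.equals(b)) {
          return Boolean.FALSE;  // FALSE wins regardless of unknowns
        }
        if (a == null || b == null) {
          return null;           // unknown unless a FALSE is present
        }
        return Boolean.TRUE;
      }

      static Boolean or(Boolean a, Boolean b) {
        if (b == null) {
          return null;           // unknown right operand dominates, per tests
        }
        return b.booleanValue() ? Boolean.TRUE : a;
      }

      public static void main(String[] args) {
        System.out.println(and(null, Boolean.FALSE));  // false
        System.out.println(or(Boolean.TRUE, null));    // null
        System.out.println(or(null, Boolean.TRUE));    // true
      }
    }
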
Added: hive/trunk/ql/src/test/queries/clientnegative/column_change_skewedcol_type1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/column_change_skewedcol_type1.q?rev=1404924&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/column_change_skewedcol_type1.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/column_change_skewedcol_type1.q Fri Nov  2 11:20:26 2012
@@ -0,0 +1,6 @@
+set hive.mapred.supports.subdirectories=true;
+set hive.internal.ddl.list.bucketing.enable=true;
+
+CREATE TABLE skewedtable (key STRING, value STRING) SKEWED BY (key) ON (1,5,6);
+
+ALTER TABLE skewedtable CHANGE key key INT;