Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/02 21:57:07 UTC

svn commit: r1622108 [16/27] - in /hive/branches/tez: ./ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/ bin/ bin/ext/ checkstyle/ common/src/java/...

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Tue Sep  2 19:56:56 2014
@@ -126,11 +126,10 @@ TOK_CREATEINDEX;
 TOK_CREATEINDEX_INDEXTBLNAME;
 TOK_DEFERRED_REBUILDINDEX;
 TOK_DROPINDEX;
-TOK_DROPTABLE_PROPERTIES;
 TOK_LIKETABLE;
 TOK_DESCTABLE;
 TOK_DESCFUNCTION;
-TOK_ALTERTABLE_PARTITION;
+TOK_ALTERTABLE;
 TOK_ALTERTABLE_RENAME;
 TOK_ALTERTABLE_ADDCOLS;
 TOK_ALTERTABLE_RENAMECOL;
@@ -152,6 +151,13 @@ TOK_ALTERTABLE_FILEFORMAT;
 TOK_ALTERTABLE_LOCATION;
 TOK_ALTERTABLE_PROPERTIES;
 TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION;
+TOK_ALTERTABLE_DROPPROPERTIES;
+TOK_ALTERTABLE_SKEWED;
+TOK_ALTERTABLE_EXCHANGEPARTITION;
+TOK_ALTERTABLE_SKEWED_LOCATION;
+TOK_ALTERTABLE_BUCKETS;
+TOK_ALTERTABLE_CLUSTER_SORT;
+TOK_ALTERTABLE_COMPACT;
 TOK_ALTERINDEX_REBUILD;
 TOK_ALTERINDEX_PROPERTIES;
 TOK_MSCK;
@@ -177,7 +183,6 @@ TOK_TABCOLLIST;
 TOK_TABCOL;
 TOK_TABLECOMMENT;
 TOK_TABLEPARTCOLS;
-TOK_TABLEBUCKETS;
 TOK_TABLEROWFORMAT;
 TOK_TABLEROWFORMATFIELD;
 TOK_TABLEROWFORMATCOLLITEMS;
@@ -192,7 +197,6 @@ TOK_DISABLE;
 TOK_READONLY;
 TOK_NO_DROP;
 TOK_STORAGEHANDLER;
-TOK_ALTERTABLE_CLUSTER_SORT;
 TOK_NOT_CLUSTERED;
 TOK_NOT_SORTED;
 TOK_TABCOLNAME;
@@ -215,9 +219,9 @@ TOK_DROPMACRO;
 TOK_TEMPORARY;
 TOK_CREATEVIEW;
 TOK_DROPVIEW;
-TOK_ALTERVIEW_AS;
+TOK_ALTERVIEW;
 TOK_ALTERVIEW_PROPERTIES;
-TOK_DROPVIEW_PROPERTIES;
+TOK_ALTERVIEW_DROPPROPERTIES;
 TOK_ALTERVIEW_ADDPARTS;
 TOK_ALTERVIEW_DROPPARTS;
 TOK_ALTERVIEW_RENAME;
@@ -302,8 +306,6 @@ TOK_TABLESKEWED;
 TOK_TABCOLVALUE;
 TOK_TABCOLVALUE_PAIR;
 TOK_TABCOLVALUES;
-TOK_ALTERTABLE_SKEWED;
-TOK_ALTERTBLPART_SKEWED_LOCATION;
 TOK_SKEWED_LOCATIONS;
 TOK_SKEWED_LOCATION_LIST;
 TOK_SKEWED_LOCATION_MAP;
@@ -315,7 +317,6 @@ TOK_WINDOWSPEC;
 TOK_WINDOWVALUES;
 TOK_WINDOWRANGE;
 TOK_IGNOREPROTECTION;
-TOK_EXCHANGEPARTITION;
 TOK_SUBQUERY_EXPR;
 TOK_SUBQUERY_OP;
 TOK_SUBQUERY_OP_NOTIN;
@@ -328,9 +329,17 @@ TOK_FILE;
 TOK_JAR;
 TOK_RESOURCE_URI;
 TOK_RESOURCE_LIST;
-TOK_COMPACT;
 TOK_SHOW_COMPACTIONS;
 TOK_SHOW_TRANSACTIONS;
+TOK_DELETE_FROM;
+TOK_UPDATE_TABLE;
+TOK_SET_COLUMNS_CLAUSE;
+TOK_VALUE_ROW;
+TOK_VALUES_TABLE;
+TOK_VIRTUAL_TABLE;
+TOK_VIRTUAL_TABREF;
+TOK_ANONYMOUS;
+TOK_COL_NAME;
 }
 
 
@@ -469,6 +478,9 @@ import java.util.HashMap;
     xlateMap.put("KW_DEFINED", "DEFINED");
     xlateMap.put("KW_SUBQUERY", "SUBQUERY");
     xlateMap.put("KW_REWRITE", "REWRITE");
+    xlateMap.put("KW_UPDATE", "UPDATE");
+
+    xlateMap.put("KW_VALUES", "VALUES");
 
     // Operators
     xlateMap.put("DOT", ".");
@@ -638,6 +650,8 @@ execStatement
     | exportStatement
     | importStatement
     | ddlStatement
+    | deleteStatement
+    | updateStatement
     ;
 
 loadStatement
@@ -921,56 +935,62 @@ dropTableStatement
 alterStatement
 @init { pushMsg("alter statement", state); }
 @after { popMsg(state); }
-    : KW_ALTER!
-        (
-            KW_TABLE! alterTableStatementSuffix
-        |
-            KW_VIEW! alterViewStatementSuffix
-        |
-            KW_INDEX! alterIndexStatementSuffix
-        |
-            (KW_DATABASE|KW_SCHEMA)! alterDatabaseStatementSuffix
-        )
+    : KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^(TOK_ALTERTABLE tableName alterTableStatementSuffix)
+    | KW_ALTER KW_VIEW tableName KW_AS? alterViewStatementSuffix -> ^(TOK_ALTERVIEW tableName alterViewStatementSuffix)
+    | KW_ALTER KW_INDEX alterIndexStatementSuffix -> alterIndexStatementSuffix
+    | KW_ALTER (KW_DATABASE|KW_SCHEMA) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix
     ;
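
The restructured alterStatement rule is the heart of this change: tableName is now consumed once, up front, and becomes the first child of a single TOK_ALTERTABLE (or TOK_ALTERVIEW) root, instead of each suffix rule parsing and emitting its own copy of the table name. As an illustrative sketch (t1 and t2 are hypothetical tables; the exact child tokens come from the suffix rules below), a statement such as

    ALTER TABLE t1 RENAME TO t2

should now parse to a tree shaped roughly like

    ^(TOK_ALTERTABLE ^(TOK_TABNAME t1) ^(TOK_ALTERTABLE_RENAME ^(TOK_TABNAME t2)))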
 
 alterTableStatementSuffix
 @init { pushMsg("alter table statement", state); }
 @after { popMsg(state); }
-    : alterStatementSuffixRename
+    : alterStatementSuffixRename[true]
     | alterStatementSuffixAddCol
     | alterStatementSuffixRenameCol
     | alterStatementSuffixUpdateStatsCol
-    | alterStatementSuffixDropPartitions
-    | alterStatementSuffixAddPartitions
+    | alterStatementSuffixDropPartitions[true]
+    | alterStatementSuffixAddPartitions[true]
     | alterStatementSuffixTouch
     | alterStatementSuffixArchive
     | alterStatementSuffixUnArchive
     | alterStatementSuffixProperties
-    | alterTblPartitionStatement
     | alterStatementSuffixSkewedby
     | alterStatementSuffixExchangePartition
     | alterStatementPartitionKeyType
+    | partitionSpec? alterTblPartitionStatementSuffix -> alterTblPartitionStatementSuffix partitionSpec?
     ;
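
Note the new last alternative: the rewrite flips the input order, so even though a partition spec precedes the suffix in the source text, it is emitted after the suffix in the AST. A hypothetical example:

    ALTER TABLE t1 PARTITION (ds='2014-09-02') SET FILEFORMAT ORC

matches partitionSpec first but produces the alterTblPartitionStatementSuffix tree followed by the partitionSpec child, letting downstream analyzers read the partition spec from a fixed trailing position.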
 
+alterTblPartitionStatementSuffix
+@init {pushMsg("alter table partition statement suffix", state);}
+@after {popMsg(state);}
+  : alterStatementSuffixFileFormat
+  | alterStatementSuffixLocation
+  | alterStatementSuffixProtectMode
+  | alterStatementSuffixMergeFiles
+  | alterStatementSuffixSerdeProperties
+  | alterStatementSuffixRenamePart
+  | alterStatementSuffixBucketNum
+  | alterTblPartitionStatementSuffixSkewedLocation
+  | alterStatementSuffixClusterbySortby
+  | alterStatementSuffixCompact
+  | alterStatementSuffixUpdateStatsCol
+  ;
+
 alterStatementPartitionKeyType
 @init {msgs.push("alter partition key type"); }
 @after {msgs.pop();}
-	: tableName KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
-	-> ^(TOK_ALTERTABLE_PARTCOLTYPE tableName columnNameType)
+	: KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
+	-> ^(TOK_ALTERTABLE_PARTCOLTYPE columnNameType)
 	;
 
 alterViewStatementSuffix
 @init { pushMsg("alter view statement", state); }
 @after { popMsg(state); }
     : alterViewSuffixProperties
-    | alterStatementSuffixRename
-        -> ^(TOK_ALTERVIEW_RENAME alterStatementSuffixRename)
-    | alterStatementSuffixAddPartitions
-        -> ^(TOK_ALTERVIEW_ADDPARTS alterStatementSuffixAddPartitions)
-    | alterStatementSuffixDropPartitions
-        -> ^(TOK_ALTERVIEW_DROPPARTS alterStatementSuffixDropPartitions)
-    | name=tableName KW_AS selectStatementWithCTE
-        -> ^(TOK_ALTERVIEW_AS $name selectStatementWithCTE)
+    | alterStatementSuffixRename[false]
+    | alterStatementSuffixAddPartitions[false]
+    | alterStatementSuffixDropPartitions[false]
+    | selectStatementWithCTE
     ;
 
 alterIndexStatementSuffix
@@ -1008,33 +1028,34 @@ alterDatabaseSuffixSetOwner
     -> ^(TOK_ALTERDATABASE_OWNER $dbName principalName)
     ;
 
-alterStatementSuffixRename
+alterStatementSuffixRename[boolean table]
 @init { pushMsg("rename statement", state); }
 @after { popMsg(state); }
-    : oldName=tableName KW_RENAME KW_TO newName=tableName
-    -> ^(TOK_ALTERTABLE_RENAME $oldName $newName)
+    : KW_RENAME KW_TO tableName
+    -> { table }? ^(TOK_ALTERTABLE_RENAME tableName)
+    ->            ^(TOK_ALTERVIEW_RENAME tableName)
     ;
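
Parameterizing the suffix with a boolean is how the grammar now shares one production between ALTER TABLE and ALTER VIEW: the { table }? gated rewrite picks TOK_ALTERTABLE_RENAME when invoked from the table branch and TOK_ALTERVIEW_RENAME from the view branch. A sketch (t1, t2, v1, v2 hypothetical):

    ALTER TABLE t1 RENAME TO t2  =>  ^(TOK_ALTERTABLE ... ^(TOK_ALTERTABLE_RENAME ^(TOK_TABNAME t2)))
    ALTER VIEW v1 RENAME TO v2   =>  ^(TOK_ALTERVIEW ... ^(TOK_ALTERVIEW_RENAME ^(TOK_TABNAME v2)))

The same pattern is used for alterStatementSuffixAddPartitions and alterStatementSuffixDropPartitions below.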
 
 alterStatementSuffixAddCol
 @init { pushMsg("add column statement", state); }
 @after { popMsg(state); }
-    : tableName (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN
-    -> {$add != null}? ^(TOK_ALTERTABLE_ADDCOLS tableName columnNameTypeList)
-    ->                 ^(TOK_ALTERTABLE_REPLACECOLS tableName columnNameTypeList)
+    : (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN
+    -> {$add != null}? ^(TOK_ALTERTABLE_ADDCOLS columnNameTypeList)
+    ->                 ^(TOK_ALTERTABLE_REPLACECOLS columnNameTypeList)
     ;
 
 alterStatementSuffixRenameCol
 @init { pushMsg("rename column name", state); }
 @after { popMsg(state); }
-    : tableName KW_CHANGE KW_COLUMN? oldName=identifier newName=identifier colType (KW_COMMENT comment=StringLiteral)? alterStatementChangeColPosition?
-    ->^(TOK_ALTERTABLE_RENAMECOL tableName $oldName $newName colType $comment? alterStatementChangeColPosition?)
+    : KW_CHANGE KW_COLUMN? oldName=identifier newName=identifier colType (KW_COMMENT comment=StringLiteral)? alterStatementChangeColPosition?
+    ->^(TOK_ALTERTABLE_RENAMECOL $oldName $newName colType $comment? alterStatementChangeColPosition?)
     ;
 
 alterStatementSuffixUpdateStatsCol
 @init { pushMsg("update column statistics", state); }
 @after { popMsg(state); }
-    : identifier KW_UPDATE KW_STATISTICS KW_FOR KW_COLUMN? colName=identifier KW_SET tableProperties (KW_COMMENT comment=StringLiteral)?
-    ->^(TOK_ALTERTABLE_UPDATECOLSTATS identifier $colName tableProperties $comment?)
+    : KW_UPDATE KW_STATISTICS KW_FOR KW_COLUMN? colName=identifier KW_SET tableProperties (KW_COMMENT comment=StringLiteral)?
+    ->^(TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties $comment?)
     ;
 
 alterStatementChangeColPosition
@@ -1043,11 +1064,12 @@ alterStatementChangeColPosition
     -> ^(TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION $afterCol)
     ;
 
-alterStatementSuffixAddPartitions
+alterStatementSuffixAddPartitions[boolean table]
 @init { pushMsg("add partition statement", state); }
 @after { popMsg(state); }
-    : tableName KW_ADD ifNotExists? alterStatementSuffixAddPartitionsElement+
-    -> ^(TOK_ALTERTABLE_ADDPARTS tableName ifNotExists? alterStatementSuffixAddPartitionsElement+)
+    : KW_ADD ifNotExists? alterStatementSuffixAddPartitionsElement+
+    -> { table }? ^(TOK_ALTERTABLE_ADDPARTS ifNotExists? alterStatementSuffixAddPartitionsElement+)
+    ->            ^(TOK_ALTERVIEW_ADDPARTS ifNotExists? alterStatementSuffixAddPartitionsElement+)
     ;
 
 alterStatementSuffixAddPartitionsElement
@@ -1057,22 +1079,22 @@ alterStatementSuffixAddPartitionsElement
 alterStatementSuffixTouch
 @init { pushMsg("touch statement", state); }
 @after { popMsg(state); }
-    : tableName KW_TOUCH (partitionSpec)*
-    -> ^(TOK_ALTERTABLE_TOUCH tableName (partitionSpec)*)
+    : KW_TOUCH (partitionSpec)*
+    -> ^(TOK_ALTERTABLE_TOUCH (partitionSpec)*)
     ;
 
 alterStatementSuffixArchive
 @init { pushMsg("archive statement", state); }
 @after { popMsg(state); }
-    : tableName KW_ARCHIVE (partitionSpec)*
-    -> ^(TOK_ALTERTABLE_ARCHIVE tableName (partitionSpec)*)
+    : KW_ARCHIVE (partitionSpec)*
+    -> ^(TOK_ALTERTABLE_ARCHIVE (partitionSpec)*)
     ;
 
 alterStatementSuffixUnArchive
 @init { pushMsg("unarchive statement", state); }
 @after { popMsg(state); }
-    : tableName KW_UNARCHIVE (partitionSpec)*
-    -> ^(TOK_ALTERTABLE_UNARCHIVE tableName (partitionSpec)*)
+    : KW_UNARCHIVE (partitionSpec)*
+    -> ^(TOK_ALTERTABLE_UNARCHIVE (partitionSpec)*)
     ;
 
 partitionLocation
@@ -1082,29 +1104,30 @@ partitionLocation
       KW_LOCATION locn=StringLiteral -> ^(TOK_PARTITIONLOCATION $locn)
     ;
 
-alterStatementSuffixDropPartitions
+alterStatementSuffixDropPartitions[boolean table]
 @init { pushMsg("drop partition statement", state); }
 @after { popMsg(state); }
-    : tableName KW_DROP ifExists? dropPartitionSpec (COMMA dropPartitionSpec)* ignoreProtection?
-    -> ^(TOK_ALTERTABLE_DROPPARTS tableName dropPartitionSpec+ ifExists? ignoreProtection?)
+    : KW_DROP ifExists? dropPartitionSpec (COMMA dropPartitionSpec)* ignoreProtection?
+    -> { table }? ^(TOK_ALTERTABLE_DROPPARTS dropPartitionSpec+ ifExists? ignoreProtection?)
+    ->            ^(TOK_ALTERVIEW_DROPPARTS dropPartitionSpec+ ifExists? ignoreProtection?)
     ;
 
 alterStatementSuffixProperties
 @init { pushMsg("alter properties statement", state); }
 @after { popMsg(state); }
-    : tableName KW_SET KW_TBLPROPERTIES tableProperties
-    -> ^(TOK_ALTERTABLE_PROPERTIES tableName tableProperties)
-    | tableName KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
-    -> ^(TOK_DROPTABLE_PROPERTIES tableName tableProperties ifExists?)
+    : KW_SET KW_TBLPROPERTIES tableProperties
+    -> ^(TOK_ALTERTABLE_PROPERTIES tableProperties)
+    | KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
+    -> ^(TOK_ALTERTABLE_DROPPROPERTIES tableProperties ifExists?)
     ;
 
 alterViewSuffixProperties
 @init { pushMsg("alter view properties statement", state); }
 @after { popMsg(state); }
-    : tableName KW_SET KW_TBLPROPERTIES tableProperties
-    -> ^(TOK_ALTERVIEW_PROPERTIES tableName tableProperties)
-    | tableName KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
-    -> ^(TOK_DROPVIEW_PROPERTIES tableName tableProperties ifExists?)
+    : KW_SET KW_TBLPROPERTIES tableProperties
+    -> ^(TOK_ALTERVIEW_PROPERTIES tableProperties)
+    | KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
+    -> ^(TOK_ALTERVIEW_DROPPROPERTIES tableProperties ifExists?)
     ;
 
 alterStatementSuffixSerdeProperties
@@ -1123,29 +1146,6 @@ tablePartitionPrefix
   ->^(TOK_TABLE_PARTITION tableName partitionSpec?)
   ;
 
-alterTblPartitionStatement
-@init {pushMsg("alter table partition statement", state);}
-@after {popMsg(state);}
-  : tablePartitionPrefix alterTblPartitionStatementSuffix
-  -> ^(TOK_ALTERTABLE_PARTITION tablePartitionPrefix alterTblPartitionStatementSuffix)
-  ;
-
-alterTblPartitionStatementSuffix
-@init {pushMsg("alter table partition statement suffix", state);}
-@after {popMsg(state);}
-  : alterStatementSuffixFileFormat
-  | alterStatementSuffixLocation
-  | alterStatementSuffixProtectMode
-  | alterStatementSuffixMergeFiles
-  | alterStatementSuffixSerdeProperties
-  | alterStatementSuffixRenamePart
-  | alterStatementSuffixStatsPart
-  | alterStatementSuffixBucketNum
-  | alterTblPartitionStatementSuffixSkewedLocation
-  | alterStatementSuffixClusterbySortby
-  | alterStatementSuffixCompact
-  ;
-
 alterStatementSuffixFileFormat
 @init {pushMsg("alter fileformat statement", state); }
 @after {popMsg(state);}
@@ -1165,7 +1165,7 @@ alterTblPartitionStatementSuffixSkewedLo
 @init {pushMsg("alter partition skewed location", state);}
 @after {popMsg(state);}
   : KW_SET KW_SKEWED KW_LOCATION skewedLocations
-  -> ^(TOK_ALTERTBLPART_SKEWED_LOCATION skewedLocations)
+  -> ^(TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations)
   ;
   
 skewedLocations
@@ -1200,21 +1200,21 @@ alterStatementSuffixLocation
 alterStatementSuffixSkewedby
 @init {pushMsg("alter skewed by statement", state);}
 @after{popMsg(state);}
-	: tableName tableSkewed
-	->^(TOK_ALTERTABLE_SKEWED tableName tableSkewed)
+	: tableSkewed
+	->^(TOK_ALTERTABLE_SKEWED tableSkewed)
 	|
-	 tableName KW_NOT KW_SKEWED
-	->^(TOK_ALTERTABLE_SKEWED tableName)
+	 KW_NOT KW_SKEWED
+	->^(TOK_ALTERTABLE_SKEWED)
 	|
-	 tableName KW_NOT storedAsDirs
-	->^(TOK_ALTERTABLE_SKEWED tableName storedAsDirs)
+	 KW_NOT storedAsDirs
+	->^(TOK_ALTERTABLE_SKEWED storedAsDirs)
 	;
 
 alterStatementSuffixExchangePartition
 @init {pushMsg("alter exchange partition", state);}
 @after{popMsg(state);}
-    : tableName KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename=tableName
-    -> ^(TOK_EXCHANGEPARTITION tableName partitionSpec $exchangename)
+    : KW_EXCHANGE partitionSpec KW_WITH KW_TABLE exchangename=tableName
+    -> ^(TOK_ALTERTABLE_EXCHANGEPARTITION partitionSpec $exchangename)
     ;
 
 alterStatementSuffixProtectMode
@@ -1264,14 +1264,14 @@ alterStatementSuffixBucketNum
 @init { pushMsg("", state); }
 @after { popMsg(state); }
     : KW_INTO num=Number KW_BUCKETS
-    -> ^(TOK_TABLEBUCKETS $num)
+    -> ^(TOK_ALTERTABLE_BUCKETS $num)
     ;
 
 alterStatementSuffixCompact
 @init { msgs.push("compaction request"); }
 @after { msgs.pop(); }
     : KW_COMPACT compactType=StringLiteral
-    -> ^(TOK_COMPACT $compactType)
+    -> ^(TOK_ALTERTABLE_COMPACT $compactType)
     ;
 
 
@@ -1693,7 +1693,7 @@ tableBuckets
 @after { popMsg(state); }
     :
       KW_CLUSTERED KW_BY LPAREN bucketCols=columnNameList RPAREN (KW_SORTED KW_BY LPAREN sortCols=columnNameOrderList RPAREN)? KW_INTO num=Number KW_BUCKETS
-    -> ^(TOK_TABLEBUCKETS $bucketCols $sortCols? $num)
+    -> ^(TOK_ALTERTABLE_BUCKETS $bucketCols $sortCols? $num)
     ;
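
With TOK_TABLEBUCKETS removed, the CREATE TABLE path reuses TOK_ALTERTABLE_BUCKETS; the matching case label in SemanticAnalyzer.analyzeCreateTable is updated later in this same diff. For a hypothetical table,

    CREATE TABLE t1 (a INT, b STRING) CLUSTERED BY (a) SORTED BY (b) INTO 4 BUCKETS

should now yield ^(TOK_ALTERTABLE_BUCKETS $bucketCols $sortCols $num) with num = 4, the same token an ALTER TABLE ... INTO n BUCKETS produces.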
 
 tableSkewed
@@ -2095,11 +2095,28 @@ singleFromStatement
     ( b+=body )+ -> ^(TOK_QUERY fromClause body+)
     ;
 
+/*
+The valuesClause rule below ensures that the parse tree for
+"insert into table FOO values (1,2),(3,4)" looks the same as
+"insert into table FOO select a,b from (values(1,2),(3,4)) as BAR(a,b)" which itself is made to look
+very similar to the tree for "insert into table FOO select a,b from BAR".  Since virtual table name
+is implicit, it's represented as TOK_ANONYMOUS.
+*/
 regularBody[boolean topLevel]
    :
    i=insertClause
+   (
    s=selectStatement[topLevel]
      {$s.tree.getChild(1).replaceChildren(0, 0, $i.tree);} -> {$s.tree}
+     |
+     valuesClause
+      -> ^(TOK_QUERY
+            ^(TOK_FROM
+              ^(TOK_VIRTUAL_TABLE ^(TOK_VIRTUAL_TABREF ^(TOK_ANONYMOUS)) valuesClause)
+             )
+            ^(TOK_INSERT {$i.tree} ^(TOK_SELECT ^(TOK_SELEXPR TOK_ALLCOLREF)))
+          )
+   )
    |
    selectStatement[topLevel]
    ;
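
Concretely, under this rewrite a hypothetical

    INSERT INTO TABLE FOO VALUES (1,2),(3,4)

is shaped like

    ^(TOK_QUERY
       ^(TOK_FROM ^(TOK_VIRTUAL_TABLE ^(TOK_VIRTUAL_TABREF ^(TOK_ANONYMOUS)) valuesClause))
       ^(TOK_INSERT <insert clause> ^(TOK_SELECT ^(TOK_SELEXPR TOK_ALLCOLREF))))

that is, the same shape as "insert into table FOO select a,b from BAR", with the anonymous virtual table standing in for BAR.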
@@ -2208,3 +2225,34 @@ limitClause
    :
    KW_LIMIT num=Number -> ^(TOK_LIMIT $num)
    ;
+
+//DELETE FROM <tableName> WHERE ...;
+deleteStatement
+@init { pushMsg("delete statement", state); }
+@after { popMsg(state); }
+   :
+   KW_DELETE KW_FROM tableName (whereClause)? -> ^(TOK_DELETE_FROM tableName whereClause?)
+   ;
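
A sketch of the resulting tree (t1 and the predicate are hypothetical):

    DELETE FROM t1 WHERE a < 10
    =>  ^(TOK_DELETE_FROM ^(TOK_TABNAME t1) whereClause)

Note that, per the SemanticAnalyzer change later in this commit, TOK_DELETE_FROM currently fails fast with "DELETE is not (yet) implemented".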
+
+/*SET <columnName> = (3 + col2)*/
+columnAssignmentClause
+   :
+   tableOrColumn EQUAL^ atomExpression
+   ;
+
+/*SET col1 = 5, col2 = (4 + col4), ...*/
+setColumnsClause
+   :
+   KW_SET columnAssignmentClause (COMMA columnAssignmentClause)* -> ^(TOK_SET_COLUMNS_CLAUSE columnAssignmentClause* )
+   ;
+
+/* 
+  UPDATE <table> 
+  SET col1 = val1, col2 = val2... WHERE ...
+*/
+updateStatement
+@init { pushMsg("update statement", state); }
+@after { popMsg(state); }
+   :
+   KW_UPDATE tableName setColumnsClause whereClause? -> ^(TOK_UPDATE_TABLE tableName setColumnsClause whereClause?)
+   ;
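
And correspondingly for UPDATE (table and columns hypothetical); the EQUAL^ in columnAssignmentClause makes each assignment a subtree rooted at '=':

    UPDATE t1 SET col1 = 5, col2 = (4 + col4) WHERE a < 10
    =>  ^(TOK_UPDATE_TABLE ^(TOK_TABNAME t1)
          ^(TOK_SET_COLUMNS_CLAUSE ^(= col1 5) ^(= col2 (4 + col4)))
          whereClause)

Like DELETE, this parses but is rejected by the analyzer stub until UPDATE support lands.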

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g Tue Sep  2 19:56:56 2014
@@ -538,5 +538,5 @@ functionIdentifier
 
 nonReserved
     :
-    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS | KW_ALL | KW_DEFAULT | KW_NONE | KW_COMPACT | KW_COMPACTIONS | KW_TRANSACTIONS | KW_REWRITE | KW_AUTHORIZATION
+    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS | KW_ALL | KW_DEFAULT | KW_NONE | KW_COMPACT | KW_COMPACTIONS | KW_TRANSACTIONS | KW_REWRITE | KW_AUTHORIZATION | KW_VALUES
     ;
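
The only change here is appending KW_VALUES (KW_UPDATE and KW_DELETE were already listed), so VALUES stays usable as a plain identifier and pre-existing schemas keep parsing. A hypothetical example:

    CREATE TABLE t1 (values INT);
    SELECT values FROM t1;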

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java Tue Sep  2 19:56:56 2014
@@ -88,6 +88,7 @@ public class IndexUpdater {
       sb.append("ALTER INDEX ");
       sb.append(idx.getIndexName());
       sb.append(" ON ");
+      sb.append(idx.getDbName()).append('.');
       sb.append(idx.getOrigTableName());
       sb.append(" REBUILD");
       driver.compile(sb.toString(), false);
@@ -125,6 +126,7 @@ public class IndexUpdater {
     sb.append("ALTER INDEX ");
     sb.append(index.getIndexName());
     sb.append(" ON ");
+    sb.append(index.getDbName()).append('.');
     sb.append(index.getOrigTableName());
     sb.append(" PARTITION ");
     sb.append(ps.toString());
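
Both generated rebuild statements are now database-qualified, so index rebuilds still resolve correctly when the current session database differs from the indexed table's. Assuming a hypothetical index idx1 on table t1 in database db1, the built strings become:

    ALTER INDEX idx1 ON db1.t1 REBUILD
    ALTER INDEX idx1 ON db1.t1 PARTITION (ds='2014-09-02') ...

(the second statement continues as before with the partition spec appended).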

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Tue Sep  2 19:56:56 2014
@@ -66,26 +66,26 @@ public class QB {
   private HashMap<String, WindowingSpec> destToWindowingSpec;
 
   /*
-   * If this QB represents a SubQuery predicate then this will point to the SubQuery object.
+   * If this QB represents a  SubQuery predicate then this will point to the SubQuery object.
    */
   private QBSubQuery subQueryPredicateDef;
-  
-	/*
-	 * used to give a unique name to each SubQuery QB Currently there can be at
-	 * most 2 SubQueries in a Query: 1 in the Where clause, and 1 in the Having
-	 * clause.
-	 */
-	private int numSubQueryPredicates;
-	
-	/*
-	 * for now a top level QB can have 1 where clause SQ predicate.
-	 */
-	private QBSubQuery whereClauseSubQueryPredicate;
-	
+
+  /*
+   * used to give a unique name to each SubQuery QB Currently there can be at
+   * most 2 SubQueries in a Query: 1 in the Where clause, and 1 in the Having
+   * clause.
+   */
+  private int numSubQueryPredicates;
+
   /*
    * for now a top level QB can have 1 where clause SQ predicate.
    */
-	private QBSubQuery havingClauseSubQueryPredicate;
+  private QBSubQuery whereClauseSubQueryPredicate;
+
+  /*
+   * for now a top level QB can have 1 having clause SQ predicate.
+   */
+  private QBSubQuery havingClauseSubQueryPredicate;
 
   // results
 
@@ -341,28 +341,28 @@ public class QB {
   protected QBSubQuery getSubQueryPredicateDef() {
     return subQueryPredicateDef;
   }
-  
-	protected int getNumSubQueryPredicates() {
-		return numSubQueryPredicates;
-	}
-
-	protected int incrNumSubQueryPredicates() {
-		return ++numSubQueryPredicates;
-	}
-	
-	void setWhereClauseSubQueryPredicate(QBSubQuery sq) {
-	  whereClauseSubQueryPredicate = sq;
-  }
-	
-	public QBSubQuery getWhereClauseSubQueryPredicate() {
-	  return whereClauseSubQueryPredicate;
-	}
-	
-	void setHavingClauseSubQueryPredicate(QBSubQuery sq) {
+
+  protected int getNumSubQueryPredicates() {
+    return numSubQueryPredicates;
+  }
+
+  protected int incrNumSubQueryPredicates() {
+    return ++numSubQueryPredicates;
+  }
+
+  void setWhereClauseSubQueryPredicate(QBSubQuery sq) {
+    whereClauseSubQueryPredicate = sq;
+  }
+
+  public QBSubQuery getWhereClauseSubQueryPredicate() {
+    return whereClauseSubQueryPredicate;
+  }
+
+  void setHavingClauseSubQueryPredicate(QBSubQuery sq) {
     havingClauseSubQueryPredicate = sq;
   }
-	
-	public QBSubQuery getHavingClauseSubQueryPredicate() {
+
+  public QBSubQuery getHavingClauseSubQueryPredicate() {
     return havingClauseSubQueryPredicate;
   }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java Tue Sep  2 19:56:56 2014
@@ -38,7 +38,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.ql.parse.SubQueryDiagnostic.QBSubQueryRewrite;
 
 public class QBSubQuery implements ISubQueryJoinInfo {
-  
+
   public static enum SubQueryType {
     EXISTS,
     NOT_EXISTS,
@@ -149,16 +149,16 @@ public class QBSubQuery implements ISubQ
   }
 
   /*
-   * This class captures the information about a 
+   * This class captures the information about a
    * conjunct in the where clause of the SubQuery.
   * For an equality predicate it captures for each side:
    * - the AST
    * - the type of Expression (basically what columns are referenced)
-   * - for Expressions that refer the parent it captures the 
+   * - for Expressions that refer to the parent it captures the
    *   parent's ColumnInfo. In case of outer Aggregation expressions
    *   we need this to introduce a new mapping in the OuterQuery
    *   RowResolver. A join condition must use qualified column references,
-   *   so we generate a new name for the aggr expression and use it in the 
+   *   so we generate a new name for the aggr expression and use it in the
    *   joining condition.
    *   For e.g.
    *   having exists ( select x from R2 where y = min(R1.z) )
@@ -174,8 +174,8 @@ public class QBSubQuery implements ISubQ
     private final ColumnInfo leftOuterColInfo;
     private final ColumnInfo rightOuterColInfo;
 
-   Conjunct(ASTNode leftExpr, 
-        ASTNode rightExpr, 
+   Conjunct(ASTNode leftExpr,
+        ASTNode rightExpr,
         ExprType leftExprType,
         ExprType rightExprType,
         ColumnInfo leftOuterColInfo,
@@ -239,8 +239,8 @@ public class QBSubQuery implements ISubQ
     Stack<Node> stack;
 
     ConjunctAnalyzer(RowResolver parentQueryRR,
-    		boolean forHavingClause,
-    		String parentQueryNewAlias) {
+        boolean forHavingClause,
+        String parentQueryNewAlias) {
       this.parentQueryRR = parentQueryRR;
       defaultExprProcessor = new DefaultExprProcessor();
       this.forHavingClause = forHavingClause;
@@ -260,13 +260,13 @@ public class QBSubQuery implements ISubQ
     private ObjectPair<ExprType,ColumnInfo> analyzeExpr(ASTNode expr) {
       ColumnInfo cInfo = null;
       if ( forHavingClause ) {
-      	try {
-      	  cInfo = parentQueryRR.getExpression(expr);
-      		if ( cInfo != null) {
-      		    return ObjectPair.create(ExprType.REFERS_PARENT, cInfo);
-      	    }
-      	} catch(SemanticException se) {
-      	}
+        try {
+          cInfo = parentQueryRR.getExpression(expr);
+          if ( cInfo != null) {
+              return ObjectPair.create(ExprType.REFERS_PARENT, cInfo);
+            }
+        } catch(SemanticException se) {
+        }
       }
       if ( expr.getType() == HiveParser.DOT) {
         ASTNode dot = firstDot(expr);
@@ -308,12 +308,12 @@ public class QBSubQuery implements ISubQ
         ObjectPair<ExprType,ColumnInfo> leftInfo = analyzeExpr(left);
         ObjectPair<ExprType,ColumnInfo> rightInfo = analyzeExpr(right);
 
-        return new Conjunct(left, right, 
+        return new Conjunct(left, right,
             leftInfo.getFirst(), rightInfo.getFirst(),
             leftInfo.getSecond(), rightInfo.getSecond());
       } else {
         ObjectPair<ExprType,ColumnInfo> sqExprInfo = analyzeExpr(conjunct);
-        return new Conjunct(conjunct, null, 
+        return new Conjunct(conjunct, null,
             sqExprInfo.getFirst(), null,
             sqExprInfo.getSecond(), sqExprInfo.getSecond());
       }
@@ -354,86 +354,86 @@ public class QBSubQuery implements ISubQ
   }
 
   /*
-   * When transforming a Not In SubQuery we need to check for nulls in the 
+   * When transforming a Not In SubQuery we need to check for nulls in the
    * Joining expressions of the SubQuery. If there are nulls then the SubQuery always
-   * return false. For more details see 
+   * returns false. For more details see
    * https://issues.apache.org/jira/secure/attachment/12614003/SubQuerySpec.pdf
-   * 
+   *
    * Basically, SQL semantics say that:
    * - R1.A not in (null, 1, 2, ...)
-   *   is always false. 
-   *   A 'not in' operator is equivalent to a '<> all'. Since a not equal check with null 
+   *   is always false.
+   *   A 'not in' operator is equivalent to a '<> all'. Since a not equal check with null
   *   returns false, a not in predicate against a set with a 'null' value always returns false.
-   *   
+   *
    * So for not in SubQuery predicates:
    * - we join in a null count predicate.
    * - And the joining condition is that the 'Null Count' query has a count of 0.
-   *   
+   *
    */
   class NotInCheck implements ISubQueryJoinInfo {
-    
+
     private static final String CNT_ALIAS = "c1";
-    
+
     /*
      * expressions in SubQ that are joined to the Outer Query.
      */
     List<ASTNode> subQryCorrExprs;
-    
+
     /*
      * row resolver of the SubQuery.
      * Set by the SemanticAnalyzer after the Plan for the SubQuery is genned.
     * This is needed in case the SubQuery select list contains a TOK_ALLCOLREF
      */
     RowResolver sqRR;
-    
+
     NotInCheck() {
       subQryCorrExprs = new ArrayList<ASTNode>();
     }
-    
+
     void addCorrExpr(ASTNode corrExpr) {
       subQryCorrExprs.add(corrExpr);
     }
-    
+
     public ASTNode getSubQueryAST() {
       ASTNode ast = SubQueryUtils.buildNotInNullCheckQuery(
-          QBSubQuery.this.getSubQueryAST(), 
-          QBSubQuery.this.getAlias(), 
-          CNT_ALIAS, 
+          QBSubQuery.this.getSubQueryAST(),
+          QBSubQuery.this.getAlias(),
+          CNT_ALIAS,
           subQryCorrExprs,
           sqRR);
       SubQueryUtils.setOriginDeep(ast, QBSubQuery.this.originalSQASTOrigin);
       return ast;
     }
-    
+
     public String getAlias() {
       return QBSubQuery.this.getAlias() + "_notin_nullcheck";
     }
-    
+
     public JoinType getJoinType() {
       return JoinType.LEFTSEMI;
     }
-    
+
     public ASTNode getJoinConditionAST() {
-      ASTNode ast = 
+      ASTNode ast =
           SubQueryUtils.buildNotInNullJoinCond(getAlias(), CNT_ALIAS);
       SubQueryUtils.setOriginDeep(ast, QBSubQuery.this.originalSQASTOrigin);
       return ast;
     }
-    
+
     public QBSubQuery getSubQuery() {
       return QBSubQuery.this;
     }
-    
+
     public String getOuterQueryId() {
       return QBSubQuery.this.getOuterQueryId();
     }
-    
+
     void setSQRR(RowResolver sqRR) {
       this.sqRR = sqRR;
     }
-        
+
   }
-  
+
   private final String outerQueryId;
   private final int sqIdx;
   private final String alias;
@@ -455,11 +455,11 @@ public class QBSubQuery implements ISubQ
   private int numOfCorrelationExprsAddedToSQSelect;
 
   private boolean groupbyAddedToSQ;
-  
+
   private int numOuterCorrExprsForHaving;
-  
+
   private NotInCheck notInCheck;
-  
+
   private QBSubQueryRewrite subQueryDiagnostic;
 
   public QBSubQuery(String outerQueryId,
@@ -483,11 +483,11 @@ public class QBSubQuery implements ISubQ
     originalSQASTOrigin = new ASTNodeOrigin("SubQuery", alias, s, alias, originalSQAST);
     numOfCorrelationExprsAddedToSQSelect = 0;
     groupbyAddedToSQ = false;
-    
+
     if ( operator.getType() == SubQueryType.NOT_IN ) {
       notInCheck = new NotInCheck();
     }
-    
+
     subQueryDiagnostic = SubQueryDiagnostic.getRewrite(this, ctx.getTokenRewriteStream(), ctx);
   }
 
@@ -500,18 +500,18 @@ public class QBSubQuery implements ISubQ
   public SubQueryTypeDef getOperator() {
     return operator;
   }
-  
+
   public ASTNode getOriginalSubQueryASTForRewrite() {
     return (operator.getType() == SubQueryType.NOT_EXISTS
-        || operator.getType() == SubQueryType.NOT_IN ? 
-        (ASTNode) originalSQASTOrigin.getUsageNode().getParent() : 
+        || operator.getType() == SubQueryType.NOT_IN ?
+        (ASTNode) originalSQASTOrigin.getUsageNode().getParent() :
         originalSQASTOrigin.getUsageNode());
   }
 
   void validateAndRewriteAST(RowResolver outerQueryRR,
-		  boolean forHavingClause,
-		  String outerQueryAlias,
-		  Set<String> outerQryAliases) throws SemanticException {
+      boolean forHavingClause,
+      String outerQueryAlias,
+      Set<String> outerQryAliases) throws SemanticException {
 
     ASTNode selectClause = (ASTNode) subQueryAST.getChild(1).getChild(1);
 
@@ -519,12 +519,12 @@ public class QBSubQuery implements ISubQ
     if ( selectClause.getChild(0).getType() == HiveParser.TOK_HINTLIST ) {
       selectExprStart = 1;
     }
-    
+
     /*
      * Restriction.16.s :: Correlated Expression in Outer Query must not contain
      * unqualified column references.
      */
-    if ( parentQueryExpression != null && !forHavingClause ) { 
+    if ( parentQueryExpression != null && !forHavingClause ) {
         ASTNode u = SubQueryUtils.hasUnQualifiedColumnReferences(parentQueryExpression);
         if ( u != null ) {
           subQueryAST.setOrigin(originalSQASTOrigin);
@@ -532,7 +532,7 @@ public class QBSubQuery implements ISubQ
               u, "Correlating expression cannot contain unqualified column references."));
         }
     }
-    
+
     /*
      * Restriction 17.s :: SubQuery cannot use the same table alias as one used in
      * the Outer Query.
@@ -546,14 +546,14 @@ public class QBSubQuery implements ISubQ
     }
     if ( sharedAlias != null) {
       ASTNode whereClause = SubQueryUtils.subQueryWhere(subQueryAST);
-      
+
       if ( whereClause != null ) {
         ASTNode u = SubQueryUtils.hasUnQualifiedColumnReferences(whereClause);
         if ( u != null ) {
           subQueryAST.setOrigin(originalSQASTOrigin);
           throw new SemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
               u, "SubQuery cannot use the table alias: " + sharedAlias + "; " +
-              		"this is also an alias in the Outer Query and SubQuery contains a unqualified column reference"));
+                  "this is also an alias in the Outer Query and SubQuery contains an unqualified column reference"));
         }
       }
     }
@@ -641,25 +641,25 @@ public class QBSubQuery implements ISubQ
   }
 
   void buildJoinCondition(RowResolver outerQueryRR, RowResolver sqRR,
-		  boolean forHavingClause,
-		  String outerQueryAlias) throws SemanticException {
+      boolean forHavingClause,
+      String outerQueryAlias) throws SemanticException {
     ASTNode parentQueryJoinCond = null;
 
     if ( parentQueryExpression != null ) {
-      
+
       ColumnInfo outerQueryCol = null;
       try {
         outerQueryCol = outerQueryRR.getExpression(parentQueryExpression);
       } catch(SemanticException se) {
       }
-      
+
       parentQueryJoinCond = SubQueryUtils.buildOuterQryToSQJoinCond(
         getOuterQueryExpression(),
         alias,
         sqRR);
-      
+
       if ( outerQueryCol != null ) {
-        rewriteCorrConjunctForHaving(parentQueryJoinCond, true, 
+        rewriteCorrConjunctForHaving(parentQueryJoinCond, true,
             outerQueryAlias, outerQueryRR, outerQueryCol);
       }
       subQueryDiagnostic.addJoinCondition(parentQueryJoinCond, outerQueryCol != null, true);
@@ -682,10 +682,10 @@ public class QBSubQuery implements ISubQ
   ASTNode updateOuterQueryFilter(ASTNode outerQryFilter) {
     if (postJoinConditionAST == null ) {
       return outerQryFilter;
-    }  
-    
+    }
+
     subQueryDiagnostic.addPostJoinCondition(postJoinConditionAST);
-    
+
     if ( outerQryFilter == null ) {
       return postJoinConditionAST;
     }
@@ -738,7 +738,7 @@ public class QBSubQuery implements ISubQ
    * Additional things for Having clause:
    * - A correlation predicate may refer to an aggregation expression.
    * - This introduces 2 twists to the rewrite:
-   *   a. When analyzing equality predicates we need to analyze each side 
+   *   a. When analyzing equality predicates we need to analyze each side
    *      to see if it is an aggregation expression from the Outer Query.
    *      So for e.g. this is a valid correlation predicate:
    *         R2.x = min(R1.y)
@@ -748,12 +748,12 @@ public class QBSubQuery implements ISubQ
    *      to contain a qualified column references.
    *      We handle this by generating a new name for the aggregation expression,
    *      like R1._gby_sq_col_1 and adding this mapping to the Outer Query's
-   *      Row Resolver. Then we construct a joining predicate using this new 
+   *      Row Resolver. Then we construct a joining predicate using this new
    *      name; so in our e.g. the condition would be: R2.x = R1._gby_sq_col_1
    */
   private void rewrite(RowResolver parentQueryRR,
-		  boolean forHavingClause,
-		  String outerQueryAlias) throws SemanticException {
+      boolean forHavingClause,
+      String outerQueryAlias) throws SemanticException {
     ASTNode selectClause = (ASTNode) subQueryAST.getChild(1).getChild(1);
     ASTNode whereClause = SubQueryUtils.subQueryWhere(subQueryAST);
 
@@ -766,7 +766,7 @@ public class QBSubQuery implements ISubQ
     SubQueryUtils.extractConjuncts(searchCond, conjuncts);
 
     ConjunctAnalyzer conjunctAnalyzer = new ConjunctAnalyzer(parentQueryRR,
-    		forHavingClause, outerQueryAlias);
+        forHavingClause, outerQueryAlias);
     ASTNode sqNewSearchCond = null;
 
     for(ASTNode conjunctAST : conjuncts) {
@@ -805,7 +805,7 @@ public class QBSubQuery implements ISubQ
           corrCondLeftIsRewritten = true;
           if ( forHavingClause && conjunct.getRightOuterColInfo() != null ) {
             corrCondRightIsRewritten = true;
-            rewriteCorrConjunctForHaving(conjunctAST, false, outerQueryAlias, 
+            rewriteCorrConjunctForHaving(conjunctAST, false, outerQueryAlias,
                 parentQueryRR, conjunct.getRightOuterColInfo());
           }
           ASTNode joinPredciate = SubQueryUtils.alterCorrelatedPredicate(
@@ -829,7 +829,7 @@ public class QBSubQuery implements ISubQ
           corrCondRightIsRewritten = true;
           if ( forHavingClause && conjunct.getLeftOuterColInfo() != null ) {
             corrCondLeftIsRewritten = true;
-            rewriteCorrConjunctForHaving(conjunctAST, true, outerQueryAlias, 
+            rewriteCorrConjunctForHaving(conjunctAST, true, outerQueryAlias,
                 parentQueryRR, conjunct.getLeftOuterColInfo());
           }
           ASTNode joinPredciate = SubQueryUtils.alterCorrelatedPredicate(
@@ -901,7 +901,7 @@ public class QBSubQuery implements ISubQ
     for(ASTNode child : newChildren ) {
       subQueryAST.addChild(child);
     }
-    
+
     subQueryDiagnostic.setAddGroupByClause();
 
     return groupBy;
@@ -927,26 +927,26 @@ public class QBSubQuery implements ISubQ
   public int getNumOfCorrelationExprsAddedToSQSelect() {
     return numOfCorrelationExprsAddedToSQSelect;
   }
-  
-    
+
+
   public QBSubQueryRewrite getDiagnostic() {
     return subQueryDiagnostic;
   }
-  
+
   public QBSubQuery getSubQuery() {
     return this;
   }
-  
+
   NotInCheck getNotInCheck() {
     return notInCheck;
   }
-  
+
   private void rewriteCorrConjunctForHaving(ASTNode conjunctASTNode,
       boolean refersLeft,
       String outerQueryAlias,
       RowResolver outerQueryRR,
       ColumnInfo outerQueryCol) {
-    
+
     String newColAlias = "_gby_sq_col_" + numOuterCorrExprsForHaving++;
     ASTNode outerExprForCorr = SubQueryUtils.createColRefAST(outerQueryAlias, newColAlias);
     if ( refersLeft ) {
@@ -956,5 +956,5 @@ public class QBSubQuery implements ISubQ
     }
     outerQueryRR.put(outerQueryAlias, newColAlias, outerQueryCol);
   }
-      
+
 }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java Tue Sep  2 19:56:56 2014
@@ -98,7 +98,7 @@ public class RowResolver implements Seri
 
   public void put(String tab_alias, String col_alias, ColumnInfo colInfo) {
     if (!addMappingOnly(tab_alias, col_alias, colInfo)) {
-    	rowSchema.getSignature().add(colInfo);
+      rowSchema.getSignature().add(colInfo);
     }
   }
 
@@ -289,7 +289,7 @@ public class RowResolver implements Seri
   public boolean getIsExprResolver() {
     return isExprResolver;
   }
-  
+
   public String[] getAlternateMappings(String internalName) {
     return altInvRslvMap.get(internalName);
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Tue Sep  2 19:56:56 2014
@@ -972,6 +972,8 @@ public class SemanticAnalyzer extends Ba
         ASTNode frm = (ASTNode) ast.getChild(0);
         if (frm.getToken().getType() == HiveParser.TOK_TABREF) {
           processTable(qb, frm);
+        } else if (frm.getToken().getType() == HiveParser.TOK_VIRTUAL_TABLE) {
+          throw new RuntimeException("VALUES() clause is not fully supported yet...");
         } else if (frm.getToken().getType() == HiveParser.TOK_SUBQUERY) {
           processSubQuery(qb, frm);
         } else if (frm.getToken().getType() == HiveParser.TOK_LATERAL_VIEW ||
@@ -1164,6 +1166,10 @@ public class SemanticAnalyzer extends Ba
       case HiveParser.TOK_CTE:
         processCTE(qb, ast);
         break;
+      case HiveParser.TOK_DELETE_FROM:
+        throw new RuntimeException("DELETE is not (yet) implemented...");
+      case HiveParser.TOK_UPDATE_TABLE:
+        throw new RuntimeException("UPDATE is not (yet) implemented...");
       default:
         skipRecursion = false;
         break;
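
These branches are placeholders wired to the new grammar tokens: TOK_VIRTUAL_TABLE, TOK_DELETE_FROM and TOK_UPDATE_TABLE now reach the analyzer, but plan generation for them is deferred, so each fails fast. For example, a hypothetical

    INSERT INTO TABLE t1 VALUES (1, 'x')

parses cleanly but then hits the "VALUES() clause is not fully supported yet..." error above.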
@@ -2233,8 +2239,8 @@ public class SemanticAnalyzer extends Ba
         String havingInputAlias = null;
 
         if ( forHavingClause ) {
-        	havingInputAlias = "gby_sq" + sqIdx;
-        	aliasToOpInfo.put(havingInputAlias, input);
+          havingInputAlias = "gby_sq" + sqIdx;
+          aliasToOpInfo.put(havingInputAlias, input);
         }
 
         subQuery.validateAndRewriteAST(inputRR, forHavingClause, havingInputAlias, aliasToOpInfo.keySet());
@@ -2345,7 +2351,10 @@ public class SemanticAnalyzer extends Ba
     ExprNodeDesc filterPred = null;
     List<Boolean> nullSafes = joinTree.getNullSafes();
     for (int i = 0; i < joinKeys.length; i++) {
-      if ( nullSafes.get(i)) {
+      if (nullSafes.get(i) || (joinKeys[i] instanceof ExprNodeColumnDesc &&
+         ((ExprNodeColumnDesc)joinKeys[i]).getIsPartitionColOrVirtualCol())) {
+        // no need to generate is not null predicate for partitioning or
+        // virtual column, since those columns can never be null.
         continue;
       }
       List<ExprNodeDesc> args = new ArrayList<ExprNodeDesc>();
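
The widened condition skips the implicit IS NOT NULL filter normally synthesized under each equi-join key whenever the key is a partition or virtual column, which by construction cannot be null. A sketch with hypothetical tables both partitioned on ds:

    SELECT ... FROM t1 JOIN t2 ON (t1.ds = t2.ds)

previously picked up redundant "t1.ds is not null and t2.ds is not null" predicates; after this change such keys are left alone, as they already were for null-safe (<=>) join keys.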
@@ -9402,7 +9411,7 @@ public class SemanticAnalyzer extends Ba
 
     // analyze create view command
     if (ast.getToken().getType() == HiveParser.TOK_CREATEVIEW ||
-        ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_AS) {
+        (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW && ast.getChild(1).getType() == HiveParser.TOK_QUERY)) {
       child = analyzeCreateView(ast, qb);
       SessionState.get().setCommandType(HiveOperation.CREATEVIEW);
       if (child == null) {
@@ -9410,7 +9419,7 @@ public class SemanticAnalyzer extends Ba
       }
       viewSelect = child;
       // prevent view from referencing itself
-      viewsExpanded.add(SessionState.get().getCurrentDatabase() + "." + createVwDesc.getViewName());
+      viewsExpanded.add(createVwDesc.getViewName());
     }
 
     // continue analyzing from the child ASTNode.
@@ -9511,6 +9520,11 @@ public class SemanticAnalyzer extends Ba
 
     LOG.info("Completed plan generation");
 
+    // put accessed columns to readEntity
+    if (HiveConf.getBoolVar(this.conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS)) {
+      putAccessedColumnsToReadEntity(inputs, columnAccessInfo);
+    }
+
     if (!ctx.getExplain()) {
       // if desired check we're not going over partition scan limits
       enforceScanLimits(pCtx, origFetchTask);
@@ -9519,6 +9533,26 @@ public class SemanticAnalyzer extends Ba
     return;
   }
 
+  private void putAccessedColumnsToReadEntity(HashSet<ReadEntity> inputs, ColumnAccessInfo columnAccessInfo) {
+    Map<String, List<String>> tableToColumnAccessMap = columnAccessInfo.getTableToColumnAccessMap();
+    if (tableToColumnAccessMap != null && !tableToColumnAccessMap.isEmpty()) {
+      for(ReadEntity entity: inputs) {
+        switch (entity.getType()) {
+          case TABLE:
+            entity.getAccessedColumns().addAll(
+                tableToColumnAccessMap.get(entity.getTable().getCompleteName()));
+            break;
+          case PARTITION:
+            entity.getAccessedColumns().addAll(
+                tableToColumnAccessMap.get(entity.getPartition().getTable().getCompleteName()));
+            break;
+          default:
+            // no-op
+        }
+      }
+    }
+  }
+
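The new helper copies the per-table column-access map onto each matching ReadEntity: directly for TABLE entities, via the owning table for PARTITION entities, and skipping everything else. It only runs when scan-column statistics collection is enabled, which corresponds to the HIVE_STATS_COLLECT_SCANCOLS setting, i.e. (value shown for illustration):

    set hive.stats.collect.scancols=true;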
   private void enforceScanLimits(ParseContext pCtx, FetchTask fTask)
       throws SemanticException {
     int scanLimit = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVELIMITTABLESCANPARTITION);
@@ -9992,7 +10026,9 @@ public class SemanticAnalyzer extends Ba
    */
   private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
       throws SemanticException {
-    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
+    String[] qualifiedTabName = getQualifiedTableName((ASTNode) ast.getChild(0));
+    String dbDotTab = getDotName(qualifiedTabName);
+
     String likeTableName = null;
     List<FieldSchema> cols = new ArrayList<FieldSchema>();
     List<FieldSchema> partCols = new ArrayList<FieldSchema>();
@@ -10018,7 +10054,7 @@ public class SemanticAnalyzer extends Ba
     RowFormatParams rowFormatParams = new RowFormatParams();
     StorageFormat storageFormat = new StorageFormat(conf);
 
-    LOG.info("Creating table " + tableName + " position="
+    LOG.info("Creating table " + dbDotTab + " position="
         + ast.getCharPositionInLine());
     int numCh = ast.getChildCount();
 
@@ -10090,7 +10126,7 @@ public class SemanticAnalyzer extends Ba
       case HiveParser.TOK_TABLEPARTCOLS:
         partCols = getColumns((ASTNode) child.getChild(0), false);
         break;
-      case HiveParser.TOK_TABLEBUCKETS:
+      case HiveParser.TOK_ALTERTABLE_BUCKETS:
         bucketCols = getColumnNames((ASTNode) child.getChild(0));
         if (child.getChildCount() == 2) {
           numBuckets = (Integer.valueOf(child.getChild(1).getText()))
@@ -10149,7 +10185,7 @@ public class SemanticAnalyzer extends Ba
     // check for existence of table
     if (ifNotExists) {
       try {
-        Table table = getTable(tableName, false);
+        Table table = getTable(qualifiedTabName, false);
         if (table != null) { // table exists
           return null;
         }
@@ -10159,11 +10195,7 @@ public class SemanticAnalyzer extends Ba
       }
     }
 
-    String[] qualified = Hive.getQualifiedNames(tableName);
-    String dbName = qualified.length == 1 ? SessionState.get().getCurrentDatabase() : qualified[0];
-    Database database  = getDatabase(dbName);
-    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
-    outputs.add(new WriteEntity(new Table(dbName, tableName), WriteEntity.WriteType.DDL_NO_LOCK));
+    addDbAndTabToOutputs(qualifiedTabName);
 
     if (isTemporary) {
       if (partCols.size() > 0) {
@@ -10192,7 +10224,7 @@ public class SemanticAnalyzer extends Ba
     case CREATE_TABLE: // REGULAR CREATE TABLE DDL
       tblProps = addDefaultProperties(tblProps);
 
-      crtTblDesc = new CreateTableDesc(tableName, isExt, isTemporary, cols, partCols,
+      crtTblDesc = new CreateTableDesc(dbDotTab, isExt, isTemporary, cols, partCols,
           bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
           rowFormatParams.fieldEscape,
           rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
@@ -10221,7 +10253,7 @@ public class SemanticAnalyzer extends Ba
               + "and source table in CREATE TABLE LIKE is partitioned.");
         }
       }
-      CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(tableName, isExt, isTemporary,
+      CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(dbDotTab, isExt, isTemporary,
           storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location,
           storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists,
           likeTableName);
@@ -10234,9 +10266,9 @@ public class SemanticAnalyzer extends Ba
 
       // Verify that the table does not already exist
       try {
-        Table dumpTable = db.newTable(tableName);
+        Table dumpTable = db.newTable(dbDotTab);
         if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false)) {
-          throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(tableName));
+          throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(dbDotTab));
         }
       } catch (HiveException e) {
         throw new SemanticException(e);
@@ -10244,11 +10276,10 @@ public class SemanticAnalyzer extends Ba
 
       tblProps = addDefaultProperties(tblProps);
 
-      crtTblDesc = new CreateTableDesc(dbName, tableName, isExt, isTemporary, cols, partCols,
-          bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
-          rowFormatParams.fieldEscape,
-          rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
-          comment, storageFormat.getInputFormat(),
+      crtTblDesc = new CreateTableDesc(qualifiedTabName[0], dbDotTab, isExt, isTemporary, cols,
+          partCols, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
+          rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim,
+          rowFormatParams.lineDelim, comment, storageFormat.getInputFormat(),
           storageFormat.getOutputFormat(), location, storageFormat.getSerde(),
           storageFormat.getStorageHandler(), storageFormat.getSerdeProps(), tblProps, ifNotExists,
           skewedColNames, skewedValues);
@@ -10265,9 +10296,17 @@ public class SemanticAnalyzer extends Ba
     return null;
   }
 
+  private void addDbAndTabToOutputs(String[] qualifiedTabName) throws SemanticException {
+    Database database  = getDatabase(qualifiedTabName[0]);
+    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
+    outputs.add(new WriteEntity(new Table(qualifiedTabName[0], qualifiedTabName[1]),
+        WriteEntity.WriteType.DDL_NO_LOCK));
+  }
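Factoring this out means CREATE TABLE and (below) CREATE VIEW record write entities the same way. For a hypothetical

    CREATE TABLE db1.t1 (a INT)

qualifiedTabName resolves to {"db1", "t1"}, dbDotTab to "db1.t1", and the outputs gain a DDL_SHARED entity for database db1 plus a DDL_NO_LOCK entity for table db1.t1.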
+
   private ASTNode analyzeCreateView(ASTNode ast, QB qb)
       throws SemanticException {
-    String tableName = getUnescapedName((ASTNode) ast.getChild(0));
+    String[] qualTabName = getQualifiedTableName((ASTNode) ast.getChild(0));
+    String dbDotTable = getDotName(qualTabName);
     List<FieldSchema> cols = null;
     boolean ifNotExists = false;
     boolean orReplace = false;
@@ -10277,7 +10316,7 @@ public class SemanticAnalyzer extends Ba
     Map<String, String> tblProps = null;
     List<String> partColNames = null;
 
-    LOG.info("Creating view " + tableName + " position="
+    LOG.info("Creating view " + dbDotTable + " position="
         + ast.getCharPositionInLine());
     int numCh = ast.getChildCount();
     for (int num = 1; num < numCh; num++) {
@@ -10313,19 +10352,21 @@ public class SemanticAnalyzer extends Ba
       throw new SemanticException("Can't combine IF NOT EXISTS and OR REPLACE.");
     }
 
-    if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_AS) {
+    if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW &&
+        ast.getChild(1).getType() == HiveParser.TOK_QUERY) {
       isAlterViewAs = true;
       orReplace = true;
     }
 
     createVwDesc = new CreateViewDesc(
-      tableName, cols, comment, tblProps, partColNames,
+      dbDotTable, cols, comment, tblProps, partColNames,
       ifNotExists, orReplace, isAlterViewAs);
 
     unparseTranslator.enable();
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         createVwDesc), conf));
 
+    addDbAndTabToOutputs(qualTabName);
     return selectStmt;
   }
 
@@ -11548,40 +11589,40 @@ public class SemanticAnalyzer extends Ba
   }
 
   private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo,
-		  Operator<? extends OperatorDesc> reduceSinkOp, RowResolver gByRR) {
-	  if ( gByExpr.getType() == HiveParser.DOT
+      Operator<? extends OperatorDesc> reduceSinkOp, RowResolver gByRR) {
+    if ( gByExpr.getType() == HiveParser.DOT
           && gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL ) {
-		  String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
-		            .getChild(0).getChild(0).getText());
-		  String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(
-				  gByExpr.getChild(1).getText());
-		  gByRR.put(tab_alias, col_alias, colInfo);
-	  } else if ( gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL ) {
-		  String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
-		          .getChild(0).getText());
-		  String tab_alias = null;
-		  /*
-		   * If the input to the GBy has a tab alias for the column, then add an entry
-		   * based on that tab_alias.
-		   * For e.g. this query:
-		   * select b.x, count(*) from t1 b group by x
-		   * needs (tab_alias=b, col_alias=x) in the GBy RR.
-		   * tab_alias=b comes from looking at the RowResolver that is the ancestor
-		   * before any GBy/ReduceSinks added for the GBY operation.
-		   */
-		  Operator<? extends OperatorDesc> parent = reduceSinkOp;
-		  while ( parent instanceof ReduceSinkOperator ||
-				  parent instanceof GroupByOperator ) {
-			  parent = parent.getParentOperators().get(0);
-		  }
-		  RowResolver parentRR = opParseCtx.get(parent).getRowResolver();
-		  try {
-			  ColumnInfo pColInfo = parentRR.get(tab_alias, col_alias);
-			  tab_alias = pColInfo == null ? null : pColInfo.getTabAlias();
-		  } catch(SemanticException se) {
-		  }
-		  gByRR.put(tab_alias, col_alias, colInfo);
-	  }
+      String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
+                .getChild(0).getChild(0).getText());
+      String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(
+          gByExpr.getChild(1).getText());
+      gByRR.put(tab_alias, col_alias, colInfo);
+    } else if ( gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL ) {
+      String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
+              .getChild(0).getText());
+      String tab_alias = null;
+      /*
+       * If the input to the GBy has a tab alias for the column, then add an entry
+       * based on that tab_alias.
+       * For e.g. this query:
+       * select b.x, count(*) from t1 b group by x
+       * needs (tab_alias=b, col_alias=x) in the GBy RR.
+       * tab_alias=b comes from looking at the RowResolver that is the ancestor
+       * before any GBy/ReduceSinks added for the GBY operation.
+       */
+      Operator<? extends OperatorDesc> parent = reduceSinkOp;
+      while ( parent instanceof ReduceSinkOperator ||
+          parent instanceof GroupByOperator ) {
+        parent = parent.getParentOperators().get(0);
+      }
+      RowResolver parentRR = opParseCtx.get(parent).getRowResolver();
+      try {
+        ColumnInfo pColInfo = parentRR.get(tab_alias, col_alias);
+        tab_alias = pColInfo == null ? null : pColInfo.getTabAlias();
+      } catch(SemanticException se) {
+      }
+      gByRR.put(tab_alias, col_alias, colInfo);
+    }
   }
 
   private WriteEntity.WriteType determineWriteType(LoadTableDesc ltd, boolean isNonNativeTable) {

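Note on the hunks above: CREATE TABLE/VIEW analysis now works with db-qualified
names and registers both the database and the table as write entities. A minimal
sketch of the flow, using only the helpers visible in the diff
(getQualifiedTableName, getDotName, addDbAndTabToOutputs) and a hypothetical
table mydb.t:

    String[] qualTabName = getQualifiedTableName((ASTNode) ast.getChild(0)); // {"mydb", "t"}
    String dbDotTab = getDotName(qualTabName);                               // "mydb.t"
    // DDL_SHARED lock on the database, DDL_NO_LOCK on the table itself:
    addDbAndTabToOutputs(qualTabName);
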
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Tue Sep  2 19:56:56 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.parse;
 
 import java.util.HashMap;
 
+import org.antlr.runtime.tree.Tree;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -57,7 +58,7 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_ALTERTABLE_ARCHIVE, HiveOperation.ALTERTABLE_ARCHIVE);
     commandType.put(HiveParser.TOK_ALTERTABLE_UNARCHIVE, HiveOperation.ALTERTABLE_UNARCHIVE);
     commandType.put(HiveParser.TOK_ALTERTABLE_PROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES);
-    commandType.put(HiveParser.TOK_DROPTABLE_PROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES);
+    commandType.put(HiveParser.TOK_ALTERTABLE_DROPPROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES);
     commandType.put(HiveParser.TOK_SHOWDATABASES, HiveOperation.SHOWDATABASES);
     commandType.put(HiveParser.TOK_SHOWTABLES, HiveOperation.SHOWTABLES);
     commandType.put(HiveParser.TOK_SHOWCOLUMNS, HiveOperation.SHOWCOLUMNS);
@@ -81,9 +82,11 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_ALTERINDEX_REBUILD, HiveOperation.ALTERINDEX_REBUILD);
     commandType.put(HiveParser.TOK_ALTERINDEX_PROPERTIES, HiveOperation.ALTERINDEX_PROPS);
     commandType.put(HiveParser.TOK_ALTERVIEW_PROPERTIES, HiveOperation.ALTERVIEW_PROPERTIES);
-    commandType.put(HiveParser.TOK_DROPVIEW_PROPERTIES, HiveOperation.ALTERVIEW_PROPERTIES);
+    commandType.put(HiveParser.TOK_ALTERVIEW_DROPPROPERTIES, HiveOperation.ALTERVIEW_PROPERTIES);
     commandType.put(HiveParser.TOK_ALTERVIEW_ADDPARTS, HiveOperation.ALTERTABLE_ADDPARTS);
     commandType.put(HiveParser.TOK_ALTERVIEW_DROPPARTS, HiveOperation.ALTERTABLE_DROPPARTS);
+    commandType.put(HiveParser.TOK_ALTERVIEW_RENAME, HiveOperation.ALTERVIEW_RENAME);
+    commandType.put(HiveParser.TOK_ALTERVIEW, HiveOperation.ALTERVIEW_AS);
     commandType.put(HiveParser.TOK_QUERY, HiveOperation.QUERY);
     commandType.put(HiveParser.TOK_LOCKTABLE, HiveOperation.LOCKTABLE);
     commandType.put(HiveParser.TOK_UNLOCKTABLE, HiveOperation.UNLOCKTABLE);
@@ -105,11 +108,9 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_DESCDATABASE, HiveOperation.DESCDATABASE);
     commandType.put(HiveParser.TOK_ALTERTABLE_SKEWED, HiveOperation.ALTERTABLE_SKEWED);
     commandType.put(HiveParser.TOK_ANALYZE, HiveOperation.ANALYZE_TABLE);
-    commandType.put(HiveParser.TOK_ALTERVIEW_RENAME, HiveOperation.ALTERVIEW_RENAME);
     commandType.put(HiveParser.TOK_ALTERTABLE_PARTCOLTYPE, HiveOperation.ALTERTABLE_PARTCOLTYPE);
     commandType.put(HiveParser.TOK_SHOW_COMPACTIONS, HiveOperation.SHOW_COMPACTIONS);
     commandType.put(HiveParser.TOK_SHOW_TRANSACTIONS, HiveOperation.SHOW_TRANSACTIONS);
-    commandType.put(HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS, HiveOperation.ALTERTABLE_UPDATETABLESTATS);
   }
 
   static {
@@ -134,17 +135,20 @@ public final class SemanticAnalyzerFacto
             HiveOperation.ALTERPARTITION_SERDEPROPERTIES });
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_RENAMEPART,
         new HiveOperation[] {null, HiveOperation.ALTERTABLE_RENAMEPART});
-    tablePartitionCommandType.put(HiveParser.TOK_COMPACT,
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_COMPACT,
         new HiveOperation[] {HiveOperation.ALTERTABLE_COMPACT, HiveOperation.ALTERTABLE_COMPACT});
-    tablePartitionCommandType.put(HiveParser.TOK_ALTERTBLPART_SKEWED_LOCATION,
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_SKEWED_LOCATION,
         new HiveOperation[] {HiveOperation.ALTERTBLPART_SKEWED_LOCATION,
             HiveOperation.ALTERTBLPART_SKEWED_LOCATION });
-    tablePartitionCommandType.put(HiveParser.TOK_TABLEBUCKETS,
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_BUCKETS,
         new HiveOperation[] {HiveOperation.ALTERTABLE_BUCKETNUM,
             HiveOperation.ALTERPARTITION_BUCKETNUM});
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_CLUSTER_SORT,
         new HiveOperation[] {HiveOperation.ALTERTABLE_CLUSTER_SORT,
             HiveOperation.ALTERTABLE_CLUSTER_SORT});
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS,
+        new HiveOperation[] {HiveOperation.ALTERTABLE_UPDATETABLESTATS,
+            HiveOperation.ALTERTABLE_UPDATEPARTSTATS});
   }
 
   public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
@@ -152,9 +156,9 @@ public final class SemanticAnalyzerFacto
     if (tree.getToken() == null) {
       throw new RuntimeException("Empty Syntax Tree");
     } else {
-      setSessionCommandType(commandType.get(tree.getToken().getType()));
+      setSessionCommandType(commandType.get(tree.getType()));
 
-      switch (tree.getToken().getType()) {
+      switch (tree.getType()) {
       case HiveParser.TOK_EXPLAIN:
         return new ExplainSemanticAnalyzer(conf);
       case HiveParser.TOK_EXPLAIN_SQ_REWRITE:
@@ -165,6 +169,47 @@ public final class SemanticAnalyzerFacto
         return new ExportSemanticAnalyzer(conf);
       case HiveParser.TOK_IMPORT:
         return new ImportSemanticAnalyzer(conf);
+      case HiveParser.TOK_ALTERTABLE: {
+        Tree child = tree.getChild(1);
+        switch (child.getType()) {
+          case HiveParser.TOK_ALTERTABLE_RENAME:
+          case HiveParser.TOK_ALTERTABLE_TOUCH:
+          case HiveParser.TOK_ALTERTABLE_ARCHIVE:
+          case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
+          case HiveParser.TOK_ALTERTABLE_ADDCOLS:
+          case HiveParser.TOK_ALTERTABLE_RENAMECOL:
+          case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
+          case HiveParser.TOK_ALTERTABLE_DROPPARTS:
+          case HiveParser.TOK_ALTERTABLE_ADDPARTS:
+          case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE:
+          case HiveParser.TOK_ALTERTABLE_PROPERTIES:
+          case HiveParser.TOK_ALTERTABLE_DROPPROPERTIES:
+          case HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
+          case HiveParser.TOK_ALTERTABLE_SKEWED:
+            setSessionCommandType(commandType.get(child.getType()));
+            return new DDLSemanticAnalyzer(conf);
+        }
+        HiveOperation commandType =
+            tablePartitionCommandType.get(child.getType())[tree.getChildCount() > 2 ? 1 : 0];
+        setSessionCommandType(commandType);
+        return new DDLSemanticAnalyzer(conf);
+      }
+      case HiveParser.TOK_ALTERVIEW: {
+        Tree child = tree.getChild(1);
+        switch (child.getType()) {
+          case HiveParser.TOK_ALTERVIEW_PROPERTIES:
+          case HiveParser.TOK_ALTERVIEW_DROPPROPERTIES:
+          case HiveParser.TOK_ALTERVIEW_ADDPARTS:
+          case HiveParser.TOK_ALTERVIEW_DROPPARTS:
+          case HiveParser.TOK_ALTERVIEW_RENAME:
+            setSessionCommandType(commandType.get(child.getType()));
+            return new DDLSemanticAnalyzer(conf);
+        }
+        // TOK_ALTERVIEW_AS
+        assert child.getType() == HiveParser.TOK_QUERY;
+        setSessionCommandType(HiveOperation.ALTERVIEW_AS);
+        return new SemanticAnalyzer(conf);
+      }
       case HiveParser.TOK_CREATEDATABASE:
       case HiveParser.TOK_DROPDATABASE:
       case HiveParser.TOK_SWITCHDATABASE:
@@ -174,24 +219,8 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_DESCTABLE:
       case HiveParser.TOK_DESCFUNCTION:
       case HiveParser.TOK_MSCK:
-      case HiveParser.TOK_ALTERTABLE_ADDCOLS:
-      case HiveParser.TOK_ALTERTABLE_RENAMECOL:
-      case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
-      case HiveParser.TOK_ALTERTABLE_RENAME:
-      case HiveParser.TOK_ALTERTABLE_DROPPARTS:
-      case HiveParser.TOK_ALTERTABLE_ADDPARTS:
-      case HiveParser.TOK_ALTERTABLE_PROPERTIES:
-      case HiveParser.TOK_DROPTABLE_PROPERTIES:
-      case HiveParser.TOK_ALTERTABLE_SERIALIZER:
-      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
-      case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE:
       case HiveParser.TOK_ALTERINDEX_REBUILD:
       case HiveParser.TOK_ALTERINDEX_PROPERTIES:
-      case HiveParser.TOK_ALTERVIEW_PROPERTIES:
-      case HiveParser.TOK_DROPVIEW_PROPERTIES:
-      case HiveParser.TOK_ALTERVIEW_ADDPARTS:
-      case HiveParser.TOK_ALTERVIEW_DROPPARTS:
-      case HiveParser.TOK_ALTERVIEW_RENAME:
       case HiveParser.TOK_SHOWDATABASES:
       case HiveParser.TOK_SHOWTABLES:
       case HiveParser.TOK_SHOWCOLUMNS:
@@ -209,9 +238,6 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_CREATEINDEX:
       case HiveParser.TOK_DROPINDEX:
       case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
-      case HiveParser.TOK_ALTERTABLE_TOUCH:
-      case HiveParser.TOK_ALTERTABLE_ARCHIVE:
-      case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
       case HiveParser.TOK_LOCKTABLE:
       case HiveParser.TOK_UNLOCKTABLE:
       case HiveParser.TOK_LOCKDB:
@@ -228,23 +254,8 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_SHOW_ROLES:
       case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
       case HiveParser.TOK_ALTERDATABASE_OWNER:
-      case HiveParser.TOK_ALTERTABLE_SKEWED:
       case HiveParser.TOK_TRUNCATETABLE:
-      case HiveParser.TOK_EXCHANGEPARTITION:
       case HiveParser.TOK_SHOW_SET_ROLE:
-      case HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS:
-        return new DDLSemanticAnalyzer(conf);
-      case HiveParser.TOK_ALTERTABLE_PARTITION:
-        HiveOperation commandType = null;
-        Integer type = ((ASTNode) tree.getChild(1)).getToken().getType();
-        if (type == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS) {
-          commandType = HiveOperation.ALTERTABLE_UPDATEPARTSTATS;
-        } else if (tree.getChild(0).getChildCount() > 1) {
-          commandType = tablePartitionCommandType.get(type)[1];
-        } else {
-          commandType = tablePartitionCommandType.get(type)[0];
-        }
-        setSessionCommandType(commandType);
         return new DDLSemanticAnalyzer(conf);
 
       case HiveParser.TOK_CREATEFUNCTION:

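Note on the factory change: the grammar now hangs every ALTER TABLE variant under
a single TOK_ALTERTABLE root (child 0 is the table name, child 1 the variant
token), so the factory dispatches on the variant token and falls back to the
table-vs-partition lookup for the bucketing/skew/compact cases. A condensed
sketch of the new shape, not the literal control flow:

    switch (tree.getType()) {
    case HiveParser.TOK_ALTERTABLE: {
      Tree child = tree.getChild(1);                 // e.g. TOK_ALTERTABLE_RENAME
      HiveOperation op = commandType.get(child.getType());
      if (op == null) {
        // index 1 when a partition spec is present, else index 0
        op = tablePartitionCommandType.get(child.getType())
            [tree.getChildCount() > 2 ? 1 : 0];
      }
      setSessionCommandType(op);
      return new DDLSemanticAnalyzer(conf);
    }
    }
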
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java Tue Sep  2 19:56:56 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.parse;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
@@ -227,11 +228,19 @@ public abstract class TaskCompiler {
 
       crtTblDesc.validate(conf);
 
-      // Clear the output for CTAS since we don't need the output from the
-      // mapredWork, the
+      // for CTAS, remove only the mapredWork output directories; the
       // DDLWork at the tail of the chain will have the output
-      outputs.clear();
-
+      Iterator<WriteEntity> outIter = outputs.iterator();
+      while (outIter.hasNext()) {
+        switch (outIter.next().getType()) {
+        case DFS_DIR:
+        case LOCAL_DIR:
+          outIter.remove();
+          break;
+        default:
+          break;
+        }
+      }
       Task<? extends Serializable> crtTblTask = TaskFactory.get(new DDLWork(
           inputs, outputs, crtTblDesc), conf);
 

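Note: the iterator loop keeps the table and database write entities for CTAS and
strips only the directory outputs that came from the mapredWork. On a Java 8+
runtime (newer than this branch targets) the same filter would collapse to a
one-liner; a sketch, assuming the Entity.Type constants behind
WriteEntity.getType():

    outputs.removeIf(entity -> entity.getType() == Entity.Type.DFS_DIR
                            || entity.getType() == Entity.Type.LOCAL_DIR);
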
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java Tue Sep  2 19:56:56 2014
@@ -206,7 +206,7 @@ public class HiveAuthorizationTaskFactor
 
     List<String> roles = new ArrayList<String>();
     for (int i = rolesStartPos; i < ast.getChildCount(); i++) {
-      roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()).toLowerCase());
+      roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
     }
 
     String roleOwnerName = SessionState.getUserFromAuthenticator();

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java Tue Sep  2 19:56:56 2014
@@ -44,11 +44,19 @@ public class AlterTableDesc extends DDLD
    *
    */
   public static enum AlterTableTypes {
-    RENAME, ADDCOLS, REPLACECOLS, ADDPROPS, DROPPROPS, ADDSERDE, ADDSERDEPROPS,
-    ADDFILEFORMAT, ADDCLUSTERSORTCOLUMN, RENAMECOLUMN, ADDPARTITION,
-    TOUCH, ARCHIVE, UNARCHIVE, ALTERPROTECTMODE, ALTERPARTITIONPROTECTMODE,
-    ALTERLOCATION, DROPPARTITION, RENAMEPARTITION, ADDSKEWEDBY, ALTERSKEWEDLOCATION,
-    ALTERBUCKETNUM, ALTERPARTITION, COMPACT
+    RENAME("rename"), ADDCOLS("add columns"), REPLACECOLS("replace columns"),
+    ADDPROPS("add props"), DROPPROPS("drop props"), ADDSERDE("add serde"), ADDSERDEPROPS("add serde props"),
+    ADDFILEFORMAT("add fileformat"), ADDCLUSTERSORTCOLUMN("add cluster sort column"),
+    RENAMECOLUMN("rename column"), ADDPARTITION("add partition"), TOUCH("touch"), ARCHIVE("archieve"),
+    UNARCHIVE("unarchieve"), ALTERPROTECTMODE("alter protect mode"),
+    ALTERPARTITIONPROTECTMODE("alter partition protect mode"), ALTERLOCATION("alter location"),
+    DROPPARTITION("drop partition"), RENAMEPARTITION("rename partition"), ADDSKEWEDBY("add skew column"),
+    ALTERSKEWEDLOCATION("alter skew location"), ALTERBUCKETNUM("alter bucket number"),
+    ALTERPARTITION("alter partition"), COMPACT("compact");
+
+    private final String name;
+    private AlterTableTypes(String name) { this.name = name; }
+    public String getName() { return name; }
   }
 
   public static enum ProtectModeType {
@@ -236,16 +244,7 @@ public class AlterTableDesc extends DDLD
 
   @Explain(displayName = "type")
   public String getAlterTableTypeString() {
-    switch (op) {
-    case RENAME:
-      return "rename";
-    case ADDCOLS:
-      return "add columns";
-    case REPLACECOLS:
-      return "replace columns";
-    }
-
-    return "unknown";
+    return op.getName();
   }
 
   /**

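Note: getAlterTableTypeString() previously covered only RENAME, ADDCOLS and
REPLACECOLS and answered "unknown" for every other operation; with the label on
the enum constant the Explain output is total. A quick usage sketch:

    AlterTableDesc.AlterTableTypes op = AlterTableDesc.AlterTableTypes.COMPACT;
    System.out.println(op.getName()); // "compact" (never "unknown")
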
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Tue Sep  2 19:56:56 2014
@@ -114,8 +114,8 @@ public enum HiveOperation {
       new Privilege[] {Privilege.ALTER_DATA}, null),
   ALTERTABLE_PARTCOLTYPE("ALTERTABLE_PARTCOLTYPE", new Privilege[] { Privilege.SELECT }, new Privilege[] { Privilege.ALTER_DATA }),
   ALTERVIEW_RENAME("ALTERVIEW_RENAME", new Privilege[] {Privilege.ALTER_METADATA}, null),
-  ALTERTABLE_COMPACT("ALTERTABLE_COMPACT", new Privilege[]{Privilege.SELECT},
-      new Privilege[]{Privilege.ALTER_DATA}),
+  ALTERVIEW_AS("ALTERVIEW_AS", new Privilege[] {Privilege.ALTER_METADATA}, null),
+  ALTERTABLE_COMPACT("ALTERTABLE_COMPACT", new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.ALTER_DATA}),
   SHOW_COMPACTIONS("SHOW COMPACTIONS", null, null),
   SHOW_TRANSACTIONS("SHOW TRANSACTIONS", null, null);
   ;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java Tue Sep  2 19:56:56 2014
@@ -37,7 +37,7 @@ public class LoadFileDesc extends LoadDe
   private String destinationCreateTable;
 
   static {
-	  PTFUtils.makeTransient(LoadFileDesc.class, "targetDir");
+    PTFUtils.makeTransient(LoadFileDesc.class, "targetDir");
   }
   public LoadFileDesc() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java Tue Sep  2 19:56:56 2014
@@ -38,7 +38,7 @@ public class LoadMultiFilesDesc implemen
   private transient List<Path> srcDirs;
 
   static {
-	  PTFUtils.makeTransient(LoadMultiFilesDesc.class, "targetDirs");
+    PTFUtils.makeTransient(LoadMultiFilesDesc.class, "targetDirs");
   }
   public LoadMultiFilesDesc() {
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java Tue Sep  2 19:56:56 2014
@@ -116,6 +116,8 @@ public class MapWork extends BaseWork {
 
   private boolean useOneNullRowInputFormat;
 
+  private boolean dummyTableScan = false;
+
   public MapWork() {}
 
   public MapWork(String name) {
@@ -525,4 +527,12 @@ public class MapWork extends BaseWork {
       }
     }
   }
+
+  public void setDummyTableScan(boolean dummyTableScan) {
+    this.dummyTableScan = dummyTableScan;
+  }
+
+  public boolean getDummyTableScan() {
+    return dummyTableScan;
+  }
 }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Tue Sep  2 19:56:56 2014
@@ -347,7 +347,7 @@ public final class PlanUtils {
 
       if (crtTblDesc.getTableName() != null && crtTblDesc.getDatabaseName() != null) {
         properties.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME,
-            crtTblDesc.getDatabaseName() + "." + crtTblDesc.getTableName());
+            crtTblDesc.getTableName());
       }
 
       if (crtTblDesc.getTblProps() != null) {

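Note: CreateTableDesc (see the SemanticAnalyzer hunks above) now carries a
db-qualified table name, so prefixing the database again would double it. For a
hypothetical table t in database db:

    // before: getDatabaseName() + "." + getTableName() yields "db" + "." + "db.t" = "db.db.t"
    // after:  getTableName() alone yields "db.t"
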
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java Tue Sep  2 19:56:56 2014
@@ -102,8 +102,7 @@ public class RoleDDLDesc extends DDLDesc
 
   public RoleDDLDesc(String principalName, PrincipalType principalType,
       RoleOperation operation, String roleOwnerName) {
-    this.name = (principalName != null  && principalType == PrincipalType.ROLE) ?
-      principalName.toLowerCase() : principalName;
+    this.name = principalName;
     this.principalType = principalType;
     this.operation = operation;
     this.roleOwnerName = roleOwnerName;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java Tue Sep  2 19:56:56 2014
@@ -49,6 +49,9 @@ public enum HiveCommand {
       if (command.length > 1 && "role".equalsIgnoreCase(command[1])) {
         // special handling for set role r1 statement
         return null;
+      } else if (command.length > 1 && "from".equalsIgnoreCase(command[1])) {
+        // special handling for SQL "delete from <table> where..."
+        return null;
       } else if (COMMANDS.contains(cmd)) {
         return HiveCommand.valueOf(cmd);
       }

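Note: DELETE already names a resource command (DELETE FILE/JAR), so without this
guard a SQL "DELETE FROM ..." statement would be routed to the resource
processor; returning null hands it to the Driver instead. A sketch, assuming the
String[] overload of find() that the diff is patching:

    // "delete jar my.jar"         maps to HiveCommand.DELETE (resource processor)
    // "delete from t where x > 0" maps to null, compiled as SQL by the Driver
    HiveCommand cmd = HiveCommand.find("delete from t where x > 0".split("\\s+"));
    assert cmd == null;
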
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java Tue Sep  2 19:56:56 2014
@@ -67,9 +67,6 @@ public class AuthorizationUtils {
     case ROLE:
       return HivePrincipalType.ROLE;
     case GROUP:
-      if (SessionState.get().getAuthorizationMode() == SessionState.AuthorizationMode.V2) {
-        throw new HiveException(ErrorMsg.UNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
-      }
       return HivePrincipalType.GROUP;
     default:
       //should not happen as we take care of all existing types