Posted to commits@hive.apache.org by br...@apache.org on 2014/08/03 22:48:39 UTC

svn commit: r1615452 [3/4] - in /hive/branches/spark: ./ bin/ common/ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/java/org/apache/hadoop/hive/contrib/metastore/hooks/ contrib/src/test/queries/clientnegative/ contrib/src/test/queries/...

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Sun Aug  3 20:48:35 2014
@@ -31,15 +31,16 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Set;
-import java.lang.StringBuffer;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
 import org.apache.hadoop.hive.ql.exec.DemuxOperator;
@@ -65,11 +66,10 @@ import org.apache.hadoop.hive.ql.exec.mr
 import org.apache.hadoop.hive.ql.exec.spark.SparkTask;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
-import org.apache.hadoop.hive.ql.io.rcfile.merge.MergeWork;
+import org.apache.hadoop.hive.ql.io.merge.MergeWork;
+import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMRUnionCtx;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPruner;
@@ -1257,23 +1257,33 @@ public final class GenMapRedUtils {
     MapWork cplan;
     Serializable work;
 
-    if (conf.getBoolVar(ConfVars.HIVEMERGERCFILEBLOCKLEVEL) &&
-        fsInputDesc.getTableInfo().getInputFileFormatClass().equals(RCFileInputFormat.class)) {
+    if ((conf.getBoolVar(ConfVars.HIVEMERGERCFILEBLOCKLEVEL) &&
+        fsInputDesc.getTableInfo().getInputFileFormatClass().equals(RCFileInputFormat.class)) ||
+        (conf.getBoolVar(ConfVars.HIVEMERGEORCFILESTRIPELEVEL) &&
+            fsInputDesc.getTableInfo().getInputFileFormatClass().equals(OrcInputFormat.class))) {
 
       // Check if InputFormatClass is valid
-      String inputFormatClass = conf.getVar(ConfVars.HIVEMERGEINPUTFORMATBLOCKLEVEL);
+      final String inputFormatClass;
+      if (fsInputDesc.getTableInfo().getInputFileFormatClass().equals(RCFileInputFormat.class)) {
+        inputFormatClass = conf.getVar(ConfVars.HIVEMERGEINPUTFORMATBLOCKLEVEL);
+      } else {
+        inputFormatClass = conf.getVar(ConfVars.HIVEMERGEINPUTFORMATSTRIPELEVEL);
+      }
       try {
         Class c = Class.forName(inputFormatClass);
 
-        LOG.info("RCFile format- Using block level merge");
-        cplan = GenMapRedUtils.createRCFileMergeTask(fsInputDesc, finalName,
+        if (fsInputDesc.getTableInfo().getInputFileFormatClass().equals(OrcInputFormat.class)) {
+          LOG.info("OrcFile format - Using stripe level merge");
+        } else {
+          LOG.info("RCFile format - Using block level merge");
+        }
+        cplan = GenMapRedUtils.createMergeTask(fsInputDesc, finalName,
             dpCtx != null && dpCtx.getNumDPCols() > 0);
         work = cplan;
       } catch (ClassNotFoundException e) {
         String msg = "Illegal input format class: " + inputFormatClass;
         throw new SemanticException(msg);
       }
-
     } else {
       cplan = createMRWorkForMergingFiles(conf, tsMerge, fsInputDesc);
       if (conf.getVar(ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
@@ -1486,20 +1496,22 @@ public final class GenMapRedUtils {
   }
 
   /**
-   * Create a block level merge task for RCFiles.
+   * Create a block level merge task for RCFiles or a stripe level merge task
+   * for ORC files.
    *
    * @param fsInputDesc
    * @param finalName
-   * @return MergeWork if table is stored as RCFile,
+   * @return MergeWork if table is stored as RCFile or ORCFile,
    *         null otherwise
    */
-  public static MapWork createRCFileMergeTask(FileSinkDesc fsInputDesc,
+  public static MapWork createMergeTask(FileSinkDesc fsInputDesc,
       Path finalName, boolean hasDynamicPartitions) throws SemanticException {
 
     Path inputDir = fsInputDesc.getFinalDirName();
     TableDesc tblDesc = fsInputDesc.getTableInfo();
 
-    if (tblDesc.getInputFileFormatClass().equals(RCFileInputFormat.class)) {
+    if (tblDesc.getInputFileFormatClass().equals(RCFileInputFormat.class) ||
+        tblDesc.getInputFileFormatClass().equals(OrcInputFormat.class)) {
       ArrayList<Path> inputDirs = new ArrayList<Path>(1);
       ArrayList<String> inputDirstr = new ArrayList<String>(1);
       if (!hasDynamicPartitions
@@ -1509,7 +1521,8 @@ public final class GenMapRedUtils {
       }
 
       MergeWork work = new MergeWork(inputDirs, finalName,
-          hasDynamicPartitions, fsInputDesc.getDynPartCtx());
+          hasDynamicPartitions, fsInputDesc.getDynPartCtx(),
+          tblDesc.getInputFileFormatClass());
       LinkedHashMap<String, ArrayList<String>> pathToAliases =
           new LinkedHashMap<String, ArrayList<String>>();
       pathToAliases.put(inputDir.toString(), (ArrayList<String>) inputDirstr.clone());
@@ -1527,7 +1540,8 @@ public final class GenMapRedUtils {
       return work;
     }
 
-    throw new SemanticException("createRCFileMergeTask called on non-RCFile table");
+    throw new SemanticException("createMergeTask called on a table with file"
+        + " format other than RCFile or ORCFile");
   }
 
   /**
@@ -1709,12 +1723,8 @@ public final class GenMapRedUtils {
       // generate the temporary file
       // it must be on the same file system as the current destination
       Context baseCtx = parseCtx.getContext();
-  	  // if we are on viewfs we don't want to use /tmp as tmp dir since rename from /tmp/..
-      // to final location /user/hive/warehouse/ will fail later, so instead pick tmp dir
-      // on same namespace as tbl dir.
-      Path tmpDir = dest.toUri().getScheme().equals("viewfs") ?
-        baseCtx.getExtTmpPathRelTo(dest.toUri()) :
-        baseCtx.getExternalTmpPath(dest.toUri());
+
+      Path tmpDir = baseCtx.getExternalTmpPath(dest);
 
       FileSinkDesc fileSinkDesc = fsOp.getConf();
       // Change all the linked file sink descriptors
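
For quick reference, the new branch above boils down to: look up the merge InputFormat from a block-level ConfVar for RCFile tables and a stripe-level ConfVar for ORC tables. A condensed, illustrative sketch of that dispatch (the ConfVars and class names are as in the hunk; the helper class itself is hypothetical):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
    import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
    import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;

    final class MergeFormatDispatch {
      // Pick the configured merge InputFormat class name for a table's input
      // format: block-level merging for RCFile, stripe-level merging for ORC.
      static String mergeInputFormat(HiveConf conf, Class<?> tableInputFormat) {
        if (tableInputFormat.equals(RCFileInputFormat.class)) {
          return conf.getVar(ConfVars.HIVEMERGEINPUTFORMATBLOCKLEVEL);
        }
        if (tableInputFormat.equals(OrcInputFormat.class)) {
          return conf.getVar(ConfVars.HIVEMERGEINPUTFORMATSTRIPELEVEL);
        }
        throw new IllegalArgumentException("only RCFile and ORC are merged here");
      }
    }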

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SortedMergeJoinProc.java Sun Aug  3 20:48:35 2014
@@ -18,17 +18,13 @@
 
 package org.apache.hadoop.hive.ql.optimizer;
 
-import java.util.HashMap;
-import java.util.Map;
 import java.util.Stack;
 
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
-import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
-import org.apache.hadoop.hive.ql.parse.QBJoinTree;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 public class SortedMergeJoinProc extends AbstractSMBJoinProc implements NodeProcessor {
@@ -46,12 +42,6 @@ public class SortedMergeJoinProc extends
 
     JoinOperator joinOp = (JoinOperator) nd;
     SortBucketJoinProcCtx smbJoinContext = (SortBucketJoinProcCtx) procCtx;
-    Map<MapJoinOperator, QBJoinTree> mapJoinMap = pGraphContext.getMapJoinContext();
-    if (mapJoinMap == null) {
-      mapJoinMap = new HashMap<MapJoinOperator, QBJoinTree>();
-      pGraphContext.setMapJoinContext(mapJoinMap);
-    }
-
     boolean convert =
         canConvertJoinToSMBJoin(
             joinOp, smbJoinContext, pGraphContext);

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java Sun Aug  3 20:48:35 2014
@@ -401,7 +401,6 @@ public class StatsRulesProcFactory {
 
       long numRows = stats.getNumRows();
 
-      // evaluate similar to "col = constant" expr
       if (pred instanceof ExprNodeGenericFuncDesc) {
 
         ExprNodeGenericFuncDesc genFunc = (ExprNodeGenericFuncDesc) pred;
@@ -413,9 +412,7 @@ public class StatsRulesProcFactory {
             String tabAlias = colDesc.getTabAlias();
             ColStatistics cs = stats.getColumnStatisticsForColumn(tabAlias, colName);
             if (cs != null) {
-              long dvs = cs.getCountDistint();
-              numRows = dvs == 0 ? numRows / 2 : numRows / dvs;
-              return numRows;
+              return cs.getNumNulls();
             }
           }
         }
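
The hunk above changes the row estimate for "col is null" predicates: instead of the generic numRows / NDV guess, the optimizer now reads the null count directly from column statistics. A toy illustration of the difference (the numbers are invented):

    public class NullEstimateDemo {
      public static void main(String[] args) {
        long numRows = 1000L, countDistinct = 50L, numNulls = 12L; // made-up stats
        // old estimate: divide rows by distinct-value count (halve if unknown)
        long oldEstimate = countDistinct == 0 ? numRows / 2 : numRows / countDistinct;
        // new estimate: the null count from ColStatistics.getNumNulls()
        long newEstimate = numNulls;
        System.out.println(oldEstimate + " -> " + newEstimate); // prints 20 -> 12
      }
    }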

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTablePartMergeFilesDesc.java Sun Aug  3 20:48:35 2014
@@ -25,6 +25,7 @@ import java.util.List;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
+import org.apache.hadoop.mapred.InputFormat;
 
 @Explain(displayName = "Alter Table Partition Merge Files")
 public class AlterTablePartMergeFilesDesc {
@@ -35,6 +36,7 @@ public class AlterTablePartMergeFilesDes
 
   private List<Path> inputDir = new ArrayList<Path>();
   private Path outputDir = null;
+  private Class<? extends InputFormat> inputFormatClass;
 
   public AlterTablePartMergeFilesDesc(String tableName,
       HashMap<String, String> partSpec) {
@@ -90,4 +92,12 @@ public class AlterTablePartMergeFilesDes
     this.lbCtx = lbCtx;
   }
 
+  public Class<? extends InputFormat> getInputFormatClass() {
+    return inputFormatClass;
+  }
+
+  public void setInputFormatClass(Class<? extends InputFormat> inputFormatClass) {
+    this.inputFormatClass = inputFormatClass;
+  }
+
 }
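
A minimal round-trip of the new field (the constructor signature is taken from the class above; the usage context is hypothetical):

    import java.util.HashMap;
    import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
    import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc;

    public class MergeDescDemo {
      public static void main(String[] args) {
        // DDLSemanticAnalyzer (below) now records the table's input format on
        // the merge descriptor so downstream code can choose block-level
        // (RCFile) versus stripe-level (ORC) merging.
        AlterTablePartMergeFilesDesc desc =
            new AlterTablePartMergeFilesDesc("t", new HashMap<String, String>());
        desc.setInputFormatClass(OrcInputFormat.class);
        System.out.println(desc.getInputFormatClass().getSimpleName()); // OrcInputFormat
      }
    }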

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Sun Aug  3 20:48:35 2014
@@ -69,6 +69,7 @@ import org.apache.hadoop.hive.ql.index.H
 import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
+import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
 import org.apache.hadoop.hive.ql.lockmgr.LockException;
@@ -969,7 +970,7 @@ public class DDLSemanticAnalyzer extends
         TableDesc tblDesc = Utilities.getTableDesc(table);
         // Write the output to temporary directory and move it to the final location at the end
         // so the operation is atomic.
-        Path queryTmpdir = ctx.getExternalTmpPath(newTblPartLoc.toUri());
+        Path queryTmpdir = ctx.getExternalTmpPath(newTblPartLoc);
         truncateTblDesc.setOutputDir(queryTmpdir);
         LoadTableDesc ltd = new LoadTableDesc(queryTmpdir, tblDesc,
             partSpec == null ? new HashMap<String, String>() : partSpec);
@@ -1520,11 +1521,13 @@ public class DDLSemanticAnalyzer extends
             tblObj.getSkewedColValueLocationMaps(), tblObj.isStoredAsSubDirectories(), conf);
       }
 
-      // throw a HiveException for non-rcfile.
-      if (!inputFormatClass.equals(RCFileInputFormat.class)) {
+      // throw a HiveException for formats other than RCFile and ORC.
+      if (!(inputFormatClass.equals(RCFileInputFormat.class) ||
+          inputFormatClass.equals(OrcInputFormat.class))) {
         throw new SemanticException(
-            "Only RCFileFormat is supportted right now.");
+            "Only RCFile and ORCFile Formats are supportted right now.");
       }
+      mergeDesc.setInputFormatClass(inputFormatClass);
 
       // throw a HiveException if the table/partition is bucketized
       if (bucketCols != null && bucketCols.size() > 0) {
@@ -1549,7 +1552,7 @@ public class DDLSemanticAnalyzer extends
       ddlWork.setNeedLock(true);
       Task<? extends Serializable> mergeTask = TaskFactory.get(ddlWork, conf);
       TableDesc tblDesc = Utilities.getTableDesc(tblObj);
-      Path queryTmpdir = ctx.getExternalTmpPath(newTblPartLoc.toUri());
+      Path queryTmpdir = ctx.getExternalTmpPath(newTblPartLoc);
       mergeDesc.setOutputDir(queryTmpdir);
       LoadTableDesc ltd = new LoadTableDesc(queryTmpdir, tblDesc,
           partSpec == null ? new HashMap<String, String>() : partSpec);
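
The "temporary directory, then move" comment in this hunk is the usual write-then-rename pattern: write into a scratch directory on the same file system as the destination, then rename, so readers never observe partial output. A generic sketch of the pattern (illustrative only, not the Hive implementation):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class AtomicMoveSketch {
      // Promote a fully-written tmpDir to finalDir in a single rename.
      public static void commit(Configuration conf, Path tmpDir, Path finalDir)
          throws Exception {
        FileSystem fs = finalDir.getFileSystem(conf);
        if (fs.exists(finalDir)) {
          fs.delete(finalDir, true); // clear the old contents first
        }
        if (!fs.rename(tmpDir, finalDir)) {
          throw new RuntimeException("rename failed: " + tmpDir + " -> " + finalDir);
        }
      }
    }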

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/GenTezUtils.java Sun Aug  3 20:48:35 2014
@@ -313,7 +313,8 @@ public class GenTezUtils {
 
     if (chDir) {
       // Merge the files in the destination table/partitions by creating Map-only merge job
-      // If underlying data is RCFile a RCFileBlockMerge task would be created.
+      // If the underlying data is RCFile or ORC, a RCFileBlockMerge or
+      // OrcFileStripeMerge task will be created.
       LOG.info("using CombineHiveInputformat for the merge job");
       GenMapRedUtils.createMRWorkForMergingFiles(fileSink, finalName,
           context.dependencyTask, context.moveTask,

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Sun Aug  3 20:48:35 2014
@@ -928,7 +928,7 @@ alterStatement
         |
             KW_INDEX! alterIndexStatementSuffix
         |
-            KW_DATABASE! alterDatabaseStatementSuffix
+            (KW_DATABASE|KW_SCHEMA)! alterDatabaseStatementSuffix
         )
     ;
 
@@ -1299,7 +1299,7 @@ descStatement
 @after { popMsg(state); }
     : (KW_DESCRIBE|KW_DESC) (descOptions=KW_FORMATTED|descOptions=KW_EXTENDED|descOptions=KW_PRETTY)? (parttype=descPartTypeExpr) -> ^(TOK_DESCTABLE $parttype $descOptions?)
     | (KW_DESCRIBE|KW_DESC) KW_FUNCTION KW_EXTENDED? (name=descFuncNames) -> ^(TOK_DESCFUNCTION $name KW_EXTENDED?)
-    | (KW_DESCRIBE|KW_DESC) KW_DATABASE KW_EXTENDED? (dbName=identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
+    | (KW_DESCRIBE|KW_DESC) (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? (dbName=identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
     ;
 
 analyzeStatement
@@ -1324,7 +1324,7 @@ showStatement
     -> ^(TOK_SHOW_TABLESTATUS showStmtIdentifier $db_name? partitionSpec?)
     | KW_SHOW KW_TBLPROPERTIES tblName=identifier (LPAREN prptyName=StringLiteral RPAREN)? -> ^(TOK_SHOW_TBLPROPERTIES $tblName $prptyName?)
     | KW_SHOW KW_LOCKS (parttype=partTypeExpr)? (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWLOCKS $parttype? $isExtended?)
-    | KW_SHOW KW_LOCKS KW_DATABASE (dbName=Identifier) (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWDBLOCKS $dbName $isExtended?)
+    | KW_SHOW KW_LOCKS (KW_DATABASE|KW_SCHEMA) (dbName=Identifier) (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWDBLOCKS $dbName $isExtended?)
     | KW_SHOW (showOptions=KW_FORMATTED)? (KW_INDEX|KW_INDEXES) KW_ON showStmtIdentifier ((KW_FROM|KW_IN) db_name=identifier)?
     -> ^(TOK_SHOWINDEXES showStmtIdentifier $showOptions? $db_name?)
     | KW_SHOW KW_COMPACTIONS -> ^(TOK_SHOW_COMPACTIONS)
@@ -1341,7 +1341,7 @@ lockStatement
 lockDatabase
 @init { pushMsg("lock database statement", state); }
 @after { popMsg(state); }
-    : KW_LOCK KW_DATABASE (dbName=Identifier) lockMode -> ^(TOK_LOCKDB $dbName lockMode)
+    : KW_LOCK (KW_DATABASE|KW_SCHEMA) (dbName=Identifier) lockMode -> ^(TOK_LOCKDB $dbName lockMode)
     ;
 
 lockMode
@@ -1359,7 +1359,7 @@ unlockStatement
 unlockDatabase
 @init { pushMsg("unlock database statement", state); }
 @after { popMsg(state); }
-    : KW_UNLOCK KW_DATABASE (dbName=Identifier) -> ^(TOK_UNLOCKDB $dbName)
+    : KW_UNLOCK (KW_DATABASE|KW_SCHEMA) (dbName=Identifier) -> ^(TOK_UNLOCKDB $dbName)
     ;
 
 createRoleStatement
@@ -1471,7 +1471,7 @@ privilegeObject
 privObjectType
 @init {pushMsg("privilege object type type", state);}
 @after {popMsg(state);}
-    : KW_DATABASE -> ^(TOK_DB_TYPE)
+    : (KW_DATABASE|KW_SCHEMA) -> ^(TOK_DB_TYPE)
     | KW_TABLE? -> ^(TOK_TABLE_TYPE)
     ;
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java Sun Aug  3 20:48:35 2014
@@ -276,7 +276,7 @@ public class ImportSemanticAnalyzer exte
 
   private Task<?> loadTable(URI fromURI, Table table) {
     Path dataPath = new Path(fromURI.toString(), "data");
-    Path tmpPath = ctx.getExternalTmpPath(fromURI);
+    Path tmpPath = ctx.getExternalTmpPath(new Path(fromURI));
     Task<?> copyTask = TaskFactory.get(new CopyWork(dataPath,
        tmpPath, false), conf);
     LoadTableDesc loadTableWork = new LoadTableDesc(tmpPath,
@@ -321,7 +321,7 @@ public class ImportSemanticAnalyzer exte
       LOG.debug("adding dependent CopyWork/AddPart/MoveWork for partition "
           + partSpecToString(partSpec.getPartSpec())
           + " with source location: " + srcLocation);
-      Path tmpPath = ctx.getExternalTmpPath(fromURI);
+      Path tmpPath = ctx.getExternalTmpPath(new Path(fromURI));
       Task<?> copyTask = TaskFactory.get(new CopyWork(new Path(srcLocation),
           tmpPath, false), conf);
       Task<?> addPartTask = TaskFactory.get(new DDLWork(getInputs(),

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Sun Aug  3 20:48:35 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS;
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DATABASE_WAREHOUSE_SUFFIX;
 
 import java.io.IOException;
 import java.io.Serializable;
@@ -31,10 +30,10 @@ import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.UUID;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeSet;
+import java.util.UUID;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 
@@ -1426,7 +1425,7 @@ public class SemanticAnalyzer extends Ba
               }
               try {
                 fname = ctx.getExternalTmpPath(
-                    FileUtils.makeQualified(location, conf).toUri()).toString();
+                    FileUtils.makeQualified(location, conf)).toString();
               } catch (Exception e) {
                 throw new SemanticException(generateErrorMessage(ast,
                     "Error creating temporary folder on: " + location.toString()), e);
@@ -2328,13 +2327,13 @@ public class SemanticAnalyzer extends Ba
    * for inner joins push a 'is not null predicate' to the join sources for
    * every non nullSafe predicate.
    */
-  private Operator genNotNullFilterForJoinSourcePlan(QB qb, Operator input, 
+  private Operator genNotNullFilterForJoinSourcePlan(QB qb, Operator input,
       QBJoinTree joinTree, ExprNodeDesc[] joinKeys) throws SemanticException {
 
     if (qb == null || joinTree == null) {
       return input;
     }
-    
+
     if (!joinTree.getNoOuterJoin()) {
       return input;
     }
@@ -5662,12 +5661,7 @@ public class SemanticAnalyzer extends Ba
       if (isNonNativeTable) {
         queryTmpdir = dest_path;
       } else {
-    	// if we are on viewfs we don't want to use /tmp as tmp dir since rename from /tmp/..
-        // to final /user/hive/warehouse/ will fail later, so instead pick tmp dir
-        // on same namespace as tbl dir.
-        queryTmpdir = dest_path.toUri().getScheme().equals("viewfs") ?
-          ctx.getExtTmpPathRelTo(dest_path.getParent().toUri()) :
-          ctx.getExternalTmpPath(dest_path.toUri());
+        queryTmpdir = ctx.getExternalTmpPath(dest_path);
       }
       if (dpCtx != null) {
         // set the root of the temporary path where dynamic partition columns will populate
@@ -5780,12 +5774,7 @@ public class SemanticAnalyzer extends Ba
       dest_path = new Path(tabPath.toUri().getScheme(), tabPath.toUri()
           .getAuthority(), partPath.toUri().getPath());
 
-      // if we are on viewfs we don't want to use /tmp as tmp dir since rename from /tmp/..
-      // to final /user/hive/warehouse/ will fail later, so instead pick tmp dir
-      // on same namespace as tbl dir.
-      queryTmpdir = dest_path.toUri().getScheme().equals("viewfs") ?
-        ctx.getExtTmpPathRelTo(dest_path.getParent().toUri()) :
-        ctx.getExternalTmpPath(dest_path.toUri());
+      queryTmpdir = ctx.getExternalTmpPath(dest_path);
       table_desc = Utilities.getTableDesc(dest_tab);
 
       // Add sorting/bucketing if needed
@@ -5842,7 +5831,7 @@ public class SemanticAnalyzer extends Ba
 
         try {
           Path qPath = FileUtils.makeQualified(dest_path, conf);
-          queryTmpdir = ctx.getExternalTmpPath(qPath.toUri());
+          queryTmpdir = ctx.getExternalTmpPath(qPath);
         } catch (Exception e) {
           throw new SemanticException("Error creating temporary folder on: "
               + dest_path, e);
@@ -6003,7 +5992,7 @@ public class SemanticAnalyzer extends Ba
     // it should be the same as the MoveWork's sourceDir.
     fileSinkDesc.setStatsAggPrefix(fileSinkDesc.getDirName().toString());
     if (HiveConf.getVar(conf, HIVESTATSDBCLASS).equalsIgnoreCase(StatDB.fs.name())) {
-      String statsTmpLoc = ctx.getExternalTmpPath(queryTmpdir.toUri()).toString();
+      String statsTmpLoc = ctx.getExternalTmpPath(queryTmpdir).toString();
       LOG.info("Set stats collection dir : " + statsTmpLoc);
       conf.set(StatsSetupConst.STATS_TMP_LOC, statsTmpLoc);
     }
@@ -9014,7 +9003,7 @@ public class SemanticAnalyzer extends Ba
       tsDesc.setGatherStats(false);
     } else {
       if (HiveConf.getVar(conf, HIVESTATSDBCLASS).equalsIgnoreCase(StatDB.fs.name())) {
-        String statsTmpLoc = ctx.getExternalTmpPath(tab.getPath().toUri()).toString();
+        String statsTmpLoc = ctx.getExternalTmpPath(tab.getPath()).toString();
         LOG.info("Set stats collection dir : " + statsTmpLoc);
         conf.set(StatsSetupConst.STATS_TMP_LOC, statsTmpLoc);
       }
@@ -9501,7 +9490,11 @@ public class SemanticAnalyzer extends Ba
 
     // Generate column access stats if required - wait until column pruning takes place
     // during optimization
-    if (HiveConf.getBoolVar(this.conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS) == true) {
+    boolean isColumnInfoNeedForAuth = SessionState.get().isAuthorizationModeV2()
+        && HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED);
+
+    if (isColumnInfoNeedForAuth
+        || HiveConf.getBoolVar(this.conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS) == true) {
       ColumnAccessAnalyzer columnAccessAnalyzer = new ColumnAccessAnalyzer(pCtx);
       setColumnAccessInfo(columnAccessAnalyzer.analyzeColumnAccess());
     }
@@ -9548,7 +9541,7 @@ public class SemanticAnalyzer extends Ba
             if (((TableScanDesc)topOp.getConf()).getIsMetadataOnly()) {
               continue;
             }
-            PrunedPartitionList parts = pCtx.getOpToPartList().get((TableScanOperator) topOp);
+            PrunedPartitionList parts = pCtx.getOpToPartList().get(topOp);
             if (parts.getPartitions().size() > scanLimit) {
               throw new SemanticException(ErrorMsg.PARTITION_SCAN_LIMIT_EXCEEDED, ""
                   + parts.getPartitions().size(), "" + parts.getSourceTable().getTableName(), ""
@@ -10169,7 +10162,7 @@ public class SemanticAnalyzer extends Ba
     String dbName = qualified.length == 1 ? SessionState.get().getCurrentDatabase() : qualified[0];
     Database database  = getDatabase(dbName);
     outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
- 
+
     if (isTemporary) {
       if (partCols.size() > 0) {
         throw new SemanticException("Partition columns are not supported on temporary tables");

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java Sun Aug  3 20:48:35 2014
@@ -36,6 +36,17 @@ public class ExprNodeConstantDesc extend
   private static final long serialVersionUID = 1L;
   final protected transient static char[] hexArray = "0123456789ABCDEF".toCharArray();
   private Object value;
+  // If this constant was created by constant folding, foldedFromCol holds the
+  // name of the original column from which it was folded.
+  private transient String foldedFromCol;
+
+  public String getFoldedFromCol() {
+    return foldedFromCol;
+  }
+
+  public void setFoldedFromCol(String foldedFromCol) {
+    this.foldedFromCol = foldedFromCol;
+  }
 
   public ExprNodeConstantDesc() {
   }
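
A tiny illustration of the new provenance field (the single-argument constructor is assumed from the rest of the class, which this diff does not show):

    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;

    public class FoldedFromColDemo {
      public static void main(String[] args) {
        // Constant folding can now record which column a constant came from.
        ExprNodeConstantDesc five = new ExprNodeConstantDesc(5);
        five.setFoldedFromCol("id");
        System.out.println(five.getFoldedFromCol()); // prints: id
      }
    }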

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java Sun Aug  3 20:48:35 2014
@@ -17,6 +17,10 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
 import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
@@ -24,8 +28,8 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType;
@@ -42,9 +46,6 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
-import java.util.ArrayList;
-import java.util.List;
-
 /**
  * Utility code shared by hive internal code and sql standard authorization plugin implementation
  */
@@ -173,7 +174,7 @@ public class AuthorizationUtils {
   }
 
   public static HivePrivilegeObject getHivePrivilegeObject(
-      PrivilegeObjectDesc privSubjectDesc, List<String> columns) throws HiveException {
+      PrivilegeObjectDesc privSubjectDesc, Set<String> columns) throws HiveException {
 
     // null means ALL for show grants, GLOBAL for grant/revoke
     HivePrivilegeObjectType objectType = null;

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java Sun Aug  3 20:48:35 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.security.authorization;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
@@ -27,7 +26,7 @@ public class DefaultHiveAuthorizationPro
     BitSetCheckedAuthorizationProvider {
 
   public void init(Configuration conf) throws HiveException {
-    hive_db = new HiveProxy(Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class)));
+    hive_db = new HiveProxy(Hive.get(conf, HiveAuthorizationProvider.class));
   }
 
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java Sun Aug  3 20:48:35 2014
@@ -35,7 +35,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -83,7 +82,7 @@ public class StorageBasedAuthorizationPr
         // till we explicitly initialize it as being from the client side. So, we have a
         // chicken-and-egg problem. So, we now track whether or not we're running from client-side
         // in the SBAP itself.
-        hive_db = new HiveProxy(Hive.get(new HiveConf(getConf(), StorageBasedAuthorizationProvider.class)));
+        hive_db = new HiveProxy(Hive.get(getConf(), StorageBasedAuthorizationProvider.class));
         this.wh = new Warehouse(getConf());
         if (this.wh == null){
           // If wh is still null after just having initialized it, bail out - something's very wrong.
@@ -117,7 +116,7 @@ public class StorageBasedAuthorizationPr
 
     // Update to previous comment: there does seem to be one place that uses this
     // and that is to authorize "show databases" in hcat commandline, which is used
-    // by webhcat. And user-level auth seems to be a resonable default in this case.
+    // by webhcat. And user-level auth seems to be a reasonable default in this case.
     // The now deprecated HdfsAuthorizationProvider in hcatalog approached this in
     // another way, and that was to see if the user had said above appropriate requested
     // privileges for the hive root warehouse directory. That seems to be the best

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java Sun Aug  3 20:48:35 2014
@@ -17,9 +17,12 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin;
 
-import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
@@ -35,7 +38,9 @@ public class HivePrivilegeObject impleme
   public int compareTo(HivePrivilegeObject o) {
     int compare = type.compareTo(o.type);
     if (compare == 0) {
-      compare = dbname.compareTo(o.dbname);
+      compare = dbname != null ?
+          (o.dbname != null ? dbname.compareTo(o.dbname) : 1) :
+          (o.dbname != null ? -1 : 0);
     }
     if (compare == 0) {
       compare = objectName != null ?
@@ -55,9 +60,18 @@ public class HivePrivilegeObject impleme
     return compare;
   }
 
-  private int compare(List<String> o1, List<String> o2) {
-    for (int i = 0; i < Math.min(o1.size(), o2.size()); i++) {
-      int compare = o1.get(i).compareTo(o2.get(i));
+  private int compare(Collection<String> o1, Collection<String> o2) {
+    Iterator<String> it1 = o1.iterator();
+    Iterator<String> it2 = o2.iterator();
+    while (it1.hasNext()) {
+      if (!it2.hasNext()) {
+        break;
+      }
+      String s1 = it1.next();
+      String s2 = it2.next();
+      int compare = s1 != null ?
+          (s2 != null ? s1.compareTo(s2) : 1) :
+            (s2 != null ? -1 : 0);
       if (compare != 0) {
         return compare;
       }
@@ -77,7 +91,7 @@ public class HivePrivilegeObject impleme
   private final String objectName;
   private final List<String> commandParams;
   private final List<String> partKeys;
-  private final List<String> columns;
+  private Set<String> columns;
   private final HivePrivObjectActionType actionType;
 
   public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName) {
@@ -92,9 +106,8 @@ public class HivePrivilegeObject impleme
   public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
       List<String> partKeys, String column) {
     this(type, dbname, objectName, partKeys,
-        column == null ? null : new ArrayList<String>(Arrays.asList(column)),
+        column == null ? null : new HashSet<String>(Arrays.asList(column)),
         HivePrivObjectActionType.OTHER, null);
-
   }
 
   /**
@@ -108,12 +121,12 @@ public class HivePrivilegeObject impleme
   }
 
   public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
-    List<String> partKeys, List<String> columns, List<String> commandParams) {
+    List<String> partKeys, Set<String> columns, List<String> commandParams) {
     this(type, dbname, objectName, partKeys, columns, HivePrivObjectActionType.OTHER, commandParams);
   }
 
   public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
-      List<String> partKeys, List<String> columns, HivePrivObjectActionType actionType,
+      List<String> partKeys, Set<String> columns, HivePrivObjectActionType actionType,
       List<String> commandParams) {
     this.type = type;
     this.dbname = dbname;
@@ -151,7 +164,13 @@ public class HivePrivilegeObject impleme
     return partKeys;
   }
 
-  public List<String> getColumns() {
+  /**
+   * Applicable columns in this object.
+   * In case of DML read operations, this is the set of columns being used.
+   * Column information is not set for DDL operations or for tables being written into.
+   * @return the set of applicable columns
+   */
+  public Set<String> getColumns() {
     return columns;
   }
 
@@ -200,4 +219,8 @@ public class HivePrivilegeObject impleme
     return (dbname == null ? "" : dbname + ".") + objectName;
   }
 
+  public void setColumns(Set<String> columns) {
+    this.columns = columns;
+  }
+
 }
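
The rewritten compare above is a null-safe, element-wise comparison over arbitrary Collections (needed now that columns is a Set). A standalone restatement of the technique (the size tie-break at the end is illustrative; the hunk does not show the method's tail):

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.Iterator;

    public class NullSafeCompareDemo {
      static int compare(Collection<String> o1, Collection<String> o2) {
        Iterator<String> it1 = o1.iterator();
        Iterator<String> it2 = o2.iterator();
        while (it1.hasNext() && it2.hasNext()) {
          String s1 = it1.next();
          String s2 = it2.next();
          // null sorts before any non-null value; two nulls are equal
          int c = s1 != null ? (s2 != null ? s1.compareTo(s2) : 1)
                             : (s2 != null ? -1 : 0);
          if (c != 0) {
            return c;
          }
        }
        return Integer.compare(o1.size(), o2.size()); // shorter collection first
      }

      public static void main(String[] args) {
        System.out.println(compare(Arrays.asList("a", null), Arrays.asList("a", "b"))); // -1
      }
    }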

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRoleGrant.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRoleGrant.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRoleGrant.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRoleGrant.java Sun Aug  3 20:48:35 2014
@@ -123,5 +123,7 @@ public class HiveRoleGrant implements Co
 
   }
 
-
+  public String toString() {
+    return roleName + "[" + principalName + ":" + principalType + (grantOption ? ":WITH GRANT]" : "]");
+  }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java Sun Aug  3 20:48:35 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.securi
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -318,7 +319,7 @@ public class HiveV1Authorizer implements
           privs.addAll(hive.showPrivilegeGrant(HiveObjectType.DATABASE,
               name, type, dbObj.getName(), null, null, null));
         } else {
-          List<String> columns = privObj.getColumns();
+          Set<String> columns = privObj.getColumns();
           if (columns != null && !columns.isEmpty()) {
             // show column level privileges
             for (String columnName : columns) {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java Sun Aug  3 20:48:35 2014
@@ -105,6 +105,7 @@ public class SQLStdHiveAccessController 
     }
     this.currentUserName = newUserName;
     this.currentRoles = getRolesFromMS();
+    LOG.info("Current user : " + currentUserName + ", Current Roles : " + currentRoles);
   }
 
   private List<HiveRoleGrant> getRolesFromMS() throws HiveAuthzPluginException {
@@ -532,6 +533,7 @@ public class SQLStdHiveAccessController 
       currentRoles.add(adminRole);
       return;
     }
+    LOG.info("Current user : " + currentUserName + ", Current Roles : " + currentRoles);
     // If we are here it means, user is requesting a role he doesn't belong to.
     throw new HiveAccessControlException(currentUserName +" doesn't belong to role "
       +roleName);

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java Sun Aug  3 20:48:35 2014
@@ -429,6 +429,8 @@ public class StatsUtils {
       cs.setAvgColLen(JavaDataModel.get().lengthOfTimestamp());
     } else if (colType.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) {
       cs.setAvgColLen(JavaDataModel.get().lengthOfDecimal());
+      cs.setCountDistint(csd.getDecimalStats().getNumDVs());
+      cs.setNumNulls(csd.getDecimalStats().getNumNulls());
     } else if (colType.equalsIgnoreCase(serdeConstants.DATE_TYPE_NAME)) {
       cs.setAvgColLen(JavaDataModel.get().lengthOfDate());
     } else {

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java Sun Aug  3 20:48:35 2014
@@ -126,12 +126,13 @@ public class TestDbTxnManager {
   public void testSingleWriteTable() throws Exception {
     WriteEntity we = addTableOutput(WriteEntity.WriteType.INSERT);
     QueryPlan qp = new MockQueryPlan(this);
+    txnMgr.openTxn("fred");
     txnMgr.acquireLocks(qp, ctx, "fred");
     List<HiveLock> locks = ctx.getHiveLocks();
     Assert.assertEquals(1, locks.size());
     Assert.assertEquals(1,
         TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
-    txnMgr.getLockManager().unlock(locks.get(0));
+    txnMgr.commitTxn();
     locks = txnMgr.getLockManager().getLocks(false, false);
     Assert.assertEquals(0, locks.size());
   }
@@ -144,12 +145,13 @@ public class TestDbTxnManager {
     addPartitionInput(t);
     WriteEntity we = addTableOutput(WriteEntity.WriteType.INSERT);
     QueryPlan qp = new MockQueryPlan(this);
+    txnMgr.openTxn("fred");
     txnMgr.acquireLocks(qp, ctx, "fred");
     List<HiveLock> locks = ctx.getHiveLocks();
     Assert.assertEquals(1, locks.size());
     Assert.assertEquals(4,
         TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
-    txnMgr.getLockManager().unlock(locks.get(0));
+    txnMgr.commitTxn();
     locks = txnMgr.getLockManager().getLocks(false, false);
     Assert.assertEquals(0, locks.size());
   }
@@ -158,12 +160,13 @@ public class TestDbTxnManager {
   public void testUpdate() throws Exception {
     WriteEntity we = addTableOutput(WriteEntity.WriteType.UPDATE);
     QueryPlan qp = new MockQueryPlan(this);
+    txnMgr.openTxn("fred");
     txnMgr.acquireLocks(qp, ctx, "fred");
     List<HiveLock> locks = ctx.getHiveLocks();
     Assert.assertEquals(1, locks.size());
     Assert.assertEquals(1,
         TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
-    txnMgr.getLockManager().unlock(locks.get(0));
+    txnMgr.commitTxn();
     locks = txnMgr.getLockManager().getLocks(false, false);
     Assert.assertEquals(0, locks.size());
   }
@@ -172,12 +175,28 @@ public class TestDbTxnManager {
   public void testDelete() throws Exception {
     WriteEntity we = addTableOutput(WriteEntity.WriteType.DELETE);
     QueryPlan qp = new MockQueryPlan(this);
+    txnMgr.openTxn("fred");
     txnMgr.acquireLocks(qp, ctx, "fred");
     List<HiveLock> locks = ctx.getHiveLocks();
     Assert.assertEquals(1, locks.size());
     Assert.assertEquals(1,
         TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
-    txnMgr.getLockManager().unlock(locks.get(0));
+    txnMgr.commitTxn();
+    locks = txnMgr.getLockManager().getLocks(false, false);
+    Assert.assertEquals(0, locks.size());
+  }
+
+  @Test
+  public void testRollback() throws Exception {
+    WriteEntity we = addTableOutput(WriteEntity.WriteType.DELETE);
+    QueryPlan qp = new MockQueryPlan(this);
+    txnMgr.openTxn("fred");
+    txnMgr.acquireLocks(qp, ctx, "fred");
+    List<HiveLock> locks = ctx.getHiveLocks();
+    Assert.assertEquals(1, locks.size());
+    Assert.assertEquals(1,
+        TxnDbUtil.countLockComponents(((DbLockManager.DbHiveLock) locks.get(0)).lockId));
+    txnMgr.rollbackTxn();
     locks = txnMgr.getLockManager().getLocks(false, false);
     Assert.assertEquals(0, locks.size());
   }
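
The test updates above all follow one lifecycle change: a transaction is opened before locks are acquired, and the locks are released by ending the transaction (commit or rollback) rather than by unlocking directly. Condensed, the pattern the tests now exercise is (API names as in the diff; this fragment assumes the surrounding test fixture and is not runnable on its own):

    txnMgr.openTxn("fred");               // open the transaction first
    txnMgr.acquireLocks(qp, ctx, "fred"); // locks are tied to the open txn
    try {
      // ... perform the write ...
      txnMgr.commitTxn();                 // commit releases the locks
    } catch (Exception e) {
      txnMgr.rollbackTxn();               // rollback releases them as well
      throw e;
    }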

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Sun Aug  3 20:48:35 2014
@@ -509,6 +509,7 @@ public class TestHive extends TestCase {
 
   public void testHiveRefreshOnConfChange() throws Throwable{
     Hive prevHiveObj = Hive.get();
+    prevHiveObj.getDatabaseCurrent();
     Hive newHiveObj;
 
     //if HiveConf has not changed, same object should be returned
@@ -522,6 +523,7 @@ public class TestHive extends TestCase {
 
     //if HiveConf has changed, new object should be returned
     prevHiveObj = Hive.get();
+    prevHiveObj.getDatabaseCurrent();
     //change value of a metavar config param in new hive conf
     newHconf = new HiveConf(hiveConf);
     newHconf.setIntVar(ConfVars.METASTORETHRIFTCONNECTIONRETRIES,

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_all_role.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_all_role.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_all_role.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_all_role.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_default_role.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_default_role.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_default_role.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_default_role.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_none_role.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_none_role.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_none_role.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_cannot_create_none_role.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_caseinsensitivity.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_caseinsensitivity.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_caseinsensitivity.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_caseinsensitivity.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_db_cascade.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_db_cascade.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_db_cascade.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_db_cascade.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_db_empty.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_db_empty.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_db_empty.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_db_empty.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_role_no_admin.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_role_no_admin.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_role_no_admin.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_drop_role_no_admin.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_priv_current_role_neg.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_priv_current_role_neg.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_priv_current_role_neg.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_priv_current_role_neg.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_cycles1.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_cycles1.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_cycles1.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_cycles1.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_cycles2.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_cycles2.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_cycles2.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_cycles2.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant2.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant2.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant2.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant2.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_nosuchrole.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_nosuchrole.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_nosuchrole.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_nosuchrole.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_otherrole.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_otherrole.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_otherrole.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_otherrole.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_otheruser.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_otheruser.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_otheruser.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_role_grant_otheruser.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_rolehierarchy_privs.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_rolehierarchy_privs.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_rolehierarchy_privs.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_rolehierarchy_privs.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_set_role_neg2.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_set_role_neg2.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_set_role_neg2.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_set_role_neg2.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otherrole.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otherrole.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otherrole.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otherrole.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_all.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_all.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_all.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_all.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_alltabs.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_alltabs.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_alltabs.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_alltabs.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_wtab.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_wtab.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_wtab.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_show_grant_otheruser_wtab.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_1_sql_std.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_1_sql_std.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_1_sql_std.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_1_sql_std.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_9.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_9.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_9.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_9.q Sun Aug  3 20:48:35 2014
@@ -2,6 +2,7 @@
 
 create table dummy (key string, value string);
 
+grant select to user hive_test_user;
 grant select on database default to user hive_test_user;
 grant select on table dummy to user hive_test_user;
 grant select (key, value) on table dummy to user hive_test_user;
@@ -10,16 +11,21 @@ show grant user hive_test_user on databa
 show grant user hive_test_user on table dummy;
 show grant user hive_test_user on all;
 
+grant select to user hive_test_user2;
 grant select on database default to user hive_test_user2;
 grant select on table dummy to user hive_test_user2;
 grant select (key, value) on table dummy to user hive_test_user2;
 
 show grant on all;
+show grant user hive_test_user on all;
+show grant user hive_test_user2 on all;
 
+revoke select from user hive_test_user;
 revoke select on database default from user hive_test_user;
 revoke select on table dummy from user hive_test_user;
 revoke select (key, value) on table dummy from user hive_test_user;
 
+revoke select from user hive_test_user2;
 revoke select on database default from user hive_test_user2;
 revoke select on table dummy from user hive_test_user2;
 revoke select (key, value) on table dummy from user hive_test_user2;
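
The added statements exercise user-level privileges: "grant select to user
..." with no ON clause records the privilege globally rather than against a
database, table, or column, which is why the expected output gains rows whose
object columns are empty. The grant/inspect/revoke cycle, taken from the test
itself:

    grant select to user hive_test_user;            -- global, no object
    grant select on database default to user hive_test_user;
    grant select on table dummy to user hive_test_user;
    show grant user hive_test_user on all;          -- lists all of the above
    revoke select from user hive_test_user;         -- undoes the global grant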

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_admin_almighty1.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_admin_almighty1.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_admin_almighty1.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_admin_almighty1.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_test_user;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_admin_almighty2.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_admin_almighty2.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_admin_almighty2.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_admin_almighty2.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_create_func1.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_create_func1.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_create_func1.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_create_func1.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_create_macro1.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_create_macro1.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_create_macro1.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_create_macro1.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_insert.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_insert.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_insert.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_insert.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_owner_actions_db.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_owner_actions_db.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_owner_actions_db.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_owner_actions_db.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant1.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant1.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant1.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant1.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant2.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant2.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant2.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant2.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_set_show_current_role.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_set_show_current_role.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_set_show_current_role.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_set_show_current_role.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set user.name=hive_admin_user;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_show_grant.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_show_grant.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_show_grant.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_show_grant.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q Sun Aug  3 20:48:35 2014
@@ -1,4 +1,3 @@
-set hive.users.in.admin.role=hive_admin_user;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 set hive.security.authorization.enabled=true;

Modified: hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_filter.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_filter.q.out?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_filter.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_filter.q.out Sun Aug  3 20:48:35 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_join.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_join.q.out?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_join.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/annotate_stats_join.q.out Sun Aug  3 20:48:35 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/authorization_9.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/authorization_9.q.out?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/authorization_9.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/authorization_9.q.out Sun Aug  3 20:48:35 2014
@@ -9,6 +9,10 @@ create table dummy (key string, value st
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@dummy
+PREHOOK: query: grant select to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: query: grant select to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
 PREHOOK: query: grant select on database default to user hive_test_user
 PREHOOK: type: GRANT_PRIVILEGE
 POSTHOOK: query: grant select on database default to user hive_test_user
@@ -39,10 +43,15 @@ PREHOOK: query: show grant user hive_tes
 PREHOOK: type: SHOW_GRANT
 POSTHOOK: query: show grant user hive_test_user on all
 POSTHOOK: type: SHOW_GRANT
+				hive_test_user	USER	SELECT	false	-1	hive_test_user
 default				hive_test_user	USER	SELECT	false	-1	hive_test_user
 default	dummy			hive_test_user	USER	SELECT	false	-1	hive_test_user
 default	dummy		[key]	hive_test_user	USER	SELECT	false	-1	hive_test_user
 default	dummy		[value]	hive_test_user	USER	SELECT	false	-1	hive_test_user
+PREHOOK: query: grant select to user hive_test_user2
+PREHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: query: grant select to user hive_test_user2
+POSTHOOK: type: GRANT_PRIVILEGE
 PREHOOK: query: grant select on database default to user hive_test_user2
 PREHOOK: type: GRANT_PRIVILEGE
 POSTHOOK: query: grant select on database default to user hive_test_user2
@@ -64,6 +73,8 @@ PREHOOK: type: SHOW_GRANT
 POSTHOOK: query: show grant on all
 POSTHOOK: type: SHOW_GRANT
 				admin	ROLE	ALL	true	-1	admin
+				hive_test_user	USER	SELECT	false	-1	hive_test_user
+				hive_test_user2	USER	SELECT	false	-1	hive_test_user
 default				hive_test_user	USER	SELECT	false	-1	hive_test_user
 default				hive_test_user2	USER	SELECT	false	-1	hive_test_user
 default	dummy			hive_test_user	USER	SELECT	false	-1	hive_test_user
@@ -72,6 +83,28 @@ default	dummy		[key]	hive_test_user	USER
 default	dummy		[key]	hive_test_user2	USER	SELECT	false	-1	hive_test_user
 default	dummy		[value]	hive_test_user	USER	SELECT	false	-1	hive_test_user
 default	dummy		[value]	hive_test_user2	USER	SELECT	false	-1	hive_test_user
+PREHOOK: query: show grant user hive_test_user on all
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user hive_test_user on all
+POSTHOOK: type: SHOW_GRANT
+				hive_test_user	USER	SELECT	false	-1	hive_test_user
+default				hive_test_user	USER	SELECT	false	-1	hive_test_user
+default	dummy			hive_test_user	USER	SELECT	false	-1	hive_test_user
+default	dummy		[key]	hive_test_user	USER	SELECT	false	-1	hive_test_user
+default	dummy		[value]	hive_test_user	USER	SELECT	false	-1	hive_test_user
+PREHOOK: query: show grant user hive_test_user2 on all
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user hive_test_user2 on all
+POSTHOOK: type: SHOW_GRANT
+				hive_test_user2	USER	SELECT	false	-1	hive_test_user
+default				hive_test_user2	USER	SELECT	false	-1	hive_test_user
+default	dummy			hive_test_user2	USER	SELECT	false	-1	hive_test_user
+default	dummy		[key]	hive_test_user2	USER	SELECT	false	-1	hive_test_user
+default	dummy		[value]	hive_test_user2	USER	SELECT	false	-1	hive_test_user
+PREHOOK: query: revoke select from user hive_test_user
+PREHOOK: type: REVOKE_PRIVILEGE
+POSTHOOK: query: revoke select from user hive_test_user
+POSTHOOK: type: REVOKE_PRIVILEGE
 PREHOOK: query: revoke select on database default from user hive_test_user
 PREHOOK: type: REVOKE_PRIVILEGE
 POSTHOOK: query: revoke select on database default from user hive_test_user
@@ -88,6 +121,10 @@ PREHOOK: Output: default@dummy
 POSTHOOK: query: revoke select (key, value) on table dummy from user hive_test_user
 POSTHOOK: type: REVOKE_PRIVILEGE
 POSTHOOK: Output: default@dummy
+PREHOOK: query: revoke select from user hive_test_user2
+PREHOOK: type: REVOKE_PRIVILEGE
+POSTHOOK: query: revoke select from user hive_test_user2
+POSTHOOK: type: REVOKE_PRIVILEGE
 PREHOOK: query: revoke select on database default from user hive_test_user2
 PREHOOK: type: REVOKE_PRIVILEGE
 POSTHOOK: query: revoke select on database default from user hive_test_user2
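
For reading the expected rows above: the tab-separated SHOW GRANT output
appears to carry, in order, database, table, partition, column, principal,
principal type, privilege, grant option, grant time, and grantor; the new
global grants leave the first four fields empty, and -1 is an unset grant
time. For example:

    -- default  dummy  (partition)  [key]  hive_test_user  USER  SELECT  false  -1  hive_test_user
    --   reads as: hive_test_user holds SELECT (without grant option) on
    --   column key of default.dummy, granted by hive_test_user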

Modified: hive/branches/spark/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out?rev=1615452&r1=1615451&r2=1615452&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out Sun Aug  3 20:48:35 2014
@@ -563,10 +563,14 @@ STAGE PLANS:
     Stats-Aggr Operator
 
   Stage: Stage-4
-    Block level merge
+    Merge Work
+      merge level: block
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
 
   Stage: Stage-6
-    Block level merge
+    Merge Work
+      merge level: block
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
 
   Stage: Stage-7
     Move Operator
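
The expected-plan change in this file: stages that EXPLAIN used to print as a
bare "Block level merge" now render as "Merge Work", annotated with the merge
level and the input format being merged. A hedged sketch of the standard Hive
settings under which such merge stages appear in a plan (the values shown are
assumptions for illustration):

    set hive.merge.mapfiles=true;              -- merge small files after map-only jobs
    set hive.merge.rcfile.block.level=true;    -- RCFile: merge whole blocks
    set hive.merge.orcfile.stripe.level=true;  -- ORC: merge whole stripes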