Posted to commits@hive.apache.org by br...@apache.org on 2014/08/15 17:38:13 UTC

svn commit: r1618215 [5/25] - in /hive/branches/spark: ./ bin/ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/test/results/clientnegative/ contrib/src/test/results/clientpositive/ hbase-handler/src/test/results/negative/ hbase-handler/src/tes...

Modified: hive/branches/spark/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/pom.xml?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/pom.xml (original)
+++ hive/branches/spark/pom.xml Fri Aug 15 15:37:46 2014
@@ -147,7 +147,7 @@
     <slf4j.version>1.7.5</slf4j.version>
     <ST4.version>4.0.4</ST4.version>
     <super-csv.version>2.2.0</super-csv.version>
-    <tez.version>0.4.0-incubating</tez.version>
+    <tez.version>0.4.1-incubating</tez.version>
     <spark.version>1.1.0-SNAPSHOT</spark.version>
     <scala.binary.version>2.10</scala.binary.version>
     <scala.version>2.10.4</scala.version>

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java Fri Aug 15 15:37:46 2014
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.io.merg
 import org.apache.hadoop.hive.ql.io.merge.MergeWork;
 import org.apache.hadoop.hive.ql.io.rcfile.stats.PartialScanTask;
 import org.apache.hadoop.hive.ql.io.rcfile.stats.PartialScanWork;
+import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
 import org.apache.hadoop.hive.ql.plan.ColumnStatsWork;
 import org.apache.hadoop.hive.ql.plan.ConditionalWork;
 import org.apache.hadoop.hive.ql.plan.CopyWork;
@@ -94,6 +95,7 @@ public final class TaskFactory {
         StatsTask.class));
     taskvec.add(new TaskTuple<StatsNoJobWork>(StatsNoJobWork.class, StatsNoJobTask.class));
     taskvec.add(new TaskTuple<ColumnStatsWork>(ColumnStatsWork.class, ColumnStatsTask.class));
+    taskvec.add(new TaskTuple<ColumnStatsUpdateWork>(ColumnStatsUpdateWork.class, ColumnStatsUpdateTask.class));
     taskvec.add(new TaskTuple<MergeWork>(MergeWork.class,
         MergeTask.class));
     taskvec.add(new TaskTuple<DependencyCollectionWork>(DependencyCollectionWork.class,

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/hooks/EnforceReadOnlyTables.java Fri Aug 15 15:37:46 2014
@@ -59,7 +59,10 @@ public class EnforceReadOnlyTables imple
   public void run(SessionState sess, Set<ReadEntity> inputs,
       Set<WriteEntity> outputs, UserGroupInformation ugi)
     throws Exception {
-    if (sess.getConf().getBoolean("hive.test.init.phase", false) == true) {
+
+    // Don't enforce during test driver setup or shutdown.
+    if (sess.getConf().getBoolean("hive.test.init.phase", false) ||
+        sess.getConf().getBoolean("hive.test.shutdown.phase", false)) {
       return;
     }
     for (WriteEntity w: outputs) {

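For reference, the hook reads plain boolean flags from the session conf, so the q-file test driver can disable enforcement around setup and cleanup with ordinary set commands; roughly as follows (the dropped table is hypothetical):

    -- test-driver setup: enforcement is skipped while this flag is true
    set hive.test.init.phase=true;
    set hive.test.init.phase=false;

    -- new in this commit: the same escape hatch during teardown
    set hive.test.shutdown.phase=true;
    DROP TABLE some_read_only_test_table;
    set hive.test.shutdown.phase=false;
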
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Fri Aug 15 15:37:46 2014
@@ -2716,4 +2716,19 @@ private void constructOneLBLocationMap(F
     }
   }
 
+  public void setMetaConf(String propName, String propValue) throws HiveException {
+    try {
+      getMSC().setMetaConf(propName, propValue);
+    } catch (TException te) {
+      throw new HiveException(te);
+    }
+  }
+
+  public String getMetaConf(String propName) throws HiveException {
+    try {
+      return getMSC().getMetaConf(propName);
+    } catch (TException te) {
+      throw new HiveException(te);
+    }
+  }
 };

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Fri Aug 15 15:37:46 2014
@@ -25,6 +25,7 @@ import java.io.Serializable;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -57,6 +58,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
+import org.apache.hadoop.hive.ql.exec.ColumnStatsUpdateTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -91,6 +93,8 @@ import org.apache.hadoop.hive.ql.plan.Al
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
+import org.apache.hadoop.hive.ql.plan.ColumnStatsDesc;
+import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
@@ -276,6 +280,8 @@ public class DDLSemanticAnalyzer extends
         analyzeAlterTableClusterSort(ast, tableName, partSpec);
       } else if (ast.getToken().getType() == HiveParser.TOK_COMPACT) {
         analyzeAlterTableCompact(ast, tableName, partSpec);
+      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS) {
+        analyzeAlterTableUpdateStats(ast, tblPart);
       }
       break;
     }
@@ -378,6 +384,9 @@ public class DDLSemanticAnalyzer extends
     case HiveParser.TOK_ALTERTABLE_RENAME:
       analyzeAlterTableRename(ast, false);
       break;
+    case HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS:
+      analyzeAlterTableUpdateStats(ast, null);
+      break;
     case HiveParser.TOK_ALTERTABLE_TOUCH:
       analyzeAlterTableTouch(ast);
       break;
@@ -507,6 +516,57 @@ public class DDLSemanticAnalyzer extends
     }
   }
 
+  private void analyzeAlterTableUpdateStats(ASTNode ast, TablePartition tblPart)
+      throws SemanticException {
+    String tblName = null;
+    String colName = null;
+    Map<String, String> mapProp = null;
+    Map<String, String> partSpec = null;
+    String partName = null;
+    if (tblPart == null) {
+      tblName = getUnescapedName((ASTNode) ast.getChild(0));
+      colName = getUnescapedName((ASTNode) ast.getChild(1));
+      mapProp = getProps((ASTNode) (ast.getChild(2)).getChild(0));
+    } else {
+      tblName = tblPart.tableName;
+      partSpec = tblPart.partSpec;
+      try {
+        partName = Warehouse.makePartName(partSpec, false);
+      } catch (MetaException e) {
+        // makePartName failed: the partition spec is missing or malformed
+        throw new SemanticException("partition " + partSpec.toString()
+            + " not found");
+      }
+      colName = getUnescapedName((ASTNode) ast.getChild(0));
+      mapProp = getProps((ASTNode) (ast.getChild(1)).getChild(0));
+    }
+
+    Table tbl = null;
+    try {
+      tbl = db.getTable(tblName);
+    } catch (HiveException e) {
+      throw new SemanticException("table " + tbl + " not found");
+    }
+
+    String colType = null;
+    List<FieldSchema> cols = tbl.getCols();
+    for (FieldSchema col : cols) {
+      if (colName.equalsIgnoreCase(col.getName())) {
+        colType = col.getType();
+        break;
+      }
+    }
+
+    if (colType == null) {
+      throw new SemanticException("column " + colName + " not found");
+    }
+
+    ColumnStatsDesc cStatsDesc = new ColumnStatsDesc(tbl.getTableName(),
+        Arrays.asList(colName), Arrays.asList(colType), partSpec == null);
+    ColumnStatsUpdateTask cStatsUpdateTask = (ColumnStatsUpdateTask) TaskFactory
+        .get(new ColumnStatsUpdateWork(cStatsDesc, partName, mapProp), conf);
+    rootTasks.add(cStatsUpdateTask);
+  }
+
   private void analyzeSetShowRole(ASTNode ast) throws SemanticException {
     switch (ast.getChildCount()) {
       case 0:
@@ -745,6 +805,8 @@ public class DDLSemanticAnalyzer extends
     if (dbProps != null) {
       createDatabaseDesc.setDatabaseProperties(dbProps);
     }
+    Database database = new Database(dbName, dbComment, dbLocation, dbProps);
+    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
 
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         createDatabaseDesc), conf));
@@ -795,8 +857,12 @@ public class DDLSemanticAnalyzer extends
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc), conf));
   }
 
-  private void analyzeSwitchDatabase(ASTNode ast) {
+  private void analyzeSwitchDatabase(ASTNode ast) throws SemanticException {
     String dbName = unescapeIdentifier(ast.getChild(0).getText());
+    Database database = getDatabase(dbName, true);
+    ReadEntity dbReadEntity = new ReadEntity(database);
+    dbReadEntity.noLockNeeded();
+    inputs.add(dbReadEntity);
     SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         switchDatabaseDesc), conf));
@@ -1015,7 +1081,7 @@ public class DDLSemanticAnalyzer extends
   private void analyzeCreateIndex(ASTNode ast) throws SemanticException {
     String indexName = unescapeIdentifier(ast.getChild(0).getText());
     String typeName = unescapeSQLString(ast.getChild(1).getText());
-    String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(2));
+    String[] qTabName = getQualifiedTableName((ASTNode) ast.getChild(2));
     List<String> indexedCols = getColumnNames((ASTNode) ast.getChild(3));
 
     IndexType indexType = HiveIndex.getIndexType(typeName);
@@ -1080,15 +1146,15 @@ public class DDLSemanticAnalyzer extends
     }
 
     storageFormat.fillDefaultStorageFormat();
-
     if (indexTableName == null) {
-      indexTableName = MetaStoreUtils.getIndexTableName(qualified[0], qualified[1], indexName);
-      indexTableName = qualified[0] + "." + indexTableName; // on same database with base table
+      indexTableName = MetaStoreUtils.getIndexTableName(qTabName[0], qTabName[1], indexName);
+      indexTableName = qTabName[0] + "." + indexTableName; // on same database with base table
     } else {
       indexTableName = getDotName(Utilities.getDbTableName(indexTableName));
     }
+    inputs.add(new ReadEntity(getTable(qTabName)));
 
-    CreateIndexDesc crtIndexDesc = new CreateIndexDesc(getDotName(qualified), indexName,
+    CreateIndexDesc crtIndexDesc = new CreateIndexDesc(getDotName(qTabName), indexName,
         indexedCols, indexTableName, deferredRebuild, storageFormat.getInputFormat(),
         storageFormat.getOutputFormat(),
         storageFormat.getStorageHandler(), typeName, location, idxProps, tblProps,
@@ -1116,6 +1182,8 @@ public class DDLSemanticAnalyzer extends
       }
     }
 
+    inputs.add(new ReadEntity(getTable(tableName)));
+
     DropIndexDesc dropIdxDesc = new DropIndexDesc(indexName, tableName);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         dropIdxDesc), conf));

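To make the new code path concrete, here is a minimal sketch of the statements analyzeAlterTableUpdateStats handles; src and srcpart are the standard test tables, and the statistic keys shown ('numDVs', 'numNulls') are illustrative of what ColumnStatsUpdateTask accepts:

    -- table-level update (the tblPart == null branch)
    ALTER TABLE src UPDATE STATISTICS FOR COLUMN key SET ('numDVs'='307', 'numNulls'='0');

    -- partition-level update (tblPart != null; partName comes from Warehouse.makePartName)
    ALTER TABLE srcpart PARTITION (ds='2008-04-08', hr='11') UPDATE STATISTICS FOR COLUMN key SET ('numDVs'='307');
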
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Fri Aug 15 15:37:46 2014
@@ -146,6 +146,7 @@ TOK_ALTERTABLE_ARCHIVE;
 TOK_ALTERTABLE_UNARCHIVE;
 TOK_ALTERTABLE_SERDEPROPERTIES;
 TOK_ALTERTABLE_SERIALIZER;
+TOK_ALTERTABLE_UPDATECOLSTATS;
 TOK_TABLE_PARTITION;
 TOK_ALTERTABLE_FILEFORMAT;
 TOK_ALTERTABLE_LOCATION;
@@ -938,6 +939,7 @@ alterTableStatementSuffix
     : alterStatementSuffixRename
     | alterStatementSuffixAddCol
     | alterStatementSuffixRenameCol
+    | alterStatementSuffixUpdateStatsCol
     | alterStatementSuffixDropPartitions
     | alterStatementSuffixAddPartitions
     | alterStatementSuffixTouch
@@ -1028,6 +1030,13 @@ alterStatementSuffixRenameCol
     ->^(TOK_ALTERTABLE_RENAMECOL tableName $oldName $newName colType $comment? alterStatementChangeColPosition?)
     ;
 
+alterStatementSuffixUpdateStatsCol
+@init { pushMsg("update column statistics", state); }
+@after { popMsg(state); }
+    : identifier KW_UPDATE KW_STATISTICS KW_FOR KW_COLUMN? colName=identifier KW_SET tableProperties (KW_COMMENT comment=StringLiteral)?
+    ->^(TOK_ALTERTABLE_UPDATECOLSTATS identifier $colName tableProperties $comment?)
+    ;
+
 alterStatementChangeColPosition
     : first=KW_FIRST|KW_AFTER afterCol=identifier
     ->{$first != null}? ^(TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION )
@@ -1130,6 +1139,7 @@ alterTblPartitionStatementSuffix
   | alterStatementSuffixMergeFiles
   | alterStatementSuffixSerdeProperties
   | alterStatementSuffixRenamePart
+  | alterStatementSuffixStatsPart
   | alterStatementSuffixBucketNum
   | alterTblPartitionStatementSuffixSkewedLocation
   | alterStatementSuffixClusterbySortby
@@ -1221,6 +1231,13 @@ alterStatementSuffixRenamePart
     ->^(TOK_ALTERTABLE_RENAMEPART partitionSpec)
     ;
 
+alterStatementSuffixStatsPart
+@init { pushMsg("alter table stats partition statement", state); }
+@after { popMsg(state); }
+    : KW_UPDATE KW_STATISTICS KW_FOR KW_COLUMN? colName=identifier KW_SET tableProperties (KW_COMMENT comment=StringLiteral)?
+    ->^(TOK_ALTERTABLE_UPDATECOLSTATS $colName tableProperties $comment?)
+    ;
+
 alterStatementSuffixMergeFiles
 @init { pushMsg("", state); }
 @after { popMsg(state); }
@@ -1300,6 +1317,7 @@ descStatement
     | (KW_DESCRIBE|KW_DESC) (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? (dbName=identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
     ;
 
+
 analyzeStatement
 @init { pushMsg("analyze statement", state); }
 @after { popMsg(state); }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Aug 15 15:37:46 2014
@@ -10163,6 +10163,7 @@ public class SemanticAnalyzer extends Ba
     String dbName = qualified.length == 1 ? SessionState.get().getCurrentDatabase() : qualified[0];
     Database database  = getDatabase(dbName);
     outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
+    outputs.add(new WriteEntity(new Table(dbName, tableName), WriteEntity.WriteType.DDL_NO_LOCK));
 
     if (isTemporary) {
       if (partCols.size() > 0) {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Fri Aug 15 15:37:46 2014
@@ -109,6 +109,7 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_ALTERTABLE_PARTCOLTYPE, HiveOperation.ALTERTABLE_PARTCOLTYPE);
     commandType.put(HiveParser.TOK_SHOW_COMPACTIONS, HiveOperation.SHOW_COMPACTIONS);
     commandType.put(HiveParser.TOK_SHOW_TRANSACTIONS, HiveOperation.SHOW_TRANSACTIONS);
+    commandType.put(HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS, HiveOperation.ALTERTABLE_UPDATETABLESTATS);
   }
 
   static {
@@ -231,12 +232,14 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_TRUNCATETABLE:
       case HiveParser.TOK_EXCHANGEPARTITION:
       case HiveParser.TOK_SHOW_SET_ROLE:
-
+      case HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS:
         return new DDLSemanticAnalyzer(conf);
       case HiveParser.TOK_ALTERTABLE_PARTITION:
         HiveOperation commandType = null;
         Integer type = ((ASTNode) tree.getChild(1)).getToken().getType();
-        if (tree.getChild(0).getChildCount() > 1) {
+        if (type == HiveParser.TOK_ALTERTABLE_UPDATECOLSTATS) {
+          commandType = HiveOperation.ALTERTABLE_UPDATEPARTSTATS;
+        } else if (tree.getChild(0).getChildCount() > 1) {
           commandType = tablePartitionCommandType.get(type)[1];
         } else {
           commandType = tablePartitionCommandType.get(type)[0];

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Fri Aug 15 15:37:46 2014
@@ -27,7 +27,7 @@ public enum HiveOperation {
   IMPORT("IMPORT", null, new Privilege[]{Privilege.ALTER_METADATA, Privilege.ALTER_DATA}),
   CREATEDATABASE("CREATEDATABASE", null, new Privilege[]{Privilege.CREATE}),
   DROPDATABASE("DROPDATABASE", null, new Privilege[]{Privilege.DROP}),
-  SWITCHDATABASE("SWITCHDATABASE", new Privilege[]{Privilege.SELECT}, null),
+  SWITCHDATABASE("SWITCHDATABASE", null, null),
   LOCKDB("LOCKDATABASE",  new Privilege[]{Privilege.LOCK}, null),
   UNLOCKDB("UNLOCKDATABASE",  new Privilege[]{Privilege.LOCK}, null),
   DROPTABLE ("DROPTABLE", null, new Privilege[]{Privilege.DROP}),
@@ -38,6 +38,8 @@ public enum HiveOperation {
   ALTERTABLE_REPLACECOLS("ALTERTABLE_REPLACECOLS", new Privilege[]{Privilege.ALTER_METADATA}, null),
   ALTERTABLE_RENAMECOL("ALTERTABLE_RENAMECOL", new Privilege[]{Privilege.ALTER_METADATA}, null),
   ALTERTABLE_RENAMEPART("ALTERTABLE_RENAMEPART", new Privilege[]{Privilege.DROP}, new Privilege[]{Privilege.CREATE}),
+  ALTERTABLE_UPDATEPARTSTATS("ALTERTABLE_UPDATEPARTSTATS", new Privilege[]{Privilege.ALTER_METADATA}, null),
+  ALTERTABLE_UPDATETABLESTATS("ALTERTABLE_UPDATETABLESTATS", new Privilege[]{Privilege.ALTER_METADATA}, null),
   ALTERTABLE_RENAME("ALTERTABLE_RENAME", new Privilege[]{Privilege.ALTER_METADATA}, null),
   ALTERTABLE_DROPPARTS("ALTERTABLE_DROPPARTS", new Privilege[]{Privilege.DROP}, null),
   // The location is input and table is output for alter-table add partitions

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Fri Aug 15 15:37:46 2014
@@ -157,7 +157,7 @@ public final class PlanUtils {
     } catch (ClassNotFoundException e) {
       // mimicking behaviour in CreateTableDesc tableDesc creation
       // returning null table description for output.
-      e.printStackTrace();
+      LOG.warn("Unable to find class in getDefaultTableDesc: " + e.getMessage(), e);
       return null;
     }
     return ret;
@@ -364,8 +364,7 @@ public final class PlanUtils {
       ret.setInputFileFormatClass(in_class);
       ret.setOutputFileFormatClass(out_class);
     } catch (ClassNotFoundException e) {
-      e.printStackTrace();
-      return null;
+      throw new RuntimeException("Unable to find class in getTableDesc: " + e.getMessage(), e);
     }
     return ret;
   }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java Fri Aug 15 15:37:46 2014
@@ -32,6 +32,7 @@ import java.util.TreeMap;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -104,11 +105,12 @@ public class SetProcessor implements Com
     try {
       return new CommandProcessorResponse(setVariable(varname, varvalue));
     } catch (Exception e) {
-      return new CommandProcessorResponse(1, e.getMessage(), "42000");
+      return new CommandProcessorResponse(1, e.getMessage(), "42000",
+          e instanceof IllegalArgumentException ? null : e);
     }
   }
 
-  public static int setVariable(String varname, String varvalue) throws IllegalArgumentException {
+  public static int setVariable(String varname, String varvalue) throws Exception {
     SessionState ss = SessionState.get();
     if (varvalue.contains("\n")){
       ss.err.println("Warning: Value had a \\n character in it.");
@@ -126,6 +128,10 @@ public class SetProcessor implements Com
     } else if (varname.startsWith(HIVEVAR_PREFIX)) {
       String propName = varname.substring(HIVEVAR_PREFIX.length());
       ss.getHiveVariables().put(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue));
+    } else if (varname.startsWith(METACONF_PREFIX)) {
+      String propName = varname.substring(METACONF_PREFIX.length());
+      Hive hive = Hive.get(ss.getConf());
+      hive.setMetaConf(propName, new VariableSubstitution().substitute(ss.getConf(), varvalue));
     } else {
       setConf(varname, varname, varvalue, true);
     }
@@ -178,8 +184,7 @@ public class SetProcessor implements Com
     return sortedEnvMap;
   }
 
-
-  private CommandProcessorResponse getVariable(String varname) {
+  private CommandProcessorResponse getVariable(String varname) throws Exception {
     SessionState ss = SessionState.get();
     if (varname.equals("silent")){
       ss.out.println("silent" + "=" + ss.getIsSilent());
@@ -222,6 +227,17 @@ public class SetProcessor implements Com
         ss.out.println(varname + " is undefined as a hive variable");
         return new CommandProcessorResponse(1);
       }
+    } else if (varname.indexOf(METACONF_PREFIX) == 0) {
+      String var = varname.substring(METACONF_PREFIX.length());
+      Hive hive = Hive.get(ss.getConf());
+      String value = hive.getMetaConf(var);
+      if (value != null) {
+        ss.out.println(METACONF_PREFIX + var + "=" + value);
+        return createProcessorSuccessResponse();
+      } else {
+        ss.out.println(varname + " is undefined as a hive meta variable");
+        return new CommandProcessorResponse(1);
+      }
     } else {
       dumpOption(varname);
       return createProcessorSuccessResponse();
@@ -263,10 +279,12 @@ public class SetProcessor implements Com
         return new CommandProcessorResponse(0);
       }
       return executeSetVariable(part[0],part[1]);
-    } else {
+    }
+    try {
       return getVariable(nwcmd);
+    } catch (Exception e) {
+      return new CommandProcessorResponse(1, e.getMessage(), "42000", e);
     }
-
   }
 
 // create a Schema object containing the given column

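A short usage sketch for the new metaconf: prefix; the property named here is illustrative and must be on the metastore's whitelist of session-settable variables, or setMetaConf will reject it:

    -- routed through Hive.setMetaConf() by the new METACONF_PREFIX branch
    set metaconf:hive.metastore.try.direct.sql=false;

    -- read back through Hive.getMetaConf()
    set metaconf:hive.metastore.try.direct.sql;
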
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java Fri Aug 15 15:37:46 2014
@@ -59,6 +59,8 @@ public enum HiveOperationType {
   ANALYZE_TABLE,
   ALTERTABLE_BUCKETNUM,
   ALTERPARTITION_BUCKETNUM,
+  ALTERTABLE_UPDATETABLESTATS,
+  ALTERTABLE_UPDATEPARTSTATS,
   SHOWDATABASES,
   SHOWTABLES,
   SHOWCOLUMNS,

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java Fri Aug 15 15:37:46 2014
@@ -27,6 +27,7 @@ import java.util.Set;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 
 /**
  * Mapping of operation to its required input and output privileges
@@ -43,6 +44,7 @@ public class Operation2Privilege {
     // The following fields specify the criteria on objects for this priv to be required
     private final IOType ioType;
     private final HivePrivObjectActionType actionType;
+    private final HivePrivilegeObjectType objectType;
 
 
     private PrivRequirement(SQLPrivTypeGrant[] privs, IOType ioType) {
@@ -51,11 +53,22 @@ public class Operation2Privilege {
 
     private PrivRequirement(SQLPrivTypeGrant[] privs, IOType ioType,
         HivePrivObjectActionType actionType) {
+      this(privs, ioType, actionType, null);
+    }
+
+    private PrivRequirement(SQLPrivTypeGrant[] privs, HivePrivilegeObjectType objectType) {
+      this(privs, null, null, objectType);
+    }
+
+    private PrivRequirement(SQLPrivTypeGrant[] privs, IOType ioType,
+        HivePrivObjectActionType actionType, HivePrivilegeObjectType objectType) {
       this.reqPrivs = privs;
       this.ioType = ioType;
       this.actionType = actionType;
+      this.objectType = objectType;
     }
 
+
     /**
      * Utility function that takes a input and output privilege objects
      * @param inGrant
@@ -70,6 +83,15 @@ public class Operation2Privilege {
       return privReqs;
     }
 
+    /**
+     * Utility function that converts PrivRequirement array into list
+     * @param privs
+     * @return
+     */
+    static List<PrivRequirement> newPrivRequirementList(PrivRequirement... privs) {
+      return new ArrayList<PrivRequirement>(Arrays.asList(privs));
+    }
+
     private SQLPrivTypeGrant[] getReqPrivs() {
       return reqPrivs;
     }
@@ -82,6 +104,10 @@ public class Operation2Privilege {
       return actionType;
     }
 
+    public HivePrivilegeObjectType getObjectType() {
+      return objectType;
+    }
+
   }
 
   private static Map<HiveOperationType, List<PrivRequirement>> op2Priv;
@@ -107,9 +133,9 @@ public class Operation2Privilege {
 (SEL_NOGRANT_AR,
         SEL_NOGRANT_AR)); //??
 
-    op2Priv.put(HiveOperationType.CREATEDATABASE,
-        PrivRequirement.newIOPrivRequirement
-(ADMIN_PRIV_AR, OWNER_INS_SEL_DEL_NOGRANT_AR));
+    op2Priv.put(HiveOperationType.CREATEDATABASE, PrivRequirement.newPrivRequirementList(
+        new PrivRequirement(OWNER_INS_SEL_DEL_NOGRANT_AR, HivePrivilegeObjectType.DFS_URI),
+        new PrivRequirement(OWNER_INS_SEL_DEL_NOGRANT_AR, HivePrivilegeObjectType.LOCAL_URI)));
 
     op2Priv.put(HiveOperationType.DROPDATABASE, PrivRequirement.newIOPrivRequirement
 (null, OWNER_PRIV_AR));
@@ -150,6 +176,10 @@ public class Operation2Privilege {
 (OWNER_PRIV_AR, OWNER_PRIV_AR));
     op2Priv.put(HiveOperationType.ALTERTABLE_RENAME, PrivRequirement.newIOPrivRequirement
 (OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_UPDATETABLESTATS, PrivRequirement.newIOPrivRequirement
+(OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERTABLE_UPDATEPARTSTATS, PrivRequirement.newIOPrivRequirement
+(OWNER_PRIV_AR, OWNER_PRIV_AR));
     op2Priv.put(HiveOperationType.ALTERTABLE_TOUCH, PrivRequirement.newIOPrivRequirement
 (OWNER_PRIV_AR, OWNER_PRIV_AR));
     op2Priv.put(HiveOperationType.ALTERTABLE_ARCHIVE, PrivRequirement.newIOPrivRequirement
@@ -300,9 +330,9 @@ public class Operation2Privilege {
 (null, null));
 
     // require db ownership, if there is a file require SELECT , INSERT, and DELETE
-    op2Priv.put(HiveOperationType.CREATETABLE,
-        PrivRequirement.newIOPrivRequirement
-(OWNER_INS_SEL_DEL_NOGRANT_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.CREATETABLE, PrivRequirement.newPrivRequirementList(
+        new PrivRequirement(OWNER_INS_SEL_DEL_NOGRANT_AR, IOType.INPUT),
+        new PrivRequirement(OWNER_PRIV_AR, HivePrivilegeObjectType.DATABASE)));
 
     op2Priv.put(HiveOperationType.ALTERDATABASE, PrivRequirement.newIOPrivRequirement
 (null, ADMIN_PRIV_AR));
@@ -388,20 +418,17 @@ public class Operation2Privilege {
     List<PrivRequirement> opPrivs = op2Priv.get(hiveOpType);
     RequiredPrivileges reqPrivs = new RequiredPrivileges();
 
-    // Find the PrivRequirements that match on IOType and ActionType, and add
-    // the privilege
-    // required to reqPrivs
+    // Find the PrivRequirements that match on IOType, ActionType, and
+    // HivePrivilegeObjectType, and add the privileges they require to reqPrivs
     for (PrivRequirement opPriv : opPrivs) {
-      if (opPriv.getIOType() != ioType) {
+      if (opPriv.getIOType() != null && opPriv.getIOType() != ioType) {
+        continue;
+      }
+      if (opPriv.getActionType() != null && opPriv.getActionType() != hObj.getActionType()) {
         continue;
       }
-      if (opPriv.getActionType() != null) {
-        // if action in PrivRequirement is null, it means that
-        // the privileges are required irrespective of hObj's action type
-        // If it is not null, action type has to match
-        if (hObj.getActionType() != opPriv.getActionType()) {
-          continue;
-        }
+      if (opPriv.getObjectType() != null && opPriv.getObjectType() != hObj.getType()) {
+        continue;
       }
       reqPrivs.addAll(opPriv.getReqPrivs());
     }

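Because a null ioType, actionType, or objectType now acts as a wildcard, a PrivRequirement can be keyed to an object type alone. Under the reworked CREATEDATABASE mapping above, only the URI objects of the command carry a requirement, so a statement like the following (the location is illustrative) is checked against the DFS_URI entry instead of a blanket admin privilege:

    CREATE DATABASE db_auth LOCATION '/user/hive/warehouse/db_auth.db';
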
Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java Fri Aug 15 15:37:46 2014
@@ -94,6 +94,11 @@ public class SQLStdHiveAuthorizationVali
       RequiredPrivileges requiredPrivs = Operation2Privilege.getRequiredPrivs(hiveOpType, hiveObj,
           ioType);
 
+      if (requiredPrivs.getRequiredPrivilegeSet().isEmpty()) {
+        // no privileges required, so there is nothing to check for this object
+        continue;
+      }
+
       // find available privileges
       RequiredPrivileges availPrivs = new RequiredPrivileges(); //start with an empty priv set;
       switch (hiveObj.getType()) {

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/repair.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/repair.q?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/repair.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/repair.q Fri Aug 15 15:37:46 2014
@@ -8,10 +8,10 @@ dfs ${system:test.dfs.mkdir} ${system:te
 dfs ${system:test.dfs.mkdir} ${system:test.warehouse.dir}/repairtable/p1=b/p2=a;
 dfs -touchz ${system:test.warehouse.dir}/repairtable/p1=b/p2=a/datafile;
 
-MSCK TABLE repairtable;
+MSCK TABLE default.repairtable;
 
-MSCK REPAIR TABLE repairtable;
+MSCK REPAIR TABLE default.repairtable;
 
 MSCK TABLE repairtable;
 
-DROP TABLE repairtable;
+DROP TABLE default.repairtable;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/show_partitions.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/show_partitions.q?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/show_partitions.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/show_partitions.q Fri Aug 15 15:37:46 2014
@@ -3,3 +3,30 @@ SHOW PARTITIONS default.srcpart;
 SHOW PARTITIONS srcpart PARTITION(hr='11');
 SHOW PARTITIONS srcpart PARTITION(ds='2008-04-08');
 SHOW PARTITIONS srcpart PARTITION(ds='2008-04-08', hr='12');
+
+
+SHOW PARTITIONS default.srcpart;
+SHOW PARTITIONS default.srcpart PARTITION(hr='11');
+SHOW PARTITIONS default.srcpart PARTITION(ds='2008-04-08');
+SHOW PARTITIONS default.srcpart PARTITION(ds='2008-04-08', hr='12');
+
+CREATE DATABASE db1;
+USE db1;
+
+CREATE TABLE srcpart (key1 INT, value1 STRING) PARTITIONED BY (ds STRING, hr STRING);
+ALTER TABLE srcpart ADD PARTITION (ds='3', hr='3');
+ALTER TABLE srcpart ADD PARTITION (ds='4', hr='4');
+ALTER TABLE srcpart ADD PARTITION (ds='4', hr='5');
+
+-- from db1 to default db
+SHOW PARTITIONS default.srcpart PARTITION(hr='11');
+SHOW PARTITIONS default.srcpart PARTITION(ds='2008-04-08', hr='12');
+
+-- from db1 to db1
+SHOW PARTITIONS srcpart PARTITION(ds='4');
+SHOW PARTITIONS srcpart PARTITION(ds='3', hr='3');
+
+use default;
+-- from default to db1
+SHOW PARTITIONS db1.srcpart PARTITION(ds='4');
+SHOW PARTITIONS db1.srcpart PARTITION(ds='3', hr='3');
\ No newline at end of file

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/show_tblproperties.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/show_tblproperties.q?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/show_tblproperties.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/show_tblproperties.q Fri Aug 15 15:37:46 2014
@@ -1,6 +1,7 @@
 
 create table tmpfoo (a String);
 show tblproperties tmpfoo("bar");
+show tblproperties default.tmpfoo("bar");
 
 alter table tmpfoo set tblproperties ("bar" = "bar value");
 alter table tmpfoo set tblproperties ("tmp" = "true");
@@ -8,4 +9,26 @@ alter table tmpfoo set tblproperties ("t
 show tblproperties tmpfoo;
 show tblproperties tmpfoo("bar");
 
-drop table tmpfoo;
+show tblproperties default.tmpfoo;
+show tblproperties default.tmpfoo("bar");
+
+CREATE DATABASE db1;
+USE db1;
+
+CREATE TABLE tmpfoo (b STRING);
+alter table tmpfoo set tblproperties ("bar" = "bar value1");
+alter table tmpfoo set tblproperties ("tmp" = "true1");
+
+-- from db1 to default db
+show tblproperties default.tmpfoo;
+show tblproperties default.tmpfoo("bar");
+
+-- from db1 to db1
+show tblproperties tmpfoo;
+show tblproperties tmpfoo("bar");
+
+use default;
+-- from default to db1
+show tblproperties db1.tmpfoo;
+show tblproperties db1.tmpfoo("bar");
+

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/virtual_column.q
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/virtual_column.q?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/virtual_column.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/virtual_column.q Fri Aug 15 15:37:46 2014
@@ -14,6 +14,3 @@ CREATE TABLE src_index_test_rc (key int,
 set hive.io.rcfile.record.buffer.size = 1024;
 INSERT OVERWRITE TABLE src_index_test_rc SELECT * FROM src;
 select INPUT__FILE__NAME, key, BLOCK__OFFSET__INSIDE__FILE from src_index_test_rc order by key;
-
-DROP TABLE src_index_test_rc;
-DROP INDEX src_index on src_index_test_rc;

Modified: hive/branches/spark/ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/add_partition_with_whitelist.q.out Fri Aug 15 15:37:46 2014
@@ -3,6 +3,7 @@ PREHOOK: query: -- This pattern matches 
 CREATE TABLE part_whitelist_test (key STRING, value STRING) PARTITIONED BY (ds STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@part_whitelist_test
 POSTHOOK: query: -- This pattern matches all printable ASCII characters (disallow unicode) and disallows commas
 
 CREATE TABLE part_whitelist_test (key STRING, value STRING) PARTITIONED BY (ds STRING)

Modified: hive/branches/spark/ql/src/test/results/clientnegative/addpart1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/addpart1.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/addpart1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/addpart1.q.out Fri Aug 15 15:37:46 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table addpart1 (a int) partitioned by (b string, c string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@addpart1
 POSTHOOK: query: create table addpart1 (a int) partitioned by (b string, c string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_concatenate_indexed_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_concatenate_indexed_table.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_concatenate_indexed_table.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_concatenate_indexed_table.q.out Fri Aug 15 15:37:46 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table src_rc_concatenate_test(key int, value string) stored as rcfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@src_rc_concatenate_test
 POSTHOOK: query: create table src_rc_concatenate_test(key int, value string) stored as rcfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -66,8 +67,10 @@ POSTHOOK: Input: default@src_rc_concaten
 214	-7678496319
 PREHOOK: query: create index src_rc_concatenate_test_index on table src_rc_concatenate_test(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2")
 PREHOOK: type: CREATEINDEX
+PREHOOK: Input: default@src_rc_concatenate_test
 POSTHOOK: query: create index src_rc_concatenate_test_index on table src_rc_concatenate_test(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2")
 POSTHOOK: type: CREATEINDEX
+POSTHOOK: Input: default@src_rc_concatenate_test
 POSTHOOK: Output: default@default__src_rc_concatenate_test_src_rc_concatenate_test_index__
 PREHOOK: query: show indexes on src_rc_concatenate_test
 PREHOOK: type: SHOWINDEXES

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_file_format.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_file_format.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_file_format.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_file_format.q.out Fri Aug 15 15:37:46 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table alter_file_format_test (key int, value string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_file_format_test
 POSTHOOK: query: create table alter_file_format_test (key int, value string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_non_native.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_non_native.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_non_native.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_non_native.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: CREATE TABLE non_native1
 STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@non_native1
 POSTHOOK: query: CREATE TABLE non_native1(key int, value string) 
 STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_2columns.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- create testing table
 create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_coltype
 POSTHOOK: query: -- create testing table
 create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_invalidcolname.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_invalidcolname.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_invalidcolname.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_invalidcolname.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- create testing table
 create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_coltype
 POSTHOOK: query: -- create testing table
 create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_invalidtype.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_invalidtype.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_invalidtype.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_coltype_invalidtype.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- create testing table
 create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_coltype
 POSTHOOK: query: -- create testing table
 create table alter_coltype(key string, value string) partitioned by (dt string, ts string)
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_invalidspec.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_invalidspec.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_invalidspec.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_invalidspec.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- Create table
 create table if not exists alter_part_invalidspec(key string, value string ) partitioned by (year string, month string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_part_invalidspec
 POSTHOOK: query: -- Create table
 create table if not exists alter_part_invalidspec(key string, value string ) partitioned by (year string, month string) stored as textfile
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_nodrop.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_nodrop.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_nodrop.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_nodrop.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- Create table
 create table if not exists alter_part_nodrop_part(key string, value string ) partitioned by (year string, month string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_part_nodrop_part
 POSTHOOK: query: -- Create table
 create table if not exists alter_part_nodrop_part(key string, value string ) partitioned by (year string, month string) stored as textfile
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_nodrop_table.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- Create table
 create table if not exists alter_part_nodrop_table(key string, value string ) partitioned by (year string, month string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_part_nodrop_table
 POSTHOOK: query: -- Create table
 create table if not exists alter_part_nodrop_table(key string, value string ) partitioned by (year string, month string) stored as textfile
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_offline.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_offline.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_offline.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_offline.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- create table
 create table if not exists alter_part_offline (key string, value string ) partitioned by (year string, month string) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_part_offline
 POSTHOOK: query: -- create table
 create table if not exists alter_part_offline (key string, value string ) partitioned by (year string, month string) stored as textfile
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_partition_with_whitelist.q.out Fri Aug 15 15:37:46 2014
@@ -3,6 +3,7 @@ PREHOOK: query: -- This pattern matches 
 CREATE TABLE part_whitelist_test (key STRING, value STRING) PARTITIONED BY (ds STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@part_whitelist_test
 POSTHOOK: query: -- This pattern matches all printable ASCII characters (disallow unicode) and disallows commas
 
 CREATE TABLE part_whitelist_test (key STRING, value STRING) PARTITIONED BY (ds STRING)

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure.q.out Fri Aug 15 15:37:46 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table alter_rename_partition_src ( col1 string ) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_rename_partition_src
 POSTHOOK: query: create table alter_rename_partition_src ( col1 string ) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -16,6 +17,7 @@ POSTHOOK: Output: default@alter_rename_p
 PREHOOK: query: create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_rename_partition
 POSTHOOK: query: create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure2.q.out Fri Aug 15 15:37:46 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table alter_rename_partition_src ( col1 string ) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_rename_partition_src
 POSTHOOK: query: create table alter_rename_partition_src ( col1 string ) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -16,6 +17,7 @@ POSTHOOK: Output: default@alter_rename_p
 PREHOOK: query: create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_rename_partition
 POSTHOOK: query: create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_rename_partition_failure3.q.out Fri Aug 15 15:37:46 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table alter_rename_partition_src ( col1 string ) stored as textfile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_rename_partition_src
 POSTHOOK: query: create table alter_rename_partition_src ( col1 string ) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
@@ -16,6 +17,7 @@ POSTHOOK: Output: default@alter_rename_p
 PREHOOK: query: create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@alter_rename_partition
 POSTHOOK: query: create table alter_rename_partition ( col1 string ) partitioned by (pcol1 string , pcol2 string) stored as sequencefile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_table_add_partition.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_table_add_partition.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_table_add_partition.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_table_add_partition.q.out Fri Aug 15 15:37:46 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table mp (a int) partitioned by (b int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@mp
 POSTHOOK: query: create table mp (a int) partitioned by (b int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_table_wrong_regex.q.out Fri Aug 15 15:37:46 2014
@@ -7,6 +7,7 @@ PREHOOK: query: create table aa ( test S
   WITH SERDEPROPERTIES ("input.regex" = "(.*)", "output.format.string" = "$1s")
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@aa
 POSTHOOK: query: create table aa ( test STRING )
   ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe'
   WITH SERDEPROPERTIES ("input.regex" = "(.*)", "output.format.string" = "$1s")

Modified: hive/branches/spark/ql/src/test/results/clientnegative/alter_view_failure8.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/alter_view_failure8.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/alter_view_failure8.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/alter_view_failure8.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- should fail:  can't u
 CREATE TABLE invites (foo INT, bar STRING) PARTITIONED BY (ds STRING)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@invites
 POSTHOOK: query: -- should fail:  can't use ALTER VIEW on a table
 CREATE TABLE invites (foo INT, bar STRING) PARTITIONED BY (ds STRING)
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/altern1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/altern1.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/altern1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/altern1.q.out Fri Aug 15 15:37:46 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table altern1(a int, b int) partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@altern1
 POSTHOOK: query: create table altern1(a int, b int) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive1.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive1.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to archi
 CREATE TABLE srcpart_archived LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@srcpart_archived
 POSTHOOK: query: -- Tests trying to archive a partition twice.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive2.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive2.q.out Fri Aug 15 15:37:46 2014
@@ -11,6 +11,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: create table tstsrcpart like srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: create table tstsrcpart like srcpart
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_corrupt.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_corrupt.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_corrupt.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_corrupt.q.out Fri Aug 15 15:37:46 2014
@@ -1,7 +1,9 @@
 PREHOOK: query: USE default
 PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
 POSTHOOK: query: USE default
 POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
 PREHOOK: query: drop table tstsrcpart
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table tstsrcpart
@@ -9,6 +11,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: create table tstsrcpart like srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: create table tstsrcpart like srcpart
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_insert1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_insert1.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_insert1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_insert1.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to inser
 CREATE TABLE tstsrcpart LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: -- Tests trying to insert into archived partition.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_insert2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_insert2.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_insert2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_insert2.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to inser
 CREATE TABLE tstsrcpart LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: -- Tests trying to insert into archived partition.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_insert3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_insert3.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_insert3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_insert3.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to creat
 CREATE TABLE tstsrcpart LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: -- Tests trying to create partition inside of archived directory.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_insert4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_insert4.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_insert4.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_insert4.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to (poss
 CREATE TABLE tstsrcpart LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: -- Tests trying to (possible) dynamic insert into archived partition.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_multi1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_multi1.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_multi1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_multi1.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to archi
 CREATE TABLE tstsrcpart LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: -- Tests trying to archive a partition twice.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_multi2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_multi2.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_multi2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_multi2.q.out Fri Aug 15 15:37:46 2014
@@ -11,6 +11,7 @@ POSTHOOK: type: DROPTABLE
 PREHOOK: query: create table tstsrcpart like srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: create table tstsrcpart like srcpart
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_multi3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_multi3.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_multi3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_multi3.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to archi
 CREATE TABLE tstsrcpart LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: -- Tests trying to archive outer partition group containing other partition inside.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_multi4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_multi4.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_multi4.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_multi4.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to archi
 CREATE TABLE tstsrcpart LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: -- Tests trying to archive inner partition contained in archived partition group.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_multi5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_multi5.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_multi5.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_multi5.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to unarc
 CREATE TABLE tstsrcpart LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: -- Tests trying to unarchive outer partition group containing other partition inside.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_multi6.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_multi6.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_multi6.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_multi6.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to unarc
 CREATE TABLE tstsrcpart LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: -- Tests trying to unarchive inner partition contained in archived partition group.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_multi7.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_multi7.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_multi7.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_multi7.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to archi
 CREATE TABLE tstsrcpart LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tstsrcpart
 POSTHOOK: query: -- Tests trying to archive a partition group with custom locations.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec1.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec1.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to archi
 CREATE TABLE srcpart_archived LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@srcpart_archived
 POSTHOOK: query: -- Tests trying to archive a partition twice.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec2.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec2.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to archi
 CREATE TABLE srcpart_archived LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@srcpart_archived
 POSTHOOK: query: -- Tests trying to archive a partition twice.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec3.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec3.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to archi
 CREATE TABLE srcpart_archived LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@srcpart_archived
 POSTHOOK: query: -- Tests trying to archive a partition twice.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec4.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec4.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec4.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to archi
 CREATE TABLE srcpart_archived LIKE srcpart
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@srcpart_archived
 POSTHOOK: query: -- Tests trying to archive a partition twice.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec5.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec5.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/archive_partspec5.q.out Fri Aug 15 15:37:46 2014
@@ -4,6 +4,7 @@ PREHOOK: query: -- Tests trying to archi
 CREATE TABLE srcpart_archived (key string, value string) partitioned by (ds string, hr int, min int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@srcpart_archived
 POSTHOOK: query: -- Tests trying to archive a partition twice.
 -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19)
 

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_addpartition.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_addpartition.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_addpartition.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_addpartition.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- check add partition w
 create table tpart(i int, j int) partitioned by (k string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tpart
 POSTHOOK: query: -- check add partition without insert privilege
 create table tpart(i int, j int) partitioned by (k string)
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_alter_db_owner.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_alter_db_owner.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_alter_db_owner.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_alter_db_owner.q.out Fri Aug 15 15:37:46 2014
@@ -2,8 +2,10 @@
 
 create database dbao
 PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:dbao
 #### A masked pattern was here ####
 
 create database dbao
 POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:dbao
 FAILED: HiveAccessControlException Permission denied: Principal [name=user1, type=USER] does not have following privileges for operation ALTERDATABASE_OWNER [[ADMIN PRIVILEGE] on Object [type=DATABASE, name=dbao]]

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_createview.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_createview.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_createview.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_createview.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- check create view wit
 create table t1(i int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t1
 POSTHOOK: query: -- check create view without select privileges
 create table t1(i int)
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_ctas.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_ctas.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_ctas.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_ctas.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- check query without s
 create table t1(i int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t1
 POSTHOOK: query: -- check query without select privilege fails
 create table t1(i int)
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_desc_table_nosel.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_desc_table_nosel.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_desc_table_nosel.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_desc_table_nosel.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- check if alter table 
 create table t1(i int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@t1
 POSTHOOK: query: -- check if alter table fails as different user
 create table t1(i int)
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_drop_db_cascade.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_drop_db_cascade.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_drop_db_cascade.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_drop_db_cascade.q.out Fri Aug 15 15:37:46 2014
@@ -1,15 +1,19 @@
 PREHOOK: query: -- ensure that drop database cascade works
 create database dba1
 PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:dba1
 POSTHOOK: query: -- ensure that drop database cascade works
 create database dba1
 POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:dba1
 PREHOOK: query: create table dba1.tab1(i int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:dba1
+PREHOOK: Output: dba1@dba1.tab1
 POSTHOOK: query: create table dba1.tab1(i int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:dba1
+POSTHOOK: Output: dba1@dba1.tab1
 POSTHOOK: Output: dba1@tab1
 PREHOOK: query: drop database dba1 cascade
 PREHOOK: type: DROPDATABASE
@@ -24,15 +28,19 @@ POSTHOOK: Output: dba1@tab1
 PREHOOK: query: -- check if drop database fails if the db has a table for which user does not have permission
 create database dba2
 PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:dba2
 POSTHOOK: query: -- check if drop database fails if the db has a table for which user does not have permission
 create database dba2
 POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:dba2
 PREHOOK: query: create table dba2.tab2(i int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:dba2
+PREHOOK: Output: dba2@dba2.tab2
 POSTHOOK: query: create table dba2.tab2(i int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:dba2
+POSTHOOK: Output: dba2@dba2.tab2
 POSTHOOK: Output: dba2@tab2
 PREHOOK: query: set role ADMIN
 PREHOOK: type: SHOW_ROLES

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_drop_db_empty.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_drop_db_empty.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_drop_db_empty.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_drop_db_empty.q.out Fri Aug 15 15:37:46 2014
@@ -1,9 +1,11 @@
 #### A masked pattern was here ####
 create database dba1
 PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:dba1
 #### A masked pattern was here ####
 create database dba1
 POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:dba1
 PREHOOK: query: set role ADMIN
 PREHOOK: type: SHOW_ROLES
 POSTHOOK: query: set role ADMIN
@@ -38,8 +40,10 @@ public
 
 PREHOOK: query: create database dba2
 PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:dba2
 POSTHOOK: query: create database dba2
 POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:dba2
 PREHOOK: query: show current roles
 PREHOOK: type: SHOW_ROLES
 POSTHOOK: query: show current roles

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_droppartition.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_droppartition.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_droppartition.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_droppartition.q.out Fri Aug 15 15:37:46 2014
@@ -2,6 +2,7 @@ PREHOOK: query: -- check drop partition 
 create table tpart(i int, j int) partitioned by (k string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@tpart
 POSTHOOK: query: -- check drop partition without delete privilege
 create table tpart(i int, j int) partitioned by (k string)
 POSTHOOK: type: CREATETABLE

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_1.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_1.q.out Fri Aug 15 15:37:46 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table authorization_fail_1 (key int, value string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@authorization_fail_1
 POSTHOOK: query: create table authorization_fail_1 (key int, value string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_2.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_2.q.out Fri Aug 15 15:37:46 2014
@@ -1,6 +1,7 @@
 PREHOOK: query: create table authorization_fail_2 (key int, value string) partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@authorization_fail_2
 POSTHOOK: query: create table authorization_fail_2 (key int, value string) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_3.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_3.q.out Fri Aug 15 15:37:46 2014
@@ -3,6 +3,7 @@ PREHOOK: query: -- SORT_BEFORE_DIFF
 create table authorization_fail_3 (key int, value string) partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@authorization_fail_3
 POSTHOOK: query: -- SORT_BEFORE_DIFF
 
 create table authorization_fail_3 (key int, value string) partitioned by (ds string)

Modified: hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_4.q.out?rev=1618215&r1=1618214&r2=1618215&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_4.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientnegative/authorization_fail_4.q.out Fri Aug 15 15:37:46 2014
@@ -3,6 +3,7 @@ PREHOOK: query: -- SORT_BEFORE_DIFF
 create table authorization_fail_4 (key int, value string) partitioned by (ds string)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
+PREHOOK: Output: default@authorization_fail_4
 POSTHOOK: query: -- SORT_BEFORE_DIFF
 
 create table authorization_fail_4 (key int, value string) partitioned by (ds string)