Posted to commits@hive.apache.org by gu...@apache.org on 2014/02/05 22:09:25 UTC

svn commit: r1564925 [1/2] - in /hive/branches/tez: ./ hbase-handler/src/java/org/apache/hadoop/hive/hbase/ hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/ hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api...

Author: gunther
Date: Wed Feb  5 21:09:23 2014
New Revision: 1564925

URL: http://svn.apache.org/r1564925
Log:
Merge latest trunk into branch. (Gunther Hagleitner)

Added:
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/VersionDelegator.java
      - copied unchanged from r1564922, hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/VersionDelegator.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMath.java
      - copied unchanged from r1564922, hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMath.java
    hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFMath.java
      - copied unchanged from r1564922, hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFMath.java
    hive/branches/tez/ql/src/test/queries/clientnegative/authorization_public_create.q
      - copied unchanged from r1564922, hive/trunk/ql/src/test/queries/clientnegative/authorization_public_create.q
    hive/branches/tez/ql/src/test/queries/clientnegative/authorization_public_drop.q
      - copied unchanged from r1564922, hive/trunk/ql/src/test/queries/clientnegative/authorization_public_drop.q
    hive/branches/tez/ql/src/test/queries/clientnegative/authorize_grant_public.q
      - copied unchanged from r1564922, hive/trunk/ql/src/test/queries/clientnegative/authorize_grant_public.q
    hive/branches/tez/ql/src/test/queries/clientnegative/authorize_revoke_public.q
      - copied unchanged from r1564922, hive/trunk/ql/src/test/queries/clientnegative/authorize_revoke_public.q
    hive/branches/tez/ql/src/test/results/clientnegative/authorization_public_create.q.out
      - copied unchanged from r1564922, hive/trunk/ql/src/test/results/clientnegative/authorization_public_create.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/authorization_public_drop.q.out
      - copied unchanged from r1564922, hive/trunk/ql/src/test/results/clientnegative/authorization_public_drop.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/authorize_grant_public.q.out
      - copied unchanged from r1564922, hive/trunk/ql/src/test/results/clientnegative/authorize_grant_public.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/authorize_revoke_public.q.out
      - copied unchanged from r1564922, hive/trunk/ql/src/test/results/clientnegative/authorize_revoke_public.q.out
Modified:
    hive/branches/tez/   (props changed)
    hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
    hive/branches/tez/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatCreateTableDesc.java
    hive/branches/tez/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
    hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
    hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
    hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java
    hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
    hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAtan.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRadians.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSin.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java
    hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTan.java
    hive/branches/tez/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure2.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure4.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure5.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure6.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure7.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure9.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/analyze_view.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/authorization_fail_7.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view1.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view2.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view4.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view5.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view6.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view7.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view8.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/create_view_failure1.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/create_view_failure2.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/create_view_failure4.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/drop_table_failure2.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/insert_view_failure.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/invalidate_view1.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/load_view_failure.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/recursive_view.q.out
    hive/branches/tez/ql/src/test/results/clientnegative/unset_view_property.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/alter_view_as_select.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/alter_view_rename.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/authorization_1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/authorization_1_sql_std.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/authorization_5.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/authorization_8.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/authorization_role_grant1.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/authorization_view.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/create_big_view.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/create_like_tbl_props.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/create_like_view.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/create_or_replace_view.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/create_view.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/create_view_partitioned.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/create_view_translate.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/ctas_char.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/ctas_date.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/ctas_varchar.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/database_drop.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/describe_formatted_view_partitioned.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/describe_formatted_view_partitioned_json.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/explain_dependency.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/explain_logical.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/join_view.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/lateral_view_noalias.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/ppd_union_view.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/ptf.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/quotedid_basic.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/show_create_table_view.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/subquery_exists.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/subquery_exists_having.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/subquery_notin.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/subquery_views.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/union_top_level.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/unset_table_view_property.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/view.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/view_cast.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/view_inputs.q.out
    hive/branches/tez/ql/src/test/results/clientpositive/windowing.q.out

Propchange: hive/branches/tez/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1564131-1564922

Modified: hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java (original)
+++ hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java Wed Feb  5 21:09:23 2014
@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.HTableDes
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.mapred.TableOutputFormat;
-import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
+import org.apache.hadoop.hbase.mapred.TableMapReduceUtil;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hive.hbase.HBaseSerDe.ColumnMapping;
@@ -53,7 +53,6 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDe;
-import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
@@ -153,9 +152,8 @@ public class HBaseStorageHandler extends
       String tableName = getHBaseTableName(tbl);
       Map<String, String> serdeParam = tbl.getSd().getSerdeInfo().getParameters();
       String hbaseColumnsMapping = serdeParam.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
-      List<ColumnMapping> columnsMapping = null;
 
-      columnsMapping = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping);
+      List<ColumnMapping> columnsMapping = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping);
 
       HTableDescriptor tableDesc;
 

Modified: hive/branches/tez/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatCreateTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatCreateTableDesc.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatCreateTableDesc.java (original)
+++ hive/branches/tez/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatCreateTableDesc.java Wed Feb  5 21:09:23 2014
@@ -19,6 +19,7 @@
 package org.apache.hive.hcatalog.api;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -37,6 +38,7 @@ import org.apache.hadoop.hive.ql.io.RCFi
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
@@ -72,6 +74,7 @@ public class HCatCreateTableDesc {
   private String outputformat;
   private String serde;
   private String storageHandler;
+  private Map<String, String> serdeParams;
 
   private HCatCreateTableDesc(String dbName, String tableName, List<HCatFieldSchema> columns) {
     this.dbName = dbName;
@@ -146,6 +149,11 @@ public class HCatCreateTableDesc {
       }
     }
     newTable.setSd(sd);
+    if(serdeParams != null) {
+      for(Map.Entry<String, String> param : serdeParams.entrySet()) {
+        sd.getSerdeInfo().putToParameters(param.getKey(), param.getValue());
+      }
+    }
     if (this.partCols != null) {
       ArrayList<FieldSchema> hivePtnCols = new ArrayList<FieldSchema>();
       for (HCatFieldSchema fs : this.partCols) {
@@ -296,6 +304,12 @@ public class HCatCreateTableDesc {
   public String getDatabaseName() {
     return this.dbName;
   }
+  /**
+   * Gets the SerDe parameters; for example see {@link org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder#fieldsTerminatedBy(char)}
+   */
+  public Map<String, String> getSerdeParams() {
+    return serdeParams;
+  }
 
   @Override
   public String toString() {
@@ -325,7 +339,9 @@ public class HCatCreateTableDesc {
       : "outputformat=null")
       + (serde != null ? "serde=" + serde + ", " : "serde=null")
       + (storageHandler != null ? "storageHandler=" + storageHandler
-      : "storageHandler=null") + "]";
+      : "storageHandler=null") 
+      + ",serdeParams=" + (serdeParams == null ? "null" : serdeParams)
+      + "]";
   }
 
   public static class Builder {
@@ -344,6 +360,7 @@ public class HCatCreateTableDesc {
     private Map<String, String> tblProps;
     private boolean ifNotExists;
     private String dbName;
+    private Map<String, String> serdeParams;
 
 
     private Builder(String dbName, String tableName, List<HCatFieldSchema> columns) {
@@ -466,7 +483,52 @@ public class HCatCreateTableDesc {
       this.fileFormat = format;
       return this;
     }
-
+    /**
+     * See <i>row_format</i> element of CREATE_TABLE DDL for Hive.
+     */
+    public Builder fieldsTerminatedBy(char delimiter) {
+      return serdeParam(serdeConstants.FIELD_DELIM, Character.toString(delimiter));
+    }
+    /**
+     * See <i>row_format</i> element of CREATE_TABLE DDL for Hive.
+     */
+    public Builder escapeChar(char escapeChar) {
+      return serdeParam(serdeConstants.ESCAPE_CHAR, Character.toString(escapeChar));
+    }
+    /**
+     * See <i>row_format</i> element of CREATE_TABLE DDL for Hive.
+     */
+    public Builder collectionItemsTerminatedBy(char delimiter) {
+      return serdeParam(serdeConstants.COLLECTION_DELIM, Character.toString(delimiter));
+    }
+    /**
+     * See <i>row_format</i> element of CREATE_TABLE DDL for Hive.
+     */
+    public Builder mapKeysTerminatedBy(char delimiter) {
+      return serdeParam(serdeConstants.MAPKEY_DELIM, Character.toString(delimiter));
+    }
+    /**
+     * See <i>row_format</i> element of CREATE_TABLE DDL for Hive.
+     */
+    public Builder linesTerminatedBy(char delimiter) {
+      return serdeParam(serdeConstants.LINE_DELIM, Character.toString(delimiter));
+    }
+    /**
+     * See <i>row_format</i> element of CREATE_TABLE DDL for Hive.
+     */
+    public Builder nullDefinedAs(char nullChar) {
+      return serdeParam(serdeConstants.SERIALIZATION_NULL_FORMAT, Character.toString(nullChar));
+    }
+    /**
+     * Used for setting an arbitrary SerDe parameter.
+     */
+    public Builder serdeParam(String paramName, String value) {
+      if(serdeParams == null) {
+        serdeParams = new HashMap<String, String>();
+      }
+      serdeParams.put(paramName, value);
+      return this;
+    }
     /**
      * Builds the HCatCreateTableDesc.
      *
@@ -514,6 +576,7 @@ public class HCatCreateTableDesc {
           .getName();
         LOG.info("Table output format:" + desc.outputformat);
       }
+      desc.serdeParams = this.serdeParams;
       return desc;
     }
   }

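For orientation, the new Builder methods above map one-to-one onto the row_format clause of Hive's CREATE TABLE DDL, each storing a serdeConstants key via serdeParam(). A minimal sketch of how they compose; the database, table, and column names are illustrative, not part of this commit:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hive.hcatalog.api.HCatCreateTableDesc;
    import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;

    public class SerdeParamsSketch {
      public static void main(String[] args) throws Exception {
        List<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
        cols.add(new HCatFieldSchema("id", HCatFieldSchema.Type.INT, "row id"));
        cols.add(new HCatFieldSchema("value", HCatFieldSchema.Type.STRING, "payload"));
        HCatCreateTableDesc desc = HCatCreateTableDesc
            .create("mydb", "mytable", cols)   // hypothetical names
            .fieldsTerminatedBy(',')
            .escapeChar('\\')
            .linesTerminatedBy('\n')
            .build();
        // The accumulated parameters are exposed through the new getter.
        System.out.println(desc.getSerdeParams());
      }
    }
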
Modified: hive/branches/tez/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java (original)
+++ hive/branches/tez/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java Wed Feb  5 21:09:23 2014
@@ -48,6 +48,7 @@ public class HCatTable {
   private String dbName;
   private String serde;
   private String location;
+  private Map<String, String> serdeParams;
 
   HCatTable(Table hiveTable) throws HCatException {
     this.tableName = hiveTable.getTableName();
@@ -73,6 +74,7 @@ public class HCatTable {
     tblProps = hiveTable.getParameters();
     serde = hiveTable.getSd().getSerdeInfo().getSerializationLib();
     location = hiveTable.getSd().getLocation();
+    serdeParams = hiveTable.getSd().getSerdeInfo().getParameters();
   }
 
   /**
@@ -200,6 +202,12 @@ public class HCatTable {
   public String getLocation() {
     return location;
   }
+  /**
+   * Returns SerDe parameters such as the field delimiter, etc.
+   */
+  public Map<String, String> getSerdeParams() {
+    return serdeParams;
+  }
 
   @Override
   public String toString() {
@@ -222,6 +230,8 @@ public class HCatTable {
       + ", " : "storageHandler=null")
       + (tblProps != null ? "tblProps=" + tblProps + ", " : "tblProps=null")
       + (serde != null ? "serde=" + serde + ", " : "serde=")
-      + (location != null ? "location=" + location : "location=") + "]";
+      + (location != null ? "location=" + location : "location=")
+      + ",serdeParams=" + (serdeParams == null ? "null" : serdeParams)
+      + "]";
   }
 }

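HCatTable now carries the same parameters back on the read path. A brief fragment, assuming an already-connected HCatClient named client and the table from the sketch above:

    import java.util.Map;
    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.hive.hcatalog.api.HCatTable;

    HCatTable table = client.getTable("mydb", "mytable");
    Map<String, String> params = table.getSerdeParams();
    // "," if the table was created with fieldsTerminatedBy(',')
    String fieldDelim = params.get(serdeConstants.FIELD_DELIM);
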
Modified: hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java (original)
+++ hive/branches/tez/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java Wed Feb  5 21:09:23 2014
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
@@ -161,13 +162,28 @@ public class TestHCatClient {
 
     client.dropTable(db, tableOne, true);
     HCatCreateTableDesc tableDesc2 = HCatCreateTableDesc.create(db,
-      tableTwo, cols).build();
+      tableTwo, cols).fieldsTerminatedBy('\001').escapeChar('\002').linesTerminatedBy('\003').
+      mapKeysTerminatedBy('\004').collectionItemsTerminatedBy('\005').nullDefinedAs('\006').build();
     client.createTable(tableDesc2);
     HCatTable table2 = client.getTable(db, tableTwo);
     assertTrue(table2.getInputFileFormat().equalsIgnoreCase(
       TextInputFormat.class.getName()));
     assertTrue(table2.getOutputFileFormat().equalsIgnoreCase(
       IgnoreKeyTextOutputFormat.class.getName()));
+    assertTrue("SerdeParams not found", table2.getSerdeParams() != null);
+    assertEquals("checking " + serdeConstants.FIELD_DELIM, Character.toString('\001'),
+      table2.getSerdeParams().get(serdeConstants.FIELD_DELIM));
+    assertEquals("checking " + serdeConstants.ESCAPE_CHAR, Character.toString('\002'),
+      table2.getSerdeParams().get(serdeConstants.ESCAPE_CHAR));
+    assertEquals("checking " + serdeConstants.LINE_DELIM, Character.toString('\003'),
+      table2.getSerdeParams().get(serdeConstants.LINE_DELIM));
+    assertEquals("checking " + serdeConstants.MAPKEY_DELIM, Character.toString('\004'),
+      table2.getSerdeParams().get(serdeConstants.MAPKEY_DELIM));
+    assertEquals("checking " + serdeConstants.COLLECTION_DELIM, Character.toString('\005'),
+      table2.getSerdeParams().get(serdeConstants.COLLECTION_DELIM));
+    assertEquals("checking " + serdeConstants.SERIALIZATION_NULL_FORMAT, Character.toString('\006'),
+      table2.getSerdeParams().get(serdeConstants.SERIALIZATION_NULL_FORMAT));
+    
     assertEquals((expectedDir + "/" + db + ".db/" + tableTwo).toLowerCase(), table2.getLocation().toLowerCase());
     client.close();
   }

Modified: hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java Wed Feb  5 21:09:23 2014
@@ -154,6 +154,39 @@ public class Server {
   }
 
   /**
+   * Get version of hadoop software being run by this WebHCat server
+   */
+  @GET
+  @Path("version/hadoop")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response hadoopVersion()  throws IOException {
+    VersionDelegator d = new VersionDelegator(appConf);
+    return d.getVersion("hadoop");
+  }
+
+  /**
+   * Get version of hive software being run by this WebHCat server
+   */
+  @GET
+  @Path("version/hive")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response hiveVersion()  throws IOException {
+    VersionDelegator d = new VersionDelegator(appConf);
+    return d.getVersion("hive");
+  }
+
+  /**
+   * Get version of pig software being run by this WebHCat server
+   */
+  @GET
+  @Path("version/pig")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response pigVersion()  throws IOException {
+    VersionDelegator d = new VersionDelegator(appConf);
+    return d.getVersion("pig");
+  }
+
+  /**
    * Execute an hcat ddl expression on the local box.  It is run
    * as the authenticated user and rate limited.
    */

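Each endpoint above answers a GET with a small JSON object carrying module and version fields (see the E2e tests below). A minimal client sketch; the host, the port (50111 is WebHCat's usual default), and the templeton/v1 base path are assumptions, not part of this commit:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class WebHCatVersionSketch {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:50111/templeton/v1/version/hive");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()));
        StringBuilder body = new StringBuilder();
        for (String line; (line = in.readLine()) != null; ) {
          body.append(line);
        }
        in.close();
        // Expect something like {"module":"hive","version":"0.13.0-SNAPSHOT"}
        System.out.println(body);
      }
    }
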
Modified: hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java (original)
+++ hive/branches/tez/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/TestWebHCatE2e.java Wed Feb  5 21:09:23 2014
@@ -197,6 +197,39 @@ public class TestWebHCatE2e {
       ErrorMsg.INVALID_TABLE.getErrorCode(),
       getErrorCode(p.responseBody));
   }
+
+  @Test
+  public void getHadoopVersion() throws Exception {
+    MethodCallRetVal p = doHttpCall(templetonBaseUrl + "/version/hadoop",
+        HTTP_METHOD_TYPE.GET);
+    Assert.assertEquals(HttpStatus.OK_200, p.httpStatusCode);
+    Map<String, Object> props = JsonBuilder.jsonToMap(p.responseBody);
+    Assert.assertEquals("hadoop", props.get("module"));
+    Assert.assertTrue(p.getAssertMsg(),
+        ((String)props.get("version")).matches("[1-2].[0-9]+.[0-9]+.*"));
+  }
+
+  @Test
+  public void getHiveVersion() throws Exception {
+    MethodCallRetVal p = doHttpCall(templetonBaseUrl + "/version/hive",
+        HTTP_METHOD_TYPE.GET);
+    Assert.assertEquals(HttpStatus.OK_200, p.httpStatusCode);
+    Map<String, Object> props = JsonBuilder.jsonToMap(p.responseBody);
+    Assert.assertEquals("hive", props.get("module"));
+    Assert.assertTrue(p.getAssertMsg(),
+        ((String) props.get("version")).matches("0.[0-9]+.[0-9]+.*"));
+  }
+
+  @Test
+  public void getPigVersion() throws Exception {
+    MethodCallRetVal p = doHttpCall(templetonBaseUrl + "/version/pig",
+        HTTP_METHOD_TYPE.GET);
+    Assert.assertEquals(HttpStatus.NOT_IMPLEMENTED_501, p.httpStatusCode);
+    Map<String, Object> props = JsonBuilder.jsonToMap(p.responseBody);
+    Assert.assertEquals(p.getAssertMsg(), "Pig version request not yet " +
+        "implemented", (String)props.get("error"));
+  }
+
   /**
    * It's expected that Templeton returns a properly formatted JSON object when it
    * encounters an error.  It should have {@code ERROR_CODE} element in it which

Modified: hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/branches/tez/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Wed Feb  5 21:09:23 2014
@@ -1178,7 +1178,8 @@ public class TestJdbcDriver extends Test
     ResultSet res = stmt.getResultSet();
     assertTrue(res.next());
     assertEquals("role1", res.getString(1));
-    assertFalse(res.next());
+    assertTrue(res.next());
+    assertEquals("PUBLIC", res.getString(1));
     res.close();
   }
 }

Modified: hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/tez/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java Wed Feb  5 21:09:23 2014
@@ -589,7 +589,9 @@ public class QTestUtil {
 
     List<String> roleNames = db.getAllRoleNames();
       for (String roleName : roleNames) {
-        db.dropRole(roleName);
+        if (!"PUBLIC".equals(roleName)) {
+          db.dropRole(roleName);
+        }
     }
     // allocate and initialize a new conf since a test can
     // modify conf by using 'set' commands

Modified: hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/branches/tez/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Wed Feb  5 21:09:23 2014
@@ -171,6 +171,7 @@ public class HiveMetaStore extends Thrif
    */
   private static final int DEFAULT_HIVE_METASTORE_PORT = 9083;
   public static final String ADMIN = "ADMIN";
+  public static final String PUBLIC = "PUBLIC";
 
   private static HadoopThriftAuthBridge.Server saslServer;
   private static boolean useSasl;
@@ -196,7 +197,7 @@ public class HiveMetaStore extends Thrif
       IHMSHandler {
     public static final Log LOG = HiveMetaStore.LOG;
     private static boolean createDefaultDB = false;
-    private static boolean adminCreated = false;
+    private static boolean defaultRolesCreated = false;
     private String rawStoreClassName;
     private final HiveConf hiveConf; // stores datastore (jpox) properties,
                                      // right now they come from jpox.properties
@@ -351,7 +352,7 @@ public class HiveMetaStore extends Thrif
 
       synchronized (HMSHandler.class) {
         createDefaultDB();
-        createAdminRoleNAddUsers();
+        createDefaultRolesNAddUsers();
       }
 
       if (hiveConf.getBoolean("hive.metastore.metrics.enabled", false)) {
@@ -394,6 +395,7 @@ public class HiveMetaStore extends Thrif
       return threadLocalId.get() + ": " + s;
     }
 
+    @Override
     public void setConf(Configuration conf) {
       threadLocalConf.set(conf);
       RawStore ms = threadLocalMS.get();
@@ -474,10 +476,10 @@ public class HiveMetaStore extends Thrif
       }
     }
 
-    private void createAdminRoleNAddUsers() throws MetaException {
+    private void createDefaultRolesNAddUsers() throws MetaException {
 
-      if(adminCreated) {
-        LOG.debug("Admin role already created previously.");        
+      if(defaultRolesCreated) {
+        LOG.debug("Admin role already created previously.");
         return;
       }
       Class<?> authCls;
@@ -498,12 +500,21 @@ public class HiveMetaStore extends Thrif
       try {
         ms.addRole(ADMIN, ADMIN);
       } catch (InvalidObjectException e) {
-        LOG.debug("admin role already exists",e);
+        LOG.debug(ADMIN +" role already exists",e);
       } catch (NoSuchObjectException e) {
         // This should never be thrown.
-        LOG.warn("Unexpected exception while adding ADMIN role" , e);
+        LOG.warn("Unexpected exception while adding " +ADMIN+" roles" , e);
       }
-      LOG.info("Added admin role in metastore");
+      LOG.info("Added "+ ADMIN+ " role in metastore");
+      try {
+        ms.addRole(PUBLIC, PUBLIC);
+      } catch (InvalidObjectException e) {
+        LOG.debug(PUBLIC + " role already exists",e);
+      } catch (NoSuchObjectException e) {
+        // This should never be thrown.
+        LOG.warn("Unexpected exception while adding "+PUBLIC +" roles" , e);
+      }
+      LOG.info("Added "+PUBLIC+ " role in metastore");
       // now grant all privs to admin
       PrivilegeBag privs = new PrivilegeBag();
       privs.addToPrivileges(new HiveObjectPrivilege( new HiveObjectRef(HiveObjectType.GLOBAL, null,
@@ -553,7 +564,7 @@ public class HiveMetaStore extends Thrif
           LOG.debug(userName + " already in admin role", e);
         }
       }
-      adminCreated = true;
+      defaultRolesCreated = true;
     }
 
     private void logInfo(String m) {
@@ -690,6 +701,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
+    @Override
     public void create_database(final Database db)
         throws AlreadyExistsException, InvalidObjectException, MetaException {
       startFunction("create_database", ": " + db.toString());
@@ -722,6 +734,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
+    @Override
     public Database get_database(final String name) throws NoSuchObjectException,
         MetaException {
       startFunction("get_database", ": " + name);
@@ -745,6 +758,7 @@ public class HiveMetaStore extends Thrif
       return db;
     }
 
+    @Override
     public void alter_database(final String dbName, final Database db)
         throws NoSuchObjectException, TException, MetaException {
       startFunction("alter_database" + dbName);
@@ -881,6 +895,7 @@ public class HiveMetaStore extends Thrif
           parent.toString() : parent.toString() + Path.SEPARATOR);
     }
 
+    @Override
     public void drop_database(final String dbName, final boolean deleteData, final boolean cascade)
         throws NoSuchObjectException, InvalidOperationException, MetaException {
 
@@ -914,6 +929,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
+    @Override
     public List<String> get_databases(final String pattern) throws MetaException {
       startFunction("get_databases", ": " + pattern);
 
@@ -934,6 +950,7 @@ public class HiveMetaStore extends Thrif
       return ret;
     }
 
+    @Override
     public List<String> get_all_databases() throws MetaException {
       startFunction("get_all_databases");
 
@@ -975,6 +992,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
+    @Override
     public boolean create_type(final Type type) throws AlreadyExistsException,
         MetaException, InvalidObjectException {
       startFunction("create_type", ": " + type.toString());
@@ -1001,6 +1019,7 @@ public class HiveMetaStore extends Thrif
       return success;
     }
 
+    @Override
     public Type get_type(final String name) throws MetaException, NoSuchObjectException {
       startFunction("get_type", ": " + name);
 
@@ -1051,6 +1070,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
+    @Override
     public boolean drop_type(final String name) throws MetaException, NoSuchObjectException {
       startFunction("drop_type", ": " + name);
 
@@ -1074,6 +1094,7 @@ public class HiveMetaStore extends Thrif
       return success;
     }
 
+    @Override
     public Map<String, Type> get_type_all(String name) throws MetaException {
       // TODO Auto-generated method stub
       startFunction("get_type_all", ": " + name);
@@ -1458,6 +1479,7 @@ public class HiveMetaStore extends Thrif
       return MetaStoreUtils.isIndexTable(table);
     }
 
+    @Override
     public Table get_table(final String dbname, final String name) throws MetaException,
         NoSuchObjectException {
       Table t = null;
@@ -1500,6 +1522,7 @@ public class HiveMetaStore extends Thrif
      * @throws InvalidOperationException
      * @throws UnknownDBException
      */
+    @Override
     public List<Table> get_table_objects_by_name(final String dbname, final List<String> names)
         throws MetaException, InvalidOperationException, UnknownDBException {
       List<Table> tables = null;
@@ -1829,6 +1852,7 @@ public class HiveMetaStore extends Thrif
       return result;
     }
 
+    @Override
     public int add_partitions(final List<Partition> parts) throws MetaException,
         InvalidObjectException, AlreadyExistsException {
       startFunction("add_partition");
@@ -2212,6 +2236,7 @@ public class HiveMetaStore extends Thrif
 
     }
 
+    @Override
     public Partition get_partition(final String db_name, final String tbl_name,
         final List<String> part_vals) throws MetaException, NoSuchObjectException {
       startPartitionFunction("get_partition", db_name, tbl_name, part_vals);
@@ -2260,6 +2285,7 @@ public class HiveMetaStore extends Thrif
       return ret;
     }
 
+    @Override
     public List<Partition> get_partitions(final String db_name, final String tbl_name,
         final short max_parts) throws NoSuchObjectException, MetaException {
       startTableFunction("get_partitions", db_name, tbl_name);
@@ -2309,6 +2335,7 @@ public class HiveMetaStore extends Thrif
 
     }
 
+    @Override
     public List<String> get_partition_names(final String db_name, final String tbl_name,
         final short max_parts) throws MetaException {
       startTableFunction("get_partition_names", db_name, tbl_name);
@@ -2485,6 +2512,7 @@ public class HiveMetaStore extends Thrif
       throw new MetaException("Not yet implemented");
     }
 
+    @Override
     public void alter_index(final String dbname, final String base_table_name,
         final String index_name, final Index newIndex)
         throws InvalidOperationException, MetaException {
@@ -2516,6 +2544,7 @@ public class HiveMetaStore extends Thrif
       return;
     }
 
+    @Override
     public String getVersion() throws TException {
       endFunction(startFunction("getVersion"), true, null);
       return "3.0";
@@ -2575,6 +2604,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
+    @Override
     public List<String> get_tables(final String dbname, final String pattern)
         throws MetaException {
       startFunction("get_tables", ": db=" + dbname + " pat=" + pattern);
@@ -2596,6 +2626,7 @@ public class HiveMetaStore extends Thrif
       return ret;
     }
 
+    @Override
     public List<String> get_all_tables(final String dbname) throws MetaException {
       startFunction("get_all_tables", ": db=" + dbname);
 
@@ -2616,6 +2647,7 @@ public class HiveMetaStore extends Thrif
       return ret;
     }
 
+    @Override
     public List<FieldSchema> get_fields(String db, String tableName)
         throws MetaException, UnknownTableException, UnknownDBException {
       startFunction("get_fields", ": db=" + db + "tbl=" + tableName);
@@ -2675,6 +2707,7 @@ public class HiveMetaStore extends Thrif
      * @throws UnknownTableException
      * @throws UnknownDBException
      */
+    @Override
     public List<FieldSchema> get_schema(String db, String tableName)
         throws MetaException, UnknownTableException, UnknownDBException {
       startFunction("get_schema", ": db=" + db + "tbl=" + tableName);
@@ -2721,6 +2754,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
+    @Override
     public String getCpuProfile(int profileDurationInSec) throws TException {
       return "";
     }
@@ -2731,6 +2765,7 @@ public class HiveMetaStore extends Thrif
      * were an exception thrown while retrieving the variable, or if name is
      * null, defaultValue is returned.
      */
+    @Override
     public String get_config_value(String name, String defaultValue)
         throws TException, ConfigValSecurityException {
       startFunction("get_config_value", ": name=" + name + " defaultValue="
@@ -2818,6 +2853,7 @@ public class HiveMetaStore extends Thrif
       return p;
     }
 
+    @Override
     public Partition get_partition_by_name(final String db_name, final String tbl_name,
         final String part_name) throws MetaException, NoSuchObjectException, TException {
 
@@ -3272,6 +3308,7 @@ public class HiveMetaStore extends Thrif
       return convertedPartName;
     }
 
+    @Override
     public ColumnStatistics get_table_column_statistics(String dbName, String tableName,
       String colName) throws NoSuchObjectException, MetaException, TException,
       InvalidInputException, InvalidObjectException
@@ -3292,6 +3329,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
+    @Override
     public TableStatsResult get_table_statistics_req(TableStatsRequest request)
         throws MetaException, NoSuchObjectException, TException {
       String dbName = request.getDbName(), tblName = request.getTblName();
@@ -3308,6 +3346,7 @@ public class HiveMetaStore extends Thrif
       return result;
     }
 
+    @Override
     public ColumnStatistics get_partition_column_statistics(String dbName, String tableName,
       String partName, String colName) throws NoSuchObjectException, MetaException,
       InvalidInputException, TException, InvalidObjectException {
@@ -3333,6 +3372,7 @@ public class HiveMetaStore extends Thrif
       return statsObj;
     }
 
+    @Override
     public PartitionsStatsResult get_partitions_statistics_req(PartitionsStatsRequest request)
         throws MetaException, NoSuchObjectException, TException {
       String dbName = request.getDbName(), tblName = request.getTblName();
@@ -3354,6 +3394,7 @@ public class HiveMetaStore extends Thrif
       return result;
     }
 
+    @Override
     public boolean update_table_column_statistics(ColumnStatistics colStats)
       throws NoSuchObjectException,InvalidObjectException,MetaException,TException,
       InvalidInputException
@@ -3392,6 +3433,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
+    @Override
     public boolean update_partition_column_statistics(ColumnStatistics colStats)
       throws NoSuchObjectException,InvalidObjectException,MetaException,TException,
       InvalidInputException
@@ -3438,6 +3480,7 @@ public class HiveMetaStore extends Thrif
       }
     }
 
+    @Override
     public boolean delete_partition_column_statistics(String dbName, String tableName,
       String partName, String colName) throws NoSuchObjectException, MetaException,
       InvalidObjectException, TException, InvalidInputException
@@ -3462,6 +3505,7 @@ public class HiveMetaStore extends Thrif
       return ret;
     }
 
+    @Override
     public boolean delete_table_column_statistics(String dbName, String tableName, String colName)
       throws NoSuchObjectException, MetaException, InvalidObjectException, TException,
       InvalidInputException
@@ -3674,7 +3718,10 @@ public class HiveMetaStore extends Thrif
         final String grantor, final PrincipalType grantorType, final boolean grantOption)
         throws MetaException, TException {
       incrementCounter("add_role_member");
-
+      if (PUBLIC.equals(roleName)) {
+        throw new MetaException("No user can be added to " + PUBLIC +". Since all users implictly"
+        + " belong to " + PUBLIC + " role.");
+      }
       Boolean ret = null;
       try {
         RawStore ms = getMS();
@@ -3719,14 +3766,13 @@ public class HiveMetaStore extends Thrif
       return false;
     }
 
+    @Override
     public List<Role> list_roles(final String principalName,
         final PrincipalType principalType) throws MetaException, TException {
       incrementCounter("list_roles");
 
-      List<Role> ret = null;
+      List<Role> result = new ArrayList<Role>();
       try {
-
-        List<Role> result = new ArrayList<Role>();
         List<MRoleMap> roleMap = getMS().listRoles(principalName, principalType);
         if (roleMap != null) {
           for (MRoleMap role : roleMap) {
@@ -3735,14 +3781,14 @@ public class HiveMetaStore extends Thrif
                 .getCreateTime(), r.getOwnerName()));
           }
         }
-        ret = result;
+        // all users by default belong to the PUBLIC role
+        result.add(new Role(PUBLIC,0,PUBLIC));
+        return result;
       } catch (MetaException e) {
         throw e;
       } catch (Exception e) {
         throw new RuntimeException(e);
       }
-
-      return ret;
     }
 
     @Override
@@ -3750,6 +3796,9 @@ public class HiveMetaStore extends Thrif
         throws MetaException, TException {
       incrementCounter("create_role");
 
+      if (PUBLIC.equals(role.getRoleName())) {
+        throw new MetaException(PUBLIC + " role implicitly exists. It can't be created.");
+      }
       Boolean ret = null;
       try {
         ret = getMS().addRole(role.getRoleName(), role.getOwnerName());
@@ -3765,7 +3814,9 @@ public class HiveMetaStore extends Thrif
     public boolean drop_role(final String roleName)
         throws MetaException, TException {
       incrementCounter("drop_role");
-
+      if (ADMIN.equals(roleName) || PUBLIC.equals(roleName)) {
+        throw new MetaException(PUBLIC + "/" + ADMIN +" role can't be dropped.");
+      }
       Boolean ret = null;
       try {
         ret = getMS().removeRole(roleName);
@@ -3784,19 +3835,18 @@ public class HiveMetaStore extends Thrif
       List<String> ret = null;
       try {
         ret = getMS().listRoleNames();
+        return ret;
       } catch (MetaException e) {
         throw e;
       } catch (Exception e) {
         throw new RuntimeException(e);
       }
-      return ret;
     }
 
     @Override
     public boolean grant_privileges(final PrivilegeBag privileges) throws MetaException,
         TException {
       incrementCounter("grant_privileges");
-
       Boolean ret = null;
       try {
         ret = getMS().grantPrivileges(privileges);
@@ -3813,6 +3863,9 @@ public class HiveMetaStore extends Thrif
         final PrincipalType principalType) throws MetaException, TException {
       incrementCounter("remove_role_member");
 
+      if (PUBLIC.equals(roleName)) {
+        throw new MetaException(PUBLIC + " role can't be revoked.");
+      }
       Boolean ret = null;
       try {
         RawStore ms = getMS();
@@ -3830,7 +3883,6 @@ public class HiveMetaStore extends Thrif
     public boolean revoke_privileges(final PrivilegeBag privileges)
         throws MetaException, TException {
       incrementCounter("revoke_privileges");
-
       Boolean ret = null;
       try {
         ret = getMS().revokePrivileges(privileges);

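Taken together, the metastore changes above make PUBLIC an implicit role: it is created alongside ADMIN at startup, every principal is reported as a member by list_roles, and it can be neither created, dropped, granted to, nor revoked from. A hedged sketch of the resulting client-side contract; the client setup is assumed, and the method names follow the Thrift surface shown above:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.Role;

    public class PublicRoleSketch {
      public static void main(String[] args) throws Exception {
        // Connection details come from hive-site.xml on the classpath (assumption).
        HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        try {
          client.create_role(new Role("PUBLIC", 0, "anyone"));
        } catch (Exception e) {
          System.out.println(e.getMessage()); // "... role implicitly exists. It can't be created."
        }
        try {
          client.drop_role("PUBLIC");
        } catch (Exception e) {
          System.out.println(e.getMessage()); // "... role can't be dropped."
        }
        client.close();
      }
    }
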
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Wed Feb  5 21:09:23 2014
@@ -421,28 +421,6 @@ public final class GenMapRedUtils {
     setTaskPlan(alias_id, topOp, task, local, opProcCtx, null);
   }
 
-  private static ReadEntity getParentViewInfo(String alias_id,
-      Map<String, ReadEntity> viewAliasToInput) {
-    String[] aliases = alias_id.split(":");
-
-    String currentAlias = null;
-    ReadEntity currentInput = null;
-    // Find the immediate parent possible.
-    // For eg: for a query like 'select * from V3', where V3 -> V2, V2 -> V1, V1 -> T
-    // -> implies depends on.
-    // T's parent would be V1
-    for (int pos = 0; pos < aliases.length; pos++) {
-      currentAlias = currentAlias == null ? aliases[pos] : currentAlias + ":" + aliases[pos];
-      ReadEntity input = viewAliasToInput.get(currentAlias);
-      if (input == null) {
-        return currentInput;
-      }
-      currentInput = input;
-    }
-
-    return currentInput;
-  }
-
   /**
    * set the current task in the mapredWork.
    *
@@ -572,7 +550,8 @@ public final class GenMapRedUtils {
     // Track the dependencies for the view. Consider a query like: select * from V;
     // where V is a view of the form: select * from T
     // The dependencies should include V at depth 0, and T at depth 1 (inferred).
-    ReadEntity parentViewInfo = getParentViewInfo(alias_id, parseCtx.getViewAliasToInput());
+    Map<String, ReadEntity> viewToInput = parseCtx.getViewAliasToInput();
+    ReadEntity parentViewInfo = PlanUtils.getParentViewInfo(alias_id, viewToInput);
 
     // The table should also be considered a part of inputs, even if the table is a
     // partitioned table and whether any partition is selected or not

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java Wed Feb  5 21:09:23 2014
@@ -155,8 +155,9 @@ public class SimpleFetchOptimizer implem
     if (table == null) {
       return null;
     }
+    ReadEntity parent = PlanUtils.getParentViewInfo(alias, pctx.getViewAliasToInput());
     if (!table.isPartitioned()) {
-      return checkOperators(new FetchData(table, splitSample), ts, aggressive, false);
+      return checkOperators(new FetchData(parent, table, splitSample), ts, aggressive, false);
     }
 
     boolean bypassFilter = false;
@@ -168,7 +169,7 @@ public class SimpleFetchOptimizer implem
       PrunedPartitionList pruned = pctx.getPrunedPartitions(alias, ts);
       if (aggressive || !pruned.hasUnknownPartitions()) {
         bypassFilter &= !pruned.hasUnknownPartitions();
-        return checkOperators(new FetchData(table, pruned, splitSample), ts,
+        return checkOperators(new FetchData(parent, table, pruned, splitSample), ts,
             aggressive, bypassFilter);
       }
     }
@@ -205,6 +206,7 @@ public class SimpleFetchOptimizer implem
 
   private class FetchData {
 
+    private final ReadEntity parent;
     private final Table table;
     private final SplitSample splitSample;
     private final PrunedPartitionList partsList;
@@ -216,13 +218,16 @@ public class SimpleFetchOptimizer implem
     // this is always non-null when conversion is completed
     private Operator<?> fileSink;
 
-    private FetchData(Table table, SplitSample splitSample) {
+    private FetchData(ReadEntity parent, Table table, SplitSample splitSample) {
+      this.parent = parent;
       this.table = table;
       this.partsList = null;
       this.splitSample = splitSample;
     }
 
-    private FetchData(Table table, PrunedPartitionList partsList, SplitSample splitSample) {
+    private FetchData(ReadEntity parent, Table table, PrunedPartitionList partsList,
+        SplitSample splitSample) {
+      this.parent = parent;
       this.table = table;
       this.partsList = partsList;
       this.splitSample = splitSample;
@@ -231,7 +236,7 @@ public class SimpleFetchOptimizer implem
     private FetchWork convertToWork() throws HiveException {
       inputs.clear();
       if (!table.isPartitioned()) {
-        inputs.add(new ReadEntity(table));
+        inputs.add(new ReadEntity(table, parent));
         FetchWork work = new FetchWork(table.getPath(), Utilities.getTableDesc(table));
         PlanUtils.configureInputJobPropertiesForStorageHandler(work.getTblDesc());
         work.setSplitSample(splitSample);
@@ -241,12 +246,12 @@ public class SimpleFetchOptimizer implem
       List<PartitionDesc> partP = new ArrayList<PartitionDesc>();
 
       for (Partition partition : partsList.getNotDeniedPartns()) {
-        inputs.add(new ReadEntity(partition));
+        inputs.add(new ReadEntity(partition, parent));
         listP.add(partition.getDataLocation());
         partP.add(Utilities.getPartitionDesc(partition));
       }
       Table sourceTable = partsList.getSourceTable();
-      inputs.add(new ReadEntity(sourceTable));
+      inputs.add(new ReadEntity(sourceTable, parent));
       TableDesc table = Utilities.getTableDesc(sourceTable);
       FetchWork work = new FetchWork(listP, partP, table);
       if (!work.getPartDesc().isEmpty()) {
@@ -261,7 +266,9 @@ public class SimpleFetchOptimizer implem
     // single direct fetching, which means FS is not needed any more when conversion completed.
     // rows forwarded will be received by ListSinkOperator, which is replacing FS
     private ListSinkOperator completed(ParseContext pctx, FetchWork work) {
-      pctx.getSemanticInputs().addAll(inputs);
+      for (ReadEntity input : inputs) {
+        PlanUtils.addInput(pctx.getSemanticInputs(), input);
+      }
       return replaceFSwithLS(fileSink, work.getSerializationNullFormat());
     }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Feb  5 21:09:23 2014
@@ -8917,6 +8917,15 @@ public class SemanticAnalyzer extends Ba
     resultSchema =
         convertRowSchemaToViewSchema(opParseCtx.get(sinkOp).getRowResolver());
 
+    ParseContext pCtx = new ParseContext(conf, qb, child, opToPartPruner,
+        opToPartList, topOps, topSelOps, opParseCtx, joinContext, smbMapJoinContext,
+        topToTable, topToTableProps, fsopToTable,
+        loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
+        listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
+        opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
+        opToPartToSkewedPruner, viewAliasToInput,
+        reduceSinkOperatorsAddedByEnforceBucketingSorting, queryProperties);
+
     if (createVwDesc != null) {
       saveViewDefinition();
 
@@ -8930,18 +8939,15 @@ public class SemanticAnalyzer extends Ba
       // skip the rest of this method.
       ctx.setResDir(null);
       ctx.setResFile(null);
+
+      try {
+        PlanUtils.addInputsForView(pCtx);
+      } catch (HiveException e) {
+        throw new SemanticException(e);
+      }
       return;
     }
 
-    ParseContext pCtx = new ParseContext(conf, qb, child, opToPartPruner,
-        opToPartList, topOps, topSelOps, opParseCtx, joinContext, smbMapJoinContext,
-        topToTable, topToTableProps, fsopToTable,
-        loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
-        listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
-        opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
-        opToPartToSkewedPruner, viewAliasToInput,
-        reduceSinkOperatorsAddedByEnforceBucketingSorting, queryProperties);
-
     // Generate table access stats if required
     if (HiveConf.getBoolVar(this.conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_TABLEKEYS) == true) {
       TableAccessAnalyzer tableAccessAnalyzer = new TableAccessAnalyzer(pCtx);

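The net effect of the reshuffle above: ParseContext is now constructed before the CREATE VIEW early return, so PlanUtils.addInputsForView (added in this commit, see the PlanUtils diff below) can still record the view's underlying tables as read entities even though plan generation stops at that point for views.
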
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Wed Feb  5 21:09:23 2014
@@ -70,8 +70,8 @@ public enum HiveOperation {
   DROPFUNCTION("DROPFUNCTION", null, null),
   CREATEMACRO("CREATEMACRO", null, null),
   DROPMACRO("DROPMACRO", null, null),
-  CREATEVIEW("CREATEVIEW", null, null),
-  DROPVIEW("DROPVIEW", null, null),
+  CREATEVIEW("CREATEVIEW", new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.CREATE}),
+  DROPVIEW("DROPVIEW", null, new Privilege[]{Privilege.DROP}),
   CREATEINDEX("CREATEINDEX", null, null),
   DROPINDEX("DROPINDEX", null, null),
   ALTERINDEX_REBUILD("ALTERINDEX_REBUILD", null, null),

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Wed Feb  5 21:09:23 2014
@@ -36,7 +36,9 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
@@ -49,6 +51,8 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -938,4 +942,42 @@ public final class PlanUtils {
     sb.append(expr.getTypeString());
     sb.append(")");
   }
+
+  public static void addInputsForView(ParseContext parseCtx) throws HiveException {
+    Set<ReadEntity> inputs = parseCtx.getSemanticInputs();
+    for (Map.Entry<String, Operator<?>> entry : parseCtx.getTopOps().entrySet()) {
+      if (!(entry.getValue() instanceof TableScanOperator)) {
+        continue;
+      }
+      String alias = entry.getKey();
+      TableScanOperator topOp = (TableScanOperator) entry.getValue();
+      ReadEntity parentViewInfo = getParentViewInfo(alias, parseCtx.getViewAliasToInput());
+
+      // Add tables only for CREATE VIEW (a PPD filter may be appended by the outer query)
+      Table table = parseCtx.getTopToTable().get(topOp);
+      PlanUtils.addInput(inputs, new ReadEntity(table, parentViewInfo));
+    }
+  }
+
+  public static ReadEntity getParentViewInfo(String alias_id,
+      Map<String, ReadEntity> viewAliasToInput) {
+    String[] aliases = alias_id.split(":");
+
+    String currentAlias = null;
+    ReadEntity currentInput = null;
+    // Find the nearest parent view, if any.
+    // For example, for a query like 'select * from V3', where V3 -> V2,
+    // V2 -> V1, and V1 -> T ('->' meaning 'depends on'),
+    // T's immediate parent is V1.
+    for (int pos = 0; pos < aliases.length; pos++) {
+      currentAlias = currentAlias == null ? aliases[pos] : currentAlias + ":" + aliases[pos];
+      ReadEntity input = viewAliasToInput.get(currentAlias);
+      if (input == null) {
+        return currentInput;
+      }
+      currentInput = input;
+    }
+
+    return currentInput;
+  }
 }

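A worked example of getParentViewInfo() (the aliases and ReadEntity variables here are hypothetical; viewAliasToInput is normally populated during view expansion):

    // For 'SELECT * FROM v2', where view v2 reads view v1 and v1 reads
    // table t, the table scan alias is "v2:v1:t" and the map holds the
    // view prefixes.
    Map<String, ReadEntity> viewAliasToInput = new HashMap<String, ReadEntity>();
    viewAliasToInput.put("v2", v2Entity);       // hypothetical ReadEntity for v2
    viewAliasToInput.put("v2:v1", v1Entity);    // hypothetical ReadEntity for v1

    // The loop probes the prefixes "v2", "v2:v1", "v2:v1:t" in turn; the
    // last prefix with a mapping is "v2:v1", so t's immediate parent is v1.
    ReadEntity parent = PlanUtils.getParentViewInfo("v2:v1:t", viewAliasToInput);
    // parent == v1Entity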
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAcos.java Wed Feb  5 21:09:23 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncACosDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncACosLongToDouble;
@@ -34,7 +33,7 @@ import org.apache.hadoop.hive.serde2.io.
     + "  > SELECT _FUNC_(1) FROM src LIMIT 1;\n" + "  0\n"
     + "  > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + "  NULL")
 @VectorizedExpressions({FuncACosLongToDouble.class, FuncACosDoubleToDouble.class})
-public class UDFAcos extends UDF {
+public class UDFAcos extends UDFMath {
   private final DoubleWritable result = new DoubleWritable();
 
   public UDFAcos() {

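This and the following UDF hunks all swap the base class from UDF to the new UDFMath, whose body does not appear in these hunks. A plausible sketch of UDFMath, assuming its job is to bridge decimal arguments onto the double-based evaluate() each subclass presumably defines (inferred from the HiveDecimalWritable overloads visible in the UDFLog hunk below; the committed class may differ):

    package org.apache.hadoop.hive.ql.udf;

    import org.apache.hadoop.hive.ql.exec.UDF;
    import org.apache.hadoop.hive.serde2.io.DoubleWritable;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public abstract class UDFMath extends UDF {

      private final DoubleWritable doubleWritable = new DoubleWritable();

      /**
       * Convert the decimal input to a double and reuse the subclass's
       * double-based implementation.
       */
      public DoubleWritable evaluate(HiveDecimalWritable writable) {
        if (writable == null) {
          return null;
        }
        doubleWritable.set(writable.getHiveDecimal().bigDecimalValue().doubleValue());
        return evaluate(doubleWritable);
      }

      protected abstract DoubleWritable evaluate(DoubleWritable a);
    }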
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAsin.java Wed Feb  5 21:09:23 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncASinDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncASinLongToDouble;
@@ -36,7 +35,7 @@ import org.apache.hadoop.hive.serde2.io.
     + "  0\n"
     + "  > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + "  NULL")
 @VectorizedExpressions({FuncASinLongToDouble.class, FuncASinDoubleToDouble.class})
-public class UDFAsin extends UDF {
+public class UDFAsin extends UDFMath {
   private final DoubleWritable result = new DoubleWritable();
 
   public UDFAsin() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAtan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAtan.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAtan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFAtan.java Wed Feb  5 21:09:23 2014
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncATanDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncATanLongToDouble;
@@ -35,7 +34,7 @@ import org.apache.hadoop.hive.serde2.io.
         "  0"
     )
 @VectorizedExpressions({FuncATanLongToDouble.class, FuncATanDoubleToDouble.class})
-public class UDFAtan extends UDF {
+public class UDFAtan extends UDFMath {
 
   @SuppressWarnings("unused")
   private static Log LOG = LogFactory.getLog(UDFAtan.class.getName());

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCos.java Wed Feb  5 21:09:23 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCosDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCosLongToDouble;
@@ -34,7 +33,7 @@ import org.apache.hadoop.hive.serde2.io.
     extended = "Example:\n "
     + "  > SELECT _FUNC_(0) FROM src LIMIT 1;\n" + "  1")
 @VectorizedExpressions({FuncCosDoubleToDouble.class, FuncCosLongToDouble.class})
-public class UDFCos extends UDF {
+public class UDFCos extends UDFMath {
   private final DoubleWritable result = new DoubleWritable();
 
   public UDFCos() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFExp.java Wed Feb  5 21:09:23 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncExpDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncExpLongToDouble;
@@ -34,7 +33,7 @@ import org.apache.hadoop.hive.serde2.io.
     extended = "Example:\n "
     + "  > SELECT _FUNC_(0) FROM src LIMIT 1;\n" + "  1")
 @VectorizedExpressions({FuncExpDoubleToDouble.class, FuncExpLongToDouble.class})
-public class UDFExp extends UDF {
+public class UDFExp extends UDFMath {
   private final DoubleWritable result = new DoubleWritable();
 
   public UDFExp() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLn.java Wed Feb  5 21:09:23 2014
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 /**
  * UDFLn.
@@ -34,7 +35,7 @@ import org.apache.hadoop.hive.serde2.io.
     extended = "Example:\n"
     + "  > SELECT _FUNC_(1) FROM src LIMIT 1;\n" + "  0")
 @VectorizedExpressions({FuncLnLongToDouble.class, FuncLnDoubleToDouble.class})
-public class UDFLn extends UDF {
+public class UDFLn extends UDFMath {
   private final DoubleWritable result = new DoubleWritable();
 
   public UDFLn() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog.java Wed Feb  5 21:09:23 2014
@@ -19,13 +19,13 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseLongToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 /**
  * UDFLog.
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.serde2.io.
     + "  > SELECT _FUNC_(13, 13) FROM src LIMIT 1;\n" + "  1")
 @VectorizedExpressions({FuncLogWithBaseLongToDouble.class, FuncLogWithBaseDoubleToDouble.class,
   FuncLnLongToDouble.class, FuncLnDoubleToDouble.class})
-public class UDFLog extends UDF {
+public class UDFLog extends UDFMath {
   private final DoubleWritable result = new DoubleWritable();
 
   public UDFLog() {
@@ -59,12 +59,54 @@ public class UDFLog extends UDF {
    * Returns the logarithm of "a" with base "base".
    */
   public DoubleWritable evaluate(DoubleWritable base, DoubleWritable a) {
-    if (a == null || a.get() <= 0.0 || base == null || base.get() <= 1.0) {
+    if (a == null || base == null) {
+      return null;
+    }
+    return log(base.get(), a.get());
+  }
+
+  private DoubleWritable log(double base, double input) {
+    if (base <= 1.0 || input <= 0.0) {
+      return null;
+    }
+    result.set(Math.log(input) / Math.log(base));
+    return result;
+  }
+
+  /**
+   * Get the logarithm of the given decimal with the given base.
+   */
+  public DoubleWritable evaluate(DoubleWritable base, HiveDecimalWritable writable) {
+    if (base == null || writable == null) {
+      return null;
+    }
+    double d = writable.getHiveDecimal().bigDecimalValue().doubleValue();
+    return log(base.get(), d);
+  }
+
+  /**
+   * Get the logarithm of input with the given decimal as the base.
+   */
+  public DoubleWritable evaluate(HiveDecimalWritable base, DoubleWritable d) {
+    if (base == null || d == null) {
       return null;
-    } else {
-      result.set(Math.log(a.get()) / Math.log(base.get()));
-      return result;
     }
+
+    double b = base.getHiveDecimal().bigDecimalValue().doubleValue();
+    return log(b, d.get());
+  }
+
+  /**
+   * Get the logarithm of the given decimal input with the given decimal base.
+   */
+  public DoubleWritable evaluate(HiveDecimalWritable baseWritable, HiveDecimalWritable writable) {
+    if (baseWritable == null || writable == null) {
+      return null;
+    }
+
+    double base = baseWritable.getHiveDecimal().bigDecimalValue().doubleValue();
+    double d = writable.getHiveDecimal().bigDecimalValue().doubleValue();
+    return log(base, d);
   }
 
 }

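A quick illustrative exercise of the refactored UDFLog (hand-picked values; this is not the committed TestUDFMath):

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.io.DoubleWritable;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    UDFLog log = new UDFLog();

    // Double base: log_2(8) = 3.0
    DoubleWritable r = log.evaluate(new DoubleWritable(2.0), new DoubleWritable(8.0));
    assert Math.abs(r.get() - 3.0) < 1e-9;

    // A decimal base goes through bigDecimalValue().doubleValue() first.
    r = log.evaluate(new HiveDecimalWritable(HiveDecimal.create("2")),
        new DoubleWritable(8.0));
    assert Math.abs(r.get() - 3.0) < 1e-9;

    // base <= 1.0 or input <= 0.0 is rejected in the shared log() helper.
    assert log.evaluate(new DoubleWritable(1.0), new DoubleWritable(8.0)) == null;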
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog10.java Wed Feb  5 21:09:23 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog10DoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog10LongToDouble;
@@ -34,7 +33,7 @@ import org.apache.hadoop.hive.serde2.io.
     extended = "Example:\n"
     + "  > SELECT _FUNC_(10) FROM src LIMIT 1;\n" + "  1")
 @VectorizedExpressions({FuncLog10LongToDouble.class, FuncLog10DoubleToDouble.class})
-public class UDFLog10 extends UDF {
+public class UDFLog10 extends UDFMath {
   private final DoubleWritable result = new DoubleWritable();
 
   public UDFLog10() {
@@ -51,4 +50,5 @@ public class UDFLog10 extends UDF {
       return result;
     }
   }
+
 }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLog2.java Wed Feb  5 21:09:23 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog2DoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog2LongToDouble;
@@ -34,7 +33,7 @@ import org.apache.hadoop.hive.serde2.io.
     extended = "Example:\n"
     + "  > SELECT _FUNC_(2) FROM src LIMIT 1;\n" + "  1")
 @VectorizedExpressions({FuncLog2LongToDouble.class, FuncLog2DoubleToDouble.class})
-public class UDFLog2 extends UDF {
+public class UDFLog2 extends UDFMath {
   private static double log2 = Math.log(2.0);
 
   private final DoubleWritable result = new DoubleWritable();

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRadians.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRadians.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRadians.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRadians.java Wed Feb  5 21:09:23 2014
@@ -20,13 +20,11 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRadiansDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRadiansLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 
-
 @Description(
       name = "radians",
       value = "_FUNC_(x) - Converts degrees to radians",
@@ -35,7 +33,7 @@ import org.apache.hadoop.hive.serde2.io.
           "  1.5707963267949mo\n"
       )
 @VectorizedExpressions({FuncRadiansLongToDouble.class, FuncRadiansDoubleToDouble.class})
-public class UDFRadians extends UDF {
+public class UDFRadians extends UDFMath {
 
   @SuppressWarnings("unused")
   private static Log LOG = LogFactory.getLog(UDFRadians.class.getName());

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSin.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSin.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSin.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSin.java Wed Feb  5 21:09:23 2014
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinLongToDouble;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 /**
  * UDFSin.
@@ -34,7 +35,7 @@ import org.apache.hadoop.hive.serde2.io.
     extended = "Example:\n "
     + "  > SELECT _FUNC_(0) FROM src LIMIT 1;\n" + "  0")
 @VectorizedExpressions({FuncSinLongToDouble.class, FuncSinDoubleToDouble.class})
-public class UDFSin extends UDF {
+public class UDFSin extends UDFMath {
   private final DoubleWritable result = new DoubleWritable();
 
   public UDFSin() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSqrt.java Wed Feb  5 21:09:23 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSqrtDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSqrtLongToDouble;
@@ -33,7 +32,7 @@ import org.apache.hadoop.hive.serde2.io.
     extended = "Example:\n "
     + "  > SELECT _FUNC_(4) FROM src LIMIT 1;\n" + "  2")
 @VectorizedExpressions({FuncSqrtLongToDouble.class, FuncSqrtDoubleToDouble.class})
-public class UDFSqrt extends UDF {
+public class UDFSqrt extends UDFMath {
   private final DoubleWritable result = new DoubleWritable();
 
   public UDFSqrt() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTan.java?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTan.java Wed Feb  5 21:09:23 2014
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.udf;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncTanDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncTanLongToDouble;
@@ -35,7 +34,7 @@ import org.apache.hadoop.hive.serde2.io.
     		"  1"
     )
 @VectorizedExpressions({FuncTanLongToDouble.class, FuncTanDoubleToDouble.class})
-public class UDFTan extends UDF {
+public class UDFTan extends UDFMath {
 
   @SuppressWarnings("unused")
   private static Log LOG = LogFactory.getLog(UDFTan.class.getName());

Modified: hive/branches/tez/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out Wed Feb  5 21:09:23 2014
@@ -4,12 +4,14 @@ SELECT key, value
 FROM src
 WHERE key=86
 PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
 POSTHOOK: query: CREATE VIEW testViewPart PARTITIONED ON (value)
 AS
 SELECT key, value
 FROM src
 WHERE key=86
 POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
 POSTHOOK: Output: default@testViewPart
 PREHOOK: query: ALTER VIEW testViewPart 
 ADD PARTITION (value='val_86') PARTITION (value='val_xyz')

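The .q.out hunks from here on are golden-file updates rather than code changes: because CREATE VIEW now registers its source tables through PlanUtils.addInputsForView(), the pre/post-execution hooks print an extra "Input: default@src" (or "default@srcpart") line for each CREATE VIEW statement, and the affected clientnegative outputs are regenerated to match. The authorization_fail_7 change is different in kind: the extra PUBLIC line in the "show role grant" output appears to reflect a built-in PUBLIC role now being reported for every user.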
Modified: hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure.q.out?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure.q.out Wed Feb  5 21:09:23 2014
@@ -4,7 +4,9 @@ POSTHOOK: query: DROP VIEW xxx3
 POSTHOOK: type: DROPVIEW
 PREHOOK: query: CREATE VIEW xxx3 AS SELECT * FROM src
 PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
 POSTHOOK: query: CREATE VIEW xxx3 AS SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
 POSTHOOK: Output: default@xxx3
 FAILED: SemanticException [Error 10131]: To alter a view you need to use the ALTER VIEW command.

Modified: hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure2.q.out?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure2.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure2.q.out Wed Feb  5 21:09:23 2014
@@ -7,10 +7,12 @@ PARTITIONED ON (value)
 AS 
 SELECT * FROM src
 PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
 POSTHOOK: query: CREATE VIEW xxx4 
 PARTITIONED ON (value)
 AS 
 SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
 POSTHOOK: Output: default@xxx4
 FAILED: SemanticException [Error 10131]: To alter a view you need to use the ALTER VIEW command.

Modified: hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure4.q.out?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure4.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure4.q.out Wed Feb  5 21:09:23 2014
@@ -7,10 +7,12 @@ PARTITIONED ON (value)
 AS 
 SELECT * FROM src
 PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
 POSTHOOK: query: CREATE VIEW xxx5
 PARTITIONED ON (value)
 AS 
 SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
 POSTHOOK: Output: default@xxx5
 FAILED: SemanticException LOCATION clause illegal for view partition

Modified: hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure5.q.out?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure5.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure5.q.out Wed Feb  5 21:09:23 2014
@@ -7,10 +7,12 @@ PARTITIONED ON (value)
 AS 
 SELECT * FROM src
 PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
 POSTHOOK: query: CREATE VIEW xxx6
 PARTITIONED ON (value)
 AS 
 SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
 POSTHOOK: Output: default@xxx6
 FAILED: SemanticException Partition spec {v=val_86} contains non-partition columns

Modified: hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure6.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure6.q.out?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure6.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure6.q.out Wed Feb  5 21:09:23 2014
@@ -7,11 +7,13 @@ PARTITIONED ON (key)
 AS 
 SELECT hr,key FROM srcpart
 PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@srcpart
 POSTHOOK: query: CREATE VIEW xxx7
 PARTITIONED ON (key)
 AS 
 SELECT hr,key FROM srcpart
 POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@srcpart
 POSTHOOK: Output: default@xxx7
 FAILED: SemanticException [Error 10041]: No partition predicate found for Alias "xxx7:srcpart" Table "srcpart"
 FAILED: SemanticException [Error 10056]: The query does not reference any valid partition. To run this query, set hive.mapred.mode=nonstrict

Modified: hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure7.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure7.q.out?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure7.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure7.q.out Wed Feb  5 21:09:23 2014
@@ -7,10 +7,12 @@ PARTITIONED ON (ds,hr)
 AS 
 SELECT key,ds,hr FROM srcpart
 PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@srcpart
 POSTHOOK: query: CREATE VIEW xxx8
 PARTITIONED ON (ds,hr)
 AS 
 SELECT key,ds,hr FROM srcpart
 POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@srcpart
 POSTHOOK: Output: default@xxx8
 FAILED: SemanticException partition spec {ds=2011-01-01} doesn't contain all (2) partition columns

Modified: hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure9.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure9.q.out?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure9.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/alter_view_failure9.q.out Wed Feb  5 21:09:23 2014
@@ -6,9 +6,11 @@ PREHOOK: query: CREATE VIEW xxx4 
 AS 
 SELECT * FROM src
 PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
 POSTHOOK: query: CREATE VIEW xxx4 
 AS 
 SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
 POSTHOOK: Output: default@xxx4
 FAILED: SemanticException [Error 10131]: To alter a view you need to use the ALTER VIEW command.

Modified: hive/branches/tez/ql/src/test/results/clientnegative/analyze_view.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/analyze_view.q.out?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/analyze_view.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/analyze_view.q.out Wed Feb  5 21:09:23 2014
@@ -4,7 +4,9 @@ POSTHOOK: query: DROP VIEW av
 POSTHOOK: type: DROPVIEW
 PREHOOK: query: CREATE VIEW av AS SELECT * FROM src
 PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
 POSTHOOK: query: CREATE VIEW av AS SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
 POSTHOOK: Output: default@av
 FAILED: SemanticException [Error 10091]: ANALYZE is not supported for views

Modified: hive/branches/tez/ql/src/test/results/clientnegative/authorization_fail_7.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/authorization_fail_7.q.out?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/authorization_fail_7.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/authorization_fail_7.q.out Wed Feb  5 21:09:23 2014
@@ -26,6 +26,7 @@ PREHOOK: type: SHOW_ROLE_GRANT
 POSTHOOK: query: show role grant user hive_test_user
 POSTHOOK: type: SHOW_ROLE_GRANT
 hive_test_role_fail
+PUBLIC
 PREHOOK: query: show grant role hive_test_role_fail on table authorization_fail
 PREHOOK: type: SHOW_GRANT
 POSTHOOK: query: show grant role hive_test_role_fail on table authorization_fail

Modified: hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view1.q.out?rev=1564925&r1=1564924&r2=1564925&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view1.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/create_or_replace_view1.q.out Wed Feb  5 21:09:23 2014
@@ -8,8 +8,10 @@ drop view v
 POSTHOOK: type: DROPVIEW
 PREHOOK: query: create view v partitioned on (ds, hr) as select * from srcpart
 PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@srcpart
 POSTHOOK: query: create view v partitioned on (ds, hr) as select * from srcpart
 POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@srcpart
 POSTHOOK: Output: default@v
 PREHOOK: query: alter view v add partition (ds='1',hr='2')
 PREHOOK: type: ALTERTABLE_ADDPARTS