Posted to commits@hive.apache.org by gu...@apache.org on 2014/01/28 06:48:10 UTC

svn commit: r1561947 [12/17] - in /hive/branches/tez: ./ ant/ ant/src/org/apache/hadoop/hive/ant/ beeline/ cli/ cli/src/java/org/apache/hadoop/hive/cli/ common/ common/src/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/...

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Tue Jan 28 05:48:03 2014
@@ -95,17 +95,19 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_SHOW_GRANT, HiveOperation.SHOW_GRANT);
     commandType.put(HiveParser.TOK_GRANT_ROLE, HiveOperation.GRANT_ROLE);
     commandType.put(HiveParser.TOK_REVOKE_ROLE, HiveOperation.REVOKE_ROLE);
+    commandType.put(HiveParser.TOK_SHOW_ROLES, HiveOperation.SHOW_ROLES);
     commandType.put(HiveParser.TOK_SHOW_ROLE_GRANT, HiveOperation.SHOW_ROLE_GRANT);
     commandType.put(HiveParser.TOK_ALTERDATABASE_PROPERTIES, HiveOperation.ALTERDATABASE);
     commandType.put(HiveParser.TOK_DESCDATABASE, HiveOperation.DESCDATABASE);
     commandType.put(HiveParser.TOK_ALTERTABLE_SKEWED, HiveOperation.ALTERTABLE_SKEWED);
     commandType.put(HiveParser.TOK_ANALYZE, HiveOperation.ANALYZE_TABLE);
     commandType.put(HiveParser.TOK_ALTERVIEW_RENAME, HiveOperation.ALTERVIEW_RENAME);
+    commandType.put(HiveParser.TOK_ALTERTABLE_PARTCOLTYPE, HiveOperation.ALTERTABLE_PARTCOLTYPE);
   }
 
   static {
     tablePartitionCommandType.put(
-        HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE,
+        HiveParser.TOK_ALTERTABLE_PROTECTMODE,
         new HiveOperation[] { HiveOperation.ALTERTABLE_PROTECTMODE,
             HiveOperation.ALTERPARTITION_PROTECTMODE });
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_FILEFORMAT,
@@ -114,7 +116,7 @@ public final class SemanticAnalyzerFacto
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_LOCATION,
         new HiveOperation[] { HiveOperation.ALTERTABLE_LOCATION,
             HiveOperation.ALTERPARTITION_LOCATION });
-    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES,
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_MERGEFILES,
         new HiveOperation[] {HiveOperation.ALTERTABLE_MERGEFILES,
             HiveOperation.ALTERPARTITION_MERGEFILES });
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER,
@@ -171,6 +173,7 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_DROPTABLE_PROPERTIES:
       case HiveParser.TOK_ALTERTABLE_SERIALIZER:
       case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
+      case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE:
       case HiveParser.TOK_ALTERINDEX_REBUILD:
       case HiveParser.TOK_ALTERINDEX_PROPERTIES:
       case HiveParser.TOK_ALTERVIEW_PROPERTIES:
@@ -195,7 +198,6 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_ALTERTABLE_TOUCH:
       case HiveParser.TOK_ALTERTABLE_ARCHIVE:
       case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
-      case HiveParser.TOK_ALTERTABLE_ALTERPARTS:
       case HiveParser.TOK_LOCKTABLE:
       case HiveParser.TOK_UNLOCKTABLE:
       case HiveParser.TOK_LOCKDB:
@@ -208,6 +210,7 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_GRANT_ROLE:
       case HiveParser.TOK_REVOKE_ROLE:
       case HiveParser.TOK_SHOW_ROLE_GRANT:
+      case HiveParser.TOK_SHOW_ROLES:
       case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
       case HiveParser.TOK_ALTERTABLE_SKEWED:
       case HiveParser.TOK_TRUNCATETABLE:
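
For orientation, here is a minimal standalone sketch, with a hypothetical token id and a stand-in enum rather than Hive's own classes, of how the commandType table extended above maps a parse-tree token such as the new TOK_SHOW_ROLES to its operation:

import java.util.HashMap;
import java.util.Map;

class CommandTypeLookupSketch {
  static final int TOK_SHOW_ROLES = 864;  // hypothetical token id
  enum Op { SHOW_ROLES }                  // stand-in for HiveOperation

  static final Map<Integer, Op> commandType = new HashMap<Integer, Op>();
  static {
    commandType.put(TOK_SHOW_ROLES, Op.SHOW_ROLES);
  }

  public static void main(String[] args) {
    // statements with no registered operation simply map to null
    System.out.println(commandType.get(TOK_SHOW_ROLES)); // SHOW_ROLES
  }
}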

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java Tue Jan 28 05:48:03 2014
@@ -218,10 +218,13 @@ class UnparseTranslator {
    */
   void applyTranslations(TokenRewriteStream tokenRewriteStream) {
     for (Map.Entry<Integer, Translation> entry : translations.entrySet()) {
-      tokenRewriteStream.replace(
-        entry.getKey(),
-        entry.getValue().tokenStopIndex,
-        entry.getValue().replacementText);
+      if (entry.getKey() > 0) { // a non-positive index means the key didn't exist
+                                // in the original stream (i.e., we changed the tree)
+        tokenRewriteStream.replace(
+           entry.getKey(),
+           entry.getValue().tokenStopIndex,
+           entry.getValue().replacementText);
+      }
     }
     for (CopyTranslation copyTranslation : copyTranslations) {
       String replacementText = tokenRewriteStream.toString(
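
The new guard is there because tree rewriting can register translations for nodes synthesized after parsing, and those carry token indexes that never occurred in the original stream. A standalone sketch of the effect, using toy types instead of ANTLR's TokenRewriteStream:

import java.util.Map;
import java.util.TreeMap;

class ApplyTranslationsSketch {
  public static void main(String[] args) {
    Map<Integer, String> translations = new TreeMap<Integer, String>();
    translations.put(5, "`renamed_col`"); // token 5 existed in the input
    translations.put(-1, "`synthetic`");  // node added while rewriting the tree
    for (Map.Entry<Integer, String> e : translations.entrySet()) {
      if (e.getKey() > 0) {               // same guard as above
        System.out.println("replace token " + e.getKey() + " with " + e.getValue());
      }
    }
  }
}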

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java Tue Jan 28 05:48:03 2014
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -26,27 +27,131 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Order;
 
 /**
- * Contains the information needed to add a partition.
+ * Contains the information needed to add one or more partitions.
  */
 public class AddPartitionDesc extends DDLDesc implements Serializable {
 
+  public static class OnePartitionDesc {
+    public OnePartitionDesc() {}
+
+    OnePartitionDesc(
+        Map<String, String> partSpec, String location, Map<String, String> params) {
+      this(partSpec, location);
+      this.partParams = params;
+    }
+
+    OnePartitionDesc(Map<String, String> partSpec, String location) {
+      this.partSpec = partSpec;
+      this.location = location;
+    }
+
+    Map<String, String> partSpec;
+    Map<String, String> partParams;
+    String location;
+    String inputFormat = null;
+    String outputFormat = null;
+    int numBuckets = -1;
+    List<FieldSchema> cols = null;
+    String serializationLib = null;
+    Map<String, String> serdeParams = null;
+    List<String> bucketCols = null;
+    List<Order> sortCols = null;
+
+    public Map<String, String> getPartSpec() {
+      return partSpec;
+    }
+
+    /**
+     * @return location of partition in relation to table
+     */
+    public String getLocation() {
+      return location;
+    }
+
+    public void setLocation(String location) {
+      this.location = location;
+    }
+
+    public Map<String, String> getPartParams() {
+      return partParams;
+    }
+
+    public void setPartParams(Map<String, String> partParams) {
+      this.partParams = partParams;
+    }
+
+    public int getNumBuckets() {
+      return numBuckets;
+    }
+
+    public void setNumBuckets(int numBuckets) {
+      this.numBuckets = numBuckets;
+    }
+
+    public List<FieldSchema> getCols() {
+      return cols;
+    }
+
+    public void setCols(List<FieldSchema> cols) {
+      this.cols = cols;
+    }
+
+    public String getSerializationLib() {
+      return serializationLib;
+    }
+
+    public void setSerializationLib(String serializationLib) {
+      this.serializationLib = serializationLib;
+    }
+
+    public Map<String, String> getSerdeParams() {
+      return serdeParams;
+    }
+
+    public void setSerdeParams(Map<String, String> serdeParams) {
+      this.serdeParams = serdeParams;
+    }
+
+    public List<String> getBucketCols() {
+      return bucketCols;
+    }
+
+    public void setBucketCols(List<String> bucketCols) {
+      this.bucketCols = bucketCols;
+    }
+
+    public List<Order> getSortCols() {
+      return sortCols;
+    }
+
+    public void setSortCols(List<Order> sortCols) {
+      this.sortCols = sortCols;
+    }
+
+    public String getInputFormat() {
+      return inputFormat;
+    }
+
+    public void setInputFormat(String inputFormat) {
+      this.inputFormat = inputFormat;
+    }
+
+    public String getOutputFormat() {
+      return outputFormat;
+    }
+
+    public void setOutputFormat(String outputFormat) {
+      this.outputFormat = outputFormat;
+    }
+  }
+
   private static final long serialVersionUID = 1L;
 
   String tableName;
   String dbName;
-  String location;
   boolean ifNotExists;
-  boolean expectView;
-  LinkedHashMap<String, String> partSpec;
-  Map<String, String> partParams;
-  String inputFormat = null;
-  String outputFormat = null;
-  int numBuckets = -1;
-  List<FieldSchema> cols = null;
-  String serializationLib = null;
-  Map<String, String> serdeParams = null;
-  List<String> bucketCols = null;
-  List<Order> sortCols = null;
+  List<OnePartitionDesc> partitions = null;
+
 
   /**
    * For serialization only.
@@ -54,7 +159,16 @@ public class AddPartitionDesc extends DD
   public AddPartitionDesc() {
   }
 
+  public AddPartitionDesc(
+      String dbName, String tableName, boolean ifNotExists) {
+    super();
+    this.dbName = dbName;
+    this.tableName = tableName;
+    this.ifNotExists = ifNotExists;
+  }
+
   /**
+   * Legacy single-partition constructor, kept for ImportSemanticAnalyzer.
    * @param dbName
    *          database to add to.
    * @param tableName
@@ -66,36 +180,26 @@ public class AddPartitionDesc extends DD
    * @param params
    *          partition parameters.
    */
+  @Deprecated
   public AddPartitionDesc(String dbName, String tableName,
       Map<String, String> partSpec, String location, Map<String, String> params) {
-    this(dbName, tableName, partSpec, location, true, false);
-    this.partParams = params;
-  }
-
-  /**
-   * @param dbName
-   *          database to add to.
-   * @param tableName
-   *          table to add to.
-   * @param partSpec
-   *          partition specification.
-   * @param location
-   *          partition location, relative to table location.
-   * @param ifNotExists
-   *          if true, the partition is only added if it doesn't exist
-   * @param expectView
-   *          true for ALTER VIEW, false for ALTER TABLE
-   */
-  public AddPartitionDesc(String dbName, String tableName,
-      Map<String, String> partSpec, String location, boolean ifNotExists,
-      boolean expectView) {
     super();
     this.dbName = dbName;
     this.tableName = tableName;
-    this.partSpec = new LinkedHashMap<String,String>(partSpec);
-    this.location = location;
-    this.ifNotExists = ifNotExists;
-    this.expectView = expectView;
+    this.ifNotExists = true;
+    addPartition(partSpec, location, params);
+  }
+
+  public void addPartition(Map<String, String> partSpec, String location) {
+    addPartition(partSpec, location, null);
+  }
+
+  private void addPartition(
+      Map<String, String> partSpec, String location, Map<String, String> params) {
+    if (this.partitions == null) {
+      this.partitions = new ArrayList<OnePartitionDesc>();
+    }
+    this.partitions.add(new OnePartitionDesc(partSpec, location, params));
   }
 
   /**
@@ -132,42 +236,39 @@ public class AddPartitionDesc extends DD
    * @return location of partition in relation to table
    */
   @Explain(displayName = "Location")
-  public String getLocation() {
-    return location;
-  }
-
-  /**
-   * @param location
-   *          location of partition in relation to table
-   */
-  public void setLocation(String location) {
-    this.location = location;
-  }
-
-  /**
-   * @return partition specification.
-   */
-  public LinkedHashMap<String, String> getPartSpec() {
-    return partSpec;
+  public String getLocationForExplain() {
+    if (this.partitions == null || this.partitions.isEmpty()) return "<no partition>";
+    boolean isFirst = true;
+    StringBuilder sb = new StringBuilder();
+    for (OnePartitionDesc desc : this.partitions) {
+      if (!isFirst) {
+        sb.append(", ");
+      }
+      isFirst = false;
+      sb.append(desc.location);
+    }
+    return sb.toString();
   }
 
   @Explain(displayName = "Spec")
-  public String getPartSpecString() {
-    return partSpec.toString();
-  }
-
-  /**
-   * @param partSpec
-   *          partition specification
-   */
-  public void setPartSpec(LinkedHashMap<String, String> partSpec) {
-    this.partSpec = partSpec;
+  public String getPartSpecStringForExplain() {
+    if (this.partitions == null || this.partitions.isEmpty()) return "<no partition>";
+    boolean isFirst = true;
+    StringBuilder sb = new StringBuilder();
+    for (OnePartitionDesc desc : this.partitions) {
+      if (!isFirst) {
+        sb.append(", ");
+      }
+      isFirst = false;
+      sb.append(desc.partSpec.toString());
+    }
+    return sb.toString();
   }
 
   /**
    * @return if the partition should only be added if it doesn't exist already
    */
-  public boolean getIfNotExists() {
+  public boolean isIfNotExists() {
     return this.ifNotExists;
   }
 
@@ -179,98 +280,11 @@ public class AddPartitionDesc extends DD
     this.ifNotExists = ifNotExists;
   }
 
-  /**
-   * @return partition parameters.
-   */
-  public Map<String, String> getPartParams() {
-    return partParams;
-  }
-
-  /**
-   * @param partParams
-   *          partition parameters
-   */
-
-  public void setPartParams(Map<String, String> partParams) {
-    this.partParams = partParams;
-  }
-
-  public int getNumBuckets() {
-    return numBuckets;
-  }
-
-  public void setNumBuckets(int numBuckets) {
-    this.numBuckets = numBuckets;
-  }
-
-  public List<FieldSchema> getCols() {
-    return cols;
-  }
-
-  public void setCols(List<FieldSchema> cols) {
-    this.cols = cols;
-  }
-
-  public String getSerializationLib() {
-    return serializationLib;
-  }
-
-  public void setSerializationLib(String serializationLib) {
-    this.serializationLib = serializationLib;
-  }
-
-  public Map<String, String> getSerdeParams() {
-    return serdeParams;
-  }
-
-  public void setSerdeParams(Map<String, String> serdeParams) {
-    this.serdeParams = serdeParams;
-  }
-
-  public List<String> getBucketCols() {
-    return bucketCols;
-  }
-
-  public void setBucketCols(List<String> bucketCols) {
-    this.bucketCols = bucketCols;
-  }
-
-  public List<Order> getSortCols() {
-    return sortCols;
-  }
-
-  public void setSortCols(List<Order> sortCols) {
-    this.sortCols = sortCols;
-  }
-
-  public String getInputFormat() {
-    return inputFormat;
+  public int getPartitionCount() {
+    return this.partitions.size();
   }
 
-  public void setInputFormat(String inputFormat) {
-    this.inputFormat = inputFormat;
-  }
-
-  public String getOutputFormat() {
-    return outputFormat;
-  }
-
-  public void setOutputFormat(String outputFormat) {
-    this.outputFormat = outputFormat;
-  }
-
-  /*
-   * @return whether to expect a view being altered
-   */
-  public boolean getExpectView() {
-    return expectView;
-  }
-
-  /**
-   * @param expectView
-   *          set whether to expect a view being altered
-   */
-  public void setExpectView(boolean expectView) {
-    this.expectView = expectView;
+  public OnePartitionDesc getPartition(int i) {
+    return this.partitions.get(i);
   }
 }
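
A hedged usage sketch for the reworked descriptor follows; it assumes the AddPartitionDesc shown above is on the classpath, and the database, table, and partition values are invented:

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;

class AddPartitionDescUsageSketch {
  public static void main(String[] args) {
    AddPartitionDesc desc = new AddPartitionDesc("default", "page_views", true);
    Map<String, String> spec = new LinkedHashMap<String, String>();
    spec.put("ds", "2014-01-27");
    desc.addPartition(spec, "ds=2014-01-27"); // location relative to the table
    for (int i = 0; i < desc.getPartitionCount(); ++i) {
      AddPartitionDesc.OnePartitionDesc p = desc.getPartition(i);
      System.out.println(p.getPartSpec() + " -> " + p.getLocation());
    }
  }
}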

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverCommonJoin.java Tue Jan 28 05:48:03 2014
@@ -19,22 +19,28 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 
 /**
  * ConditionalResolverSkewJoin.
  *
  */
 public class ConditionalResolverCommonJoin implements ConditionalResolver, Serializable {
+
   private static final long serialVersionUID = 1L;
+  private static final Log LOG = LogFactory.getLog(ConditionalResolverCommonJoin.class);
 
   /**
    * ConditionalResolverSkewJoinCtx.
@@ -71,7 +77,8 @@ public class ConditionalResolverCommonJo
     }
 
     public HashMap<String, Long> getAliasToKnownSize() {
-      return aliasToKnownSize == null ? new HashMap<String, Long>() : aliasToKnownSize;
+      return aliasToKnownSize == null ?
+          aliasToKnownSize = new HashMap<String, Long>() : aliasToKnownSize;
     }
 
     public void setAliasToKnownSize(HashMap<String, Long> aliasToKnownSize) {
@@ -101,6 +108,20 @@ public class ConditionalResolverCommonJo
     public void setHdfsTmpDir(Path hdfsTmpDir) {
       this.hdfsTmpDir = hdfsTmpDir;
     }
+
+    @Override
+    public ConditionalResolverCommonJoinCtx clone() {
+      ConditionalResolverCommonJoinCtx ctx = new ConditionalResolverCommonJoinCtx();
+      ctx.setAliasToTask(aliasToTask);
+      ctx.setCommonJoinTask(commonJoinTask);
+      ctx.setPathToAliases(pathToAliases);
+      ctx.setHdfsTmpDir(hdfsTmpDir);
+      ctx.setLocalTmpDir(localTmpDir);
+      // if any of the join participants comes from another MR job, it has an alias
+      // like '[pos:]$INTNAME', whose size should be calculated for each resolver.
+      ctx.setAliasToKnownSize(new HashMap<String, Long>(aliasToKnownSize));
+      return ctx;
+    }
   }
 
   public ConditionalResolverCommonJoin() {
@@ -108,15 +129,11 @@ public class ConditionalResolverCommonJo
 
   @Override
   public List<Task<? extends Serializable>> getTasks(HiveConf conf, Object objCtx) {
-    ConditionalResolverCommonJoinCtx ctx = (ConditionalResolverCommonJoinCtx) objCtx;
+    ConditionalResolverCommonJoinCtx ctx = ((ConditionalResolverCommonJoinCtx) objCtx).clone();
     List<Task<? extends Serializable>> resTsks = new ArrayList<Task<? extends Serializable>>();
 
     // get aliasToPath and pass it to the heuristic
-    HashMap<String, ArrayList<String>> pathToAliases = ctx.getPathToAliases();
-    HashMap<String, Long> aliasToKnownSize = ctx.getAliasToKnownSize();
-    String bigTableAlias = this.resolveMapJoinTask(pathToAliases, ctx
-        .getAliasToTask(), aliasToKnownSize, ctx.getHdfsTmpDir(), ctx
-        .getLocalTmpDir(), conf);
+    String bigTableAlias = resolveDriverAlias(ctx, conf);
 
     if (bigTableAlias == null) {
       // run common join task
@@ -135,99 +152,98 @@ public class ConditionalResolverCommonJo
     return resTsks;
   }
 
-  static class AliasFileSizePair implements Comparable<AliasFileSizePair> {
-    String alias;
-    long size;
-    AliasFileSizePair(String alias, long size) {
-      super();
-      this.alias = alias;
-      this.size = size;
-    }
-    @Override
-    public int compareTo(AliasFileSizePair o) {
-      if (o == null) {
-        return 1;
-      }
-      return (size < o.size) ? -1 : ((size > o.size) ? 1 : 0);
+  private String resolveDriverAlias(ConditionalResolverCommonJoinCtx ctx, HiveConf conf) {
+    try {
+      resolveUnknownSizes(ctx, conf);
+      return resolveMapJoinTask(ctx, conf);
+    } catch (Exception e) {
+      LOG.info("Failed to resolve driver alias by exception.. Falling back to common join", e);
     }
+    return null;
   }
 
-  private String resolveMapJoinTask(
-      HashMap<String, ArrayList<String>> pathToAliases,
-      HashMap<String, Task<? extends Serializable>> aliasToTask,
-      HashMap<String, Long> aliasToKnownSize, Path hdfsTmpDir,
-      Path localTmpDir, HiveConf conf) {
+  protected String resolveMapJoinTask(
+      ConditionalResolverCommonJoinCtx ctx, HiveConf conf) throws Exception {
+
+    Set<String> aliases = getParticipants(ctx);
+
+    Map<String, Long> aliasToKnownSize = ctx.getAliasToKnownSize();
+    Map<String, ArrayList<String>> pathToAliases = ctx.getPathToAliases();
+    Map<String, Task<? extends Serializable>> aliasToTask = ctx.getAliasToTask();
+
+    long threshold = HiveConf.getLongVar(conf, HiveConf.ConfVars.HIVESMALLTABLESFILESIZE);
 
+    Long bigTableSize = null;
+    Long smallTablesSize = null;
     String bigTableFileAlias = null;
-    long smallTablesFileSizeSum = 0;
+    for (String alias : aliases) {
+      if (!aliasToTask.containsKey(alias)) {
+        continue;
+      }
+      long sumOfOthers = Utilities.sumOfExcept(aliasToKnownSize, aliases, alias);
+      if (sumOfOthers < 0 || sumOfOthers > threshold) {
+        continue;
+      }
+      // at most one alias is unknown; we can safely regard it as the big alias
+      Long aliasSize = aliasToKnownSize.get(alias);
+      if (bigTableSize == null || (aliasSize != null && aliasSize > bigTableSize)) {
+        bigTableFileAlias = alias;
+        bigTableSize = aliasSize;
+        smallTablesSize = sumOfOthers;
+      }
+    }
+    if (bigTableFileAlias != null) {
+      LOG.info("Driver alias is " + bigTableFileAlias + " with size " + bigTableSize
+          + " (total size of others : " + smallTablesSize + ", threshold : " + threshold + ")");
+      return bigTableFileAlias;
+    }
+    LOG.info("Failed to resolve driver alias (threshold : " + threshold +
+        ", length mapping : " + aliasToKnownSize + ")");
+    return null;
+  }
 
-    Map<String, AliasFileSizePair> aliasToFileSizeMap = new HashMap<String, AliasFileSizePair>();
-    for (Map.Entry<String, Long> entry : aliasToKnownSize.entrySet()) {
-      String alias = entry.getKey();
-      AliasFileSizePair pair = new AliasFileSizePair(alias, entry.getValue());
-      aliasToFileSizeMap.put(alias, pair);
+  private Set<String> getParticipants(ConditionalResolverCommonJoinCtx ctx) {
+    Set<String> participants = new HashSet<String>();
+    for (List<String> aliases : ctx.getPathToAliases().values()) {
+      participants.addAll(aliases);
     }
+    return participants;
+  }
 
-    try {
-      // need to compute the input size at runtime, and select the biggest as
-      // the big table.
-      for (Map.Entry<String, ArrayList<String>> oneEntry : pathToAliases
-          .entrySet()) {
-        String p = oneEntry.getKey();
-        // this path is intermediate data
-        if (p.startsWith(hdfsTmpDir.toString()) || p.startsWith(localTmpDir.toString())) {
-          ArrayList<String> aliasArray = oneEntry.getValue();
-          if (aliasArray.size() <= 0) {
-            continue;
-          }
-          Path path = new Path(p);
-          FileSystem fs = path.getFileSystem(conf);
-          long fileSize = fs.getContentSummary(path).getLength();
-          for (String alias : aliasArray) {
-            AliasFileSizePair pair = aliasToFileSizeMap.get(alias);
-            if (pair == null) {
-              pair = new AliasFileSizePair(alias, 0);
-              aliasToFileSizeMap.put(alias, pair);
-            }
-            pair.size += fileSize;
-          }
-        }
-      }
-      // generate file size to alias mapping; but not set file size as key,
-      // because different file may have the same file size.
+  protected void resolveUnknownSizes(ConditionalResolverCommonJoinCtx ctx, HiveConf conf)
+      throws Exception {
+
+    Set<String> aliases = getParticipants(ctx);
 
-      List<AliasFileSizePair> aliasFileSizeList = new ArrayList<AliasFileSizePair>(
-          aliasToFileSizeMap.values());
+    Map<String, Long> aliasToKnownSize = ctx.getAliasToKnownSize();
+    Map<String, ArrayList<String>> pathToAliases = ctx.getPathToAliases();
 
-      Collections.sort(aliasFileSizeList);
-      // iterating through this list from the end to beginning, trying to find
-      // the big table for mapjoin
-      int idx = aliasFileSizeList.size() - 1;
-      boolean bigAliasFound = false;
-      while (idx >= 0) {
-        AliasFileSizePair pair = aliasFileSizeList.get(idx);
-        String alias = pair.alias;
-        long size = pair.size;
-        idx--;
-        if (!bigAliasFound && aliasToTask.get(alias) != null) {
-          // got the big table
-          bigAliasFound = true;
-          bigTableFileAlias = alias;
-          continue;
+    Set<String> unknownPaths = new HashSet<String>();
+    for (Map.Entry<String, ArrayList<String>> entry : pathToAliases.entrySet()) {
+      for (String alias : entry.getValue()) {
+        if (aliases.contains(alias) && !aliasToKnownSize.containsKey(alias)) {
+          unknownPaths.add(entry.getKey());
+          break;
         }
-        smallTablesFileSizeSum += size;
       }
-
-      // compare with threshold
-      long threshold = HiveConf.getLongVar(conf, HiveConf.ConfVars.HIVESMALLTABLESFILESIZE);
-      if (smallTablesFileSizeSum <= threshold) {
-        return bigTableFileAlias;
-      } else {
-        return null;
+    }
+    Path hdfsTmpDir = ctx.getHdfsTmpDir();
+    Path localTmpDir = ctx.getLocalTmpDir();
+    // need to compute the input size at runtime, and select the biggest as
+    // the big table.
+    for (String p : unknownPaths) {
+      // this path is intermediate data
+      if (p.startsWith(hdfsTmpDir.toString()) || p.startsWith(localTmpDir.toString())) {
+        Path path = new Path(p);
+        FileSystem fs = path.getFileSystem(conf);
+        long fileSize = fs.getContentSummary(path).getLength();
+        for (String alias : pathToAliases.get(p)) {
+          Long length = aliasToKnownSize.get(alias);
+          if (length == null) {
+            aliasToKnownSize.put(alias, fileSize);
+          }
+        }
       }
-    } catch (Exception e) {
-      e.printStackTrace();
-      return null;
     }
   }
 }
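
Restated as a self-contained sketch, the new heuristic picks the alias whose other participants' total known size stays under HiveConf.ConfVars.HIVESMALLTABLESFILESIZE, preferring the largest candidate. The aliasToTask filter is omitted here, and sumOfExcept is only a guess at the semantics of Utilities.sumOfExcept (negative when more than one size is unknown):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

class DriverAliasSketch {
  // guessed semantics: sum of all sizes except 'except'; -1 if any is unknown
  static long sumOfExcept(Map<String, Long> sizes, Set<String> aliases, String except) {
    long total = 0;
    for (String alias : aliases) {
      if (alias.equals(except)) continue;
      Long size = sizes.get(alias);
      if (size == null) return -1;
      total += size;
    }
    return total;
  }

  static String resolveDriverAlias(Map<String, Long> sizes, Set<String> aliases, long threshold) {
    String bigTableAlias = null;
    Long bigTableSize = null;
    for (String alias : aliases) {
      long sumOfOthers = sumOfExcept(sizes, aliases, alias);
      if (sumOfOthers < 0 || sumOfOthers > threshold) continue;
      Long aliasSize = sizes.get(alias);  // may be null: the one unknown alias
      if (bigTableSize == null || (aliasSize != null && aliasSize > bigTableSize)) {
        bigTableAlias = alias;
        bigTableSize = aliasSize;
      }
    }
    return bigTableAlias;                 // null -> run the common join task
  }

  public static void main(String[] args) {
    Map<String, Long> sizes = new HashMap<String, Long>();
    sizes.put("a", 40000000L); sizes.put("b", 100L); sizes.put("c", 200L);
    Set<String> aliases = new HashSet<String>(Arrays.asList("a", "b", "c"));
    System.out.println(resolveDriverAlias(sizes, aliases, 25000000L)); // a
  }
}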

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java Tue Jan 28 05:48:03 2014
@@ -50,7 +50,7 @@ public class ConditionalResolverSkewJoin
     // tables into corresponding different dirs (one dir per table).
     // this map stores mapping from "big key dir" to its corresponding mapjoin
     // task.
-    private HashMap<String, Task<? extends Serializable>> dirToTaskMap;
+    private HashMap<Path, Task<? extends Serializable>> dirToTaskMap;
     private Task<? extends Serializable> noSkewTask;
 
     /**
@@ -60,19 +60,19 @@ public class ConditionalResolverSkewJoin
     }
 
     public ConditionalResolverSkewJoinCtx(
-        HashMap<String, Task<? extends Serializable>> dirToTaskMap,
+        HashMap<Path, Task<? extends Serializable>> dirToTaskMap,
         Task<? extends Serializable> noSkewTask) {
       super();
       this.dirToTaskMap = dirToTaskMap;
       this.noSkewTask = noSkewTask;
     }
 
-    public HashMap<String, Task<? extends Serializable>> getDirToTaskMap() {
+    public HashMap<Path, Task<? extends Serializable>> getDirToTaskMap() {
       return dirToTaskMap;
     }
 
     public void setDirToTaskMap(
-        HashMap<String, Task<? extends Serializable>> dirToTaskMap) {
+        HashMap<Path, Task<? extends Serializable>> dirToTaskMap) {
       this.dirToTaskMap = dirToTaskMap;
     }
 
@@ -94,16 +94,14 @@ public class ConditionalResolverSkewJoin
     ConditionalResolverSkewJoinCtx ctx = (ConditionalResolverSkewJoinCtx) objCtx;
     List<Task<? extends Serializable>> resTsks = new ArrayList<Task<? extends Serializable>>();
 
-    Map<String, Task<? extends Serializable>> dirToTaskMap = ctx
+    Map<Path, Task<? extends Serializable>> dirToTaskMap = ctx
         .getDirToTaskMap();
-    Iterator<Entry<String, Task<? extends Serializable>>> bigKeysPathsIter = dirToTaskMap
+    Iterator<Entry<Path, Task<? extends Serializable>>> bigKeysPathsIter = dirToTaskMap
         .entrySet().iterator();
     try {
       while (bigKeysPathsIter.hasNext()) {
-        Entry<String, Task<? extends Serializable>> entry = bigKeysPathsIter
-            .next();
-        String path = entry.getKey();
-        Path dirPath = new Path(path);
+        Entry<Path, Task<? extends Serializable>> entry = bigKeysPathsIter.next();
+        Path dirPath = entry.getKey();
         FileSystem inpFs = dirPath.getFileSystem(conf);
         FileStatus[] fstatus = Utilities.listStatusIfExists(dirPath, inpFs);
         if (fstatus != null && fstatus.length > 0) {
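
With the map now keyed by Path, the resolver schedules a map-join task only for big-key directories that actually received rows. A hedged sketch with plain Hadoop APIs: task objects are replaced by strings, the directory is invented, and Hive's Utilities.listStatusIfExists is approximated by an exists/listStatus pair:

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

class SkewDirSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Map<Path, String> dirToTask = new HashMap<Path, String>();       // task stand-ins
    dirToTask.put(new Path("/tmp/hive/bigkeys/0"), "mapjoin-task-0"); // invented dir
    for (Map.Entry<Path, String> e : dirToTask.entrySet()) {
      FileSystem fs = e.getKey().getFileSystem(conf);
      FileStatus[] status = fs.exists(e.getKey()) ? fs.listStatus(e.getKey()) : null;
      if (status != null && status.length > 0) {
        System.out.println("schedule " + e.getValue()); // dir has skewed rows
      }
    }
  }
}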

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/DynamicPartitionCtx.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/DynamicPartitionCtx.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/DynamicPartitionCtx.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/DynamicPartitionCtx.java Tue Jan 28 05:48:03 2014
@@ -24,6 +24,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -39,7 +40,7 @@ public class DynamicPartitionCtx impleme
   private int numDPCols;   // number of dynamic partition columns
   private int numSPCols;   // number of static partition columns
   private String spPath;   // path name corresponding to SP columns
-  private String rootPath; // the root path DP columns paths start from
+  private Path rootPath; // the root path DP columns paths start from
   private int numBuckets;  // number of buckets in each partition
 
   private Map<String, String> inputToDPCols; // mapping from input column names to DP columns
@@ -128,11 +129,11 @@ public class DynamicPartitionCtx impleme
     return this.numBuckets;
   }
 
-  public void setRootPath(String root) {
+  public void setRootPath(Path root) {
     this.rootPath = root;
   }
 
-  public String getRootPath() {
+  public Path getRootPath() {
     return this.rootPath;
   }
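
The same String-to-Path migration recurs through the rest of this commit (FileSinkDesc, HashTableSinkDesc, JoinDesc, MapWork, MapredLocalWork): directory-valued fields become org.apache.hadoop.fs.Path, so callers compose paths rather than concatenate strings. A minimal before/after sketch with an invented staging directory:

import org.apache.hadoop.fs.Path;

class PathFieldSketch {
  public static void main(String[] args) {
    String before = "/tmp/hive-staging" + "/" + "task_0001";           // old style
    Path after = new Path(new Path("/tmp/hive-staging"), "task_0001"); // new style
    System.out.println(before.equals(after.toString()));               // true
  }
}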
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java Tue Jan 28 05:48:03 2014
@@ -22,10 +22,18 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.util.ReflectionUtils;
 
 public class ExprNodeDescUtils {
 
@@ -244,4 +252,70 @@ public class ExprNodeDescUtils {
     }
     throw new SemanticException("Met multiple parent operators");
   }
+
+  public static ExprNodeDesc[] extractComparePair(ExprNodeDesc expr1, ExprNodeDesc expr2) {
+    expr1 = extractConstant(expr1);
+    expr2 = extractConstant(expr2);
+    if (expr1 instanceof ExprNodeColumnDesc && expr2 instanceof ExprNodeConstantDesc) {
+      return new ExprNodeDesc[] {expr1, expr2};
+    }
+    if (expr1 instanceof ExprNodeConstantDesc && expr2 instanceof ExprNodeColumnDesc) {
+      return new ExprNodeDesc[] {expr2, expr1, null}; // add null as a marker (inverted order)
+    }
+    // todo: constant op constant
+    return null;
+  }
+
+  // from IndexPredicateAnalyzer
+  private static ExprNodeDesc extractConstant(ExprNodeDesc expr) {
+    if (!(expr instanceof ExprNodeGenericFuncDesc)) {
+      return expr;
+    }
+    ExprNodeConstantDesc folded = foldConstant(((ExprNodeGenericFuncDesc) expr));
+    return folded == null ? expr : folded;
+  }
+
+  private static ExprNodeConstantDesc foldConstant(ExprNodeGenericFuncDesc func) {
+    GenericUDF udf = func.getGenericUDF();
+    if (!FunctionRegistry.isDeterministic(udf) || FunctionRegistry.isStateful(udf)) {
+      return null;
+    }
+    try {
+      // If the UDF depends on any external resources, we can't fold because the
+      // resources may not be available at compile time.
+      if (udf instanceof GenericUDFBridge) {
+        UDF internal = ReflectionUtils.newInstance(((GenericUDFBridge) udf).getUdfClass(), null);
+        if (internal.getRequiredFiles() != null || internal.getRequiredJars() != null) {
+          return null;
+        }
+      } else {
+        if (udf.getRequiredFiles() != null || udf.getRequiredJars() != null) {
+          return null;
+        }
+      }
+
+      if (func.getChildren() != null) {
+        for (ExprNodeDesc child : func.getChildren()) {
+          if (child instanceof ExprNodeConstantDesc) {
+            continue;
+          }
+          if (child instanceof ExprNodeGenericFuncDesc) {
+            if (foldConstant((ExprNodeGenericFuncDesc) child) != null) {
+              continue;
+            }
+          }
+          return null;
+        }
+      }
+      ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(func);
+      ObjectInspector output = evaluator.initialize(null);
+
+      Object constant = evaluator.evaluate(null);
+      Object java = ObjectInspectorUtils.copyToStandardJavaObject(constant, output);
+
+      return new ExprNodeConstantDesc(java);
+    } catch (Exception e) {
+      return null;
+    }
+  }
 }
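
Reduced to a toy model (these are not Hive classes), the folding recursion above works like this: a deterministic function node folds only when every child is a constant or itself foldable; a column reference, or any child that cannot be folded, makes the whole fold return null and leaves the expression untouched.

class FoldSketch {
  static abstract class Expr {}
  static class Const extends Expr {
    final int value;
    Const(int value) { this.value = value; }
  }
  static class Col extends Expr {}   // column reference; never foldable
  static class Add extends Expr {    // stands in for a deterministic UDF
    final Expr left, right;
    Add(Expr left, Expr right) { this.left = left; this.right = right; }
  }

  static Const fold(Expr e) {
    if (e instanceof Const) {
      return (Const) e;
    }
    if (e instanceof Add) {
      Const l = fold(((Add) e).left);
      Const r = fold(((Add) e).right);
      if (l != null && r != null) {
        return new Const(l.value + r.value); // evaluate at compile time
      }
    }
    return null; // column reference or partially constant subtree
  }

  public static void main(String[] args) {
    System.out.println(fold(new Add(new Const(1), new Const(2))).value); // 3
    System.out.println(fold(new Add(new Col(), new Const(2))));          // null
  }
}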

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java Tue Jan 28 05:48:03 2014
@@ -30,7 +30,7 @@ import org.apache.hadoop.fs.Path;
 @Explain(displayName = "File Output Operator")
 public class FileSinkDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
-  private String dirName;
+  private Path dirName;
   // normally statsKeyPref will be the same as dirName, but the latter
   // could be changed in local execution optimization
   private String statsKeyPref;
@@ -69,7 +69,7 @@ public class FileSinkDesc extends Abstra
   // the sub-queries write to sub-directories of a common directory. So, the file sink
   // descriptors for subq1 and subq2 are linked.
   private boolean linkedFileSink = false;
-  private String parentDir;
+  private Path parentDir;
   transient private List<FileSinkDesc> linkedFileSinkDesc;
 
   private boolean statsReliable;
@@ -81,7 +81,7 @@ public class FileSinkDesc extends Abstra
   public FileSinkDesc() {
   }
 
-  public FileSinkDesc(final String dirName, final TableDesc tableInfo,
+  public FileSinkDesc(final Path dirName, final TableDesc tableInfo,
       final boolean compressed, final int destTableId, final boolean multiFileSpray,
       final boolean canBeMerged, final int numFiles, final int totalFiles,
       final ArrayList<ExprNodeDesc> partitionCols, final DynamicPartitionCtx dpCtx) {
@@ -98,7 +98,7 @@ public class FileSinkDesc extends Abstra
     this.dpCtx = dpCtx;
   }
 
-  public FileSinkDesc(final String dirName, final TableDesc tableInfo,
+  public FileSinkDesc(final Path dirName, final TableDesc tableInfo,
       final boolean compressed) {
 
     this.dirName = dirName;
@@ -132,15 +132,15 @@ public class FileSinkDesc extends Abstra
   }
 
   @Explain(displayName = "directory", normalExplain = false)
-  public String getDirName() {
+  public Path getDirName() {
     return dirName;
   }
 
-  public void setDirName(final String dirName) {
+  public void setDirName(final Path dirName) {
     this.dirName = dirName;
   }
 
-  public String getFinalDirName() {
+  public Path getFinalDirName() {
     return linkedFileSink ? parentDir : dirName;
   }
 
@@ -320,11 +320,11 @@ public class FileSinkDesc extends Abstra
     this.linkedFileSink = linkedFileSink;
   }
 
-  public String getParentDir() {
+  public Path getParentDir() {
     return parentDir;
   }
 
-  public void setParentDir(String parentDir) {
+  public void setParentDir(Path parentDir) {
     this.parentDir = parentDir;
   }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableSinkDesc.java Tue Jan 28 05:48:03 2014
@@ -20,15 +20,15 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Iterator;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
+import org.apache.hadoop.fs.Path;
+
 /**
  * Map Join operator Descriptor implementation.
  *
@@ -41,8 +41,8 @@ public class HashTableSinkDesc extends J
   // used to handle skew join
   private boolean handleSkewJoin = false;
   private int skewKeyDefinition = -1;
-  private Map<Byte, String> bigKeysDirMap;
-  private Map<Byte, Map<Byte, String>> smallKeysDirMap;
+  private Map<Byte, Path> bigKeysDirMap;
+  private Map<Byte, Map<Byte, Path>> smallKeysDirMap;
   private Map<Byte, TableDesc> skewKeysValuesTables;
 
   // alias to key mapping
@@ -173,22 +173,22 @@ public class HashTableSinkDesc extends J
   }
 
   @Override
-  public Map<Byte, String> getBigKeysDirMap() {
+  public Map<Byte, Path> getBigKeysDirMap() {
     return bigKeysDirMap;
   }
 
   @Override
-  public void setBigKeysDirMap(Map<Byte, String> bigKeysDirMap) {
+  public void setBigKeysDirMap(Map<Byte, Path> bigKeysDirMap) {
     this.bigKeysDirMap = bigKeysDirMap;
   }
 
   @Override
-  public Map<Byte, Map<Byte, String>> getSmallKeysDirMap() {
+  public Map<Byte, Map<Byte, Path>> getSmallKeysDirMap() {
     return smallKeysDirMap;
   }
 
   @Override
-  public void setSmallKeysDirMap(Map<Byte, Map<Byte, String>> smallKeysDirMap) {
+  public void setSmallKeysDirMap(Map<Byte, Map<Byte, Path>> smallKeysDirMap) {
     this.smallKeysDirMap = smallKeysDirMap;
   }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Tue Jan 28 05:48:03 2014
@@ -86,6 +86,7 @@ public enum HiveOperation {
   SHOW_GRANT("SHOW_GRANT", null, null),
   GRANT_ROLE("GRANT_ROLE", null, null),
   REVOKE_ROLE("REVOKE_ROLE", null, null),
+  SHOW_ROLES("SHOW_ROLES", null, null),
   SHOW_ROLE_GRANT("SHOW_ROLE_GRANT", null, null),
   ALTERTABLE_PROTECTMODE("ALTERTABLE_PROTECTMODE", new Privilege[]{Privilege.ALTER_METADATA}, null),
   ALTERPARTITION_PROTECTMODE("ALTERPARTITION_PROTECTMODE", new Privilege[]{Privilege.ALTER_METADATA}, null),
@@ -105,6 +106,7 @@ public enum HiveOperation {
   ALTERTABLE_SKEWED("ALTERTABLE_SKEWED", new Privilege[] {Privilege.ALTER_METADATA}, null),
   ALTERTBLPART_SKEWED_LOCATION("ALTERTBLPART_SKEWED_LOCATION",
       new Privilege[] {Privilege.ALTER_DATA}, null),
+  ALTERTABLE_PARTCOLTYPE("ALTERTABLE_PARTCOLTYPE", new Privilege[] { Privilege.SELECT }, new Privilege[] { Privilege.ALTER_DATA }),
   ALTERVIEW_RENAME("ALTERVIEW_RENAME", new Privilege[] {Privilege.ALTER_METADATA}, null),
   ;
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java Tue Jan 28 05:48:03 2014
@@ -26,6 +26,8 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.fs.Path;
+
 
 /**
  * Join operator Descriptor implementation.
@@ -44,8 +46,8 @@ public class JoinDesc extends AbstractOp
   // used to handle skew join
   private boolean handleSkewJoin = false;
   private int skewKeyDefinition = -1;
-  private Map<Byte, String> bigKeysDirMap;
-  private Map<Byte, Map<Byte, String>> smallKeysDirMap;
+  private Map<Byte, Path> bigKeysDirMap;
+  private Map<Byte, Map<Byte, Path>> smallKeysDirMap;
   private Map<Byte, TableDesc> skewKeysValuesTables;
 
   // alias to key mapping
@@ -128,12 +130,12 @@ public class JoinDesc extends AbstractOp
     }
 
     if (getBigKeysDirMap() != null) {
-      Map<Byte, String> cloneBigKeysDirMap = new HashMap<Byte, String>();
+      Map<Byte, Path> cloneBigKeysDirMap = new HashMap<Byte, Path>();
       cloneBigKeysDirMap.putAll(getBigKeysDirMap());
       ret.setBigKeysDirMap(cloneBigKeysDirMap);
     }
     if (getSmallKeysDirMap() != null) {
-      Map<Byte, Map<Byte, String>> cloneSmallKeysDirMap = new HashMap<Byte, Map<Byte,String>> ();
+      Map<Byte, Map<Byte, Path>> cloneSmallKeysDirMap = new HashMap<Byte, Map<Byte,Path>> ();
       cloneSmallKeysDirMap.putAll(getSmallKeysDirMap());
       ret.setSmallKeysDirMap(cloneSmallKeysDirMap);
     }
@@ -364,7 +366,7 @@ public class JoinDesc extends AbstractOp
   /**
    * @return mapping from tbl to dir for big keys.
    */
-  public Map<Byte, String> getBigKeysDirMap() {
+  public Map<Byte, Path> getBigKeysDirMap() {
     return bigKeysDirMap;
   }
 
@@ -373,14 +375,14 @@ public class JoinDesc extends AbstractOp
    *
    * @param bigKeysDirMap
    */
-  public void setBigKeysDirMap(Map<Byte, String> bigKeysDirMap) {
+  public void setBigKeysDirMap(Map<Byte, Path> bigKeysDirMap) {
     this.bigKeysDirMap = bigKeysDirMap;
   }
 
   /**
    * @return mapping from tbl to dir for small keys
    */
-  public Map<Byte, Map<Byte, String>> getSmallKeysDirMap() {
+  public Map<Byte, Map<Byte, Path>> getSmallKeysDirMap() {
     return smallKeysDirMap;
   }
 
@@ -389,7 +391,7 @@ public class JoinDesc extends AbstractOp
    *
    * @param smallKeysDirMap
    */
-  public void setSmallKeysDirMap(Map<Byte, Map<Byte, String>> smallKeysDirMap) {
+  public void setSmallKeysDirMap(Map<Byte, Map<Byte, Path>> smallKeysDirMap) {
     this.smallKeysDirMap = smallKeysDirMap;
   }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java Tue Jan 28 05:48:03 2014
@@ -83,7 +83,7 @@ public class MapWork extends BaseWork {
       new HashMap<String, List<SortCol>>();
 
   private MapredLocalWork mapLocalWork;
-  private String tmpHDFSFileURI;
+  private Path tmpHDFSPath;
 
   private String inputformat;
 
@@ -432,12 +432,12 @@ public class MapWork extends BaseWork {
     this.opParseCtxMap = opParseCtxMap;
   }
 
-  public String getTmpHDFSFileURI() {
-    return tmpHDFSFileURI;
+  public Path getTmpHDFSPath() {
+    return tmpHDFSPath;
   }
 
-  public void setTmpHDFSFileURI(String tmpHDFSFileURI) {
-    this.tmpHDFSFileURI = tmpHDFSFileURI;
+  public void setTmpHDFSPath(Path tmpHDFSPath) {
+    this.tmpHDFSPath = tmpHDFSPath;
   }
 
   public void mergingInto(MapWork mapWork) {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java Tue Jan 28 05:48:03 2014
@@ -37,7 +37,7 @@ public class MapredLocalWork implements 
   private LinkedHashMap<String, FetchWork> aliasToFetchWork;
   private boolean inputFileChangeSensitive;
   private BucketMapJoinContext bucketMapjoinContext;
-  private String tmpFileURI;
+  private Path tmpPath;
   private String stageID;
 
   private List<Operator<? extends OperatorDesc>> dummyParentOp ;
@@ -55,7 +55,7 @@ public class MapredLocalWork implements 
   }
 
   public MapredLocalWork(MapredLocalWork clone){
-    this.tmpFileURI = clone.tmpFileURI;
+    this.tmpPath = clone.tmpPath;
     this.inputFileChangeSensitive=clone.inputFileChangeSensitive;
 
   }
@@ -151,12 +151,12 @@ public class MapredLocalWork implements 
     return null;
   }
 
-  public void setTmpFileURI(String tmpFileURI) {
-    this.tmpFileURI = tmpFileURI;
+  public void setTmpPath(Path tmpPath) {
+    this.tmpPath = tmpPath;
   }
 
-  public String getTmpFileURI() {
-    return tmpFileURI;
+  public Path getTmpPath() {
+    return tmpPath;
   }
 
   public String getBucketFileName(String bigFileName) {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Tue Jan 28 05:48:03 2014
@@ -85,7 +85,7 @@ public final class PlanUtils {
     FIELD, JEXL
   };
 
-  public static long getCountForMapJoinDumpFilePrefix() {
+  public static synchronized long getCountForMapJoinDumpFilePrefix() {
     return countForMapJoinDumpFilePrefix++;
   }
 
@@ -103,46 +103,51 @@ public final class PlanUtils {
 
   public static TableDesc getDefaultTableDesc(CreateTableDesc localDirectoryDesc,
       String cols, String colTypes ) {
-    TableDesc tableDesc = getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode), cols,
+    TableDesc ret = getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode), cols,
         colTypes, false);;
     if (localDirectoryDesc == null) {
-      return tableDesc;
+      return ret;
     }
 
     try {
+      Properties properties = ret.getProperties();
+
       if (localDirectoryDesc.getFieldDelim() != null) {
-        tableDesc.getProperties().setProperty(
+        properties.setProperty(
             serdeConstants.FIELD_DELIM, localDirectoryDesc.getFieldDelim());
-        tableDesc.getProperties().setProperty(
+        properties.setProperty(
             serdeConstants.SERIALIZATION_FORMAT, localDirectoryDesc.getFieldDelim());
       }
       if (localDirectoryDesc.getLineDelim() != null) {
-        tableDesc.getProperties().setProperty(
+        properties.setProperty(
             serdeConstants.LINE_DELIM, localDirectoryDesc.getLineDelim());
       }
       if (localDirectoryDesc.getCollItemDelim() != null) {
-        tableDesc.getProperties().setProperty(
+        properties.setProperty(
             serdeConstants.COLLECTION_DELIM, localDirectoryDesc.getCollItemDelim());
       }
       if (localDirectoryDesc.getMapKeyDelim() != null) {
-        tableDesc.getProperties().setProperty(
+        properties.setProperty(
             serdeConstants.MAPKEY_DELIM, localDirectoryDesc.getMapKeyDelim());
       }
       if (localDirectoryDesc.getFieldEscape() !=null) {
-        tableDesc.getProperties().setProperty(
+        properties.setProperty(
             serdeConstants.ESCAPE_CHAR, localDirectoryDesc.getFieldEscape());
       }
       if (localDirectoryDesc.getSerName() != null) {
-        tableDesc.getProperties().setProperty(
+        properties.setProperty(
             serdeConstants.SERIALIZATION_LIB, localDirectoryDesc.getSerName());
       }
       if (localDirectoryDesc.getOutputFormat() != null){
-          tableDesc.setOutputFileFormatClass(Class.forName(localDirectoryDesc.getOutputFormat()));
+          ret.setOutputFileFormatClass(Class.forName(localDirectoryDesc.getOutputFormat()));
       }
       if (localDirectoryDesc.getNullFormat() != null) {
-        tableDesc.getProperties().setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT,
+        properties.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT,
               localDirectoryDesc.getNullFormat());
       }
+      if (localDirectoryDesc.getTblProps() != null) {
+        properties.putAll(localDirectoryDesc.getTblProps());
+      }
 
     } catch (ClassNotFoundException e) {
       // mimicking behaviour in CreateTableDesc tableDesc creation
@@ -150,7 +155,7 @@ public final class PlanUtils {
       e.printStackTrace();
       return null;
     }
-    return tableDesc;
+    return ret;
   }
 
   /**
@@ -245,7 +250,7 @@ public final class PlanUtils {
     }
 
     // It is not a very clean way, and should be modified later - due to
-    // compatiblity reasons,
+    // compatibility reasons,
     // user sees the results as json for custom scripts and has no way for
     // specifying that.
     // Right now, it is hard-coded in the code
@@ -340,6 +345,10 @@ public final class PlanUtils {
             crtTblDesc.getDatabaseName() + "." + crtTblDesc.getTableName());
       }
 
+      if (crtTblDesc.getTblProps() != null) {
+        properties.putAll(crtTblDesc.getTblProps());
+      }
+
       // replace the default input & output file format with those found in
       // crtTblDesc
       Class c1 = Class.forName(crtTblDesc.getInputFormat());
@@ -465,7 +474,7 @@ public final class PlanUtils {
   /**
    * Convert the ColumnList to FieldSchema list.
    *
-   * Adds uniontype for distinctColIndices.
+   * Adds union type for distinctColIndices.
    */
   public static List<FieldSchema> getFieldSchemasFromColumnListWithLength(
       List<ExprNodeDesc> cols, List<List<Integer>> distinctColIndices,

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java Tue Jan 28 05:48:03 2014
@@ -49,7 +49,7 @@ public class RoleDDLDesc extends DDLDesc
   }
 
   public static enum RoleOperation {
-    DROP_ROLE("drop_role"), CREATE_ROLE("create_role"), SHOW_ROLE_GRANT("show_roles");
+    DROP_ROLE("drop_role"), CREATE_ROLE("create_role"), SHOW_ROLE_GRANT("show_role_grant"), SHOW_ROLES("show_roles");
     private String operationName;
 
     private RoleOperation() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java Tue Jan 28 05:48:03 2014
@@ -50,6 +50,9 @@ public class StatsWork implements Serial
 
   private boolean isPartialScanAnalyzeCommand = false;
 
+  // The sourceTask for a TableScan is not (currently) changed, but that of a
+  // FileSink might be changed by various optimizers (auto.convert.join, for
+  // example), so this is set by DriverContext at runtime.
   private transient Task sourceTask;
 
   public StatsWork() {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java Tue Jan 28 05:48:03 2014
@@ -99,6 +99,7 @@ public class SetProcessor implements Com
     }
   }
 
+  @Override
   public void init() {
   }
 
@@ -181,18 +182,18 @@ public class SetProcessor implements Com
   }
 
 
-  private CommandProcessorResponse getVariable(String varname){
+  private CommandProcessorResponse getVariable(String varname) {
     SessionState ss = SessionState.get();
     if (varname.equals("silent")){
       ss.out.println("silent" + "=" + ss.getIsSilent());
-      return new CommandProcessorResponse(0);
+      return createProcessorSuccessResponse();
     }
     if (varname.startsWith(SetProcessor.SYSTEM_PREFIX)){
       String propName = varname.substring(SetProcessor.SYSTEM_PREFIX.length());
       String result = System.getProperty(propName);
       if (result != null){
         ss.out.println(SetProcessor.SYSTEM_PREFIX+propName + "=" + result);
-        return new CommandProcessorResponse(0);
+        return createProcessorSuccessResponse();
       } else {
         ss.out.println( propName + " is undefined as a system property");
         return new CommandProcessorResponse(1);
@@ -201,7 +202,7 @@ public class SetProcessor implements Com
       String var = varname.substring(ENV_PREFIX.length());
       if (System.getenv(var)!=null){
         ss.out.println(SetProcessor.ENV_PREFIX+var + "=" + System.getenv(var));
-        return new CommandProcessorResponse(0);
+        return createProcessorSuccessResponse();
       } else {
         ss.out.println(varname + " is undefined as an environment variable");
         return new CommandProcessorResponse(1);
@@ -210,7 +211,7 @@ public class SetProcessor implements Com
       String var = varname.substring(SetProcessor.HIVECONF_PREFIX.length());
       if (ss.getConf().get(var)!=null){
         ss.out.println(SetProcessor.HIVECONF_PREFIX+var + "=" + ss.getConf().get(var));
-        return new CommandProcessorResponse(0);
+        return createProcessorSuccessResponse();
       } else {
         ss.out.println(varname + " is undefined as a hive configuration variable");
         return new CommandProcessorResponse(1);
@@ -219,30 +220,34 @@ public class SetProcessor implements Com
       String var = varname.substring(SetProcessor.HIVEVAR_PREFIX.length());
       if (ss.getHiveVariables().get(var)!=null){
         ss.out.println(SetProcessor.HIVEVAR_PREFIX+var + "=" + ss.getHiveVariables().get(var));
-        return new CommandProcessorResponse(0);
+        return createProcessorSuccessResponse();
       } else {
         ss.out.println(varname + " is undefined as a hive variable");
         return new CommandProcessorResponse(1);
       }
     } else {
       dumpOption(varname);
-      return new CommandProcessorResponse(0, null, null, getSchema());
+      return createProcessorSuccessResponse();
     }
   }
 
+  private CommandProcessorResponse createProcessorSuccessResponse() {
+    return new CommandProcessorResponse(0, null, null, getSchema());
+  }
+
+  @Override
   public CommandProcessorResponse run(String command) {
     SessionState ss = SessionState.get();
-    Schema sch = getSchema();
 
     String nwcmd = command.trim();
     if (nwcmd.equals("")) {
       dumpOptions(ss.getConf().getChangedProperties());
-      return new CommandProcessorResponse(0, null, null, sch);
+      return createProcessorSuccessResponse();
     }
 
     if (nwcmd.equals("-v")) {
       dumpOptions(ss.getConf().getAllProperties());
-      return new CommandProcessorResponse(0, null, null, sch);
+      return createProcessorSuccessResponse();
     }
 
     String[] part = new String[2];

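Every success path in SetProcessor now funnels through
createProcessorSuccessResponse(), so the schema is attached uniformly instead
of only on the dump paths. A hypothetical caller, assuming a started
SessionState (the Driver normally strips the leading "set " before
delegating):

    SessionState.start(new HiveConf(SetProcessor.class));
    CommandProcessorResponse resp =
        new SetProcessor().run("hiveconf:hive.exec.parallel=true");
    if (resp.getResponseCode() != 0) {
      // a non-zero code signals failure, e.g. an undefined variable on lookup
      System.err.println("set failed: " + resp.getErrorMessage());
    }
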
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java Tue Jan 28 05:48:03 2014
@@ -174,7 +174,7 @@ public class StorageBasedAuthorizationPr
     if ((part == null) || (part.getLocation() == null)) {
       authorize(table, readRequiredPriv, writeRequiredPriv);
     } else {
-      authorize(part.getPartitionPath(), readRequiredPriv, writeRequiredPriv);
+      authorize(part.getDataLocation(), readRequiredPriv, writeRequiredPriv);
     }
   }
 

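The getPartitionPath() -> getDataLocation() rename recurs in StatsUtils below,
presumably part of a wider Partition API cleanup in this commit; the method
returns a Path directly, so callers stay purely Path-based:

    Path partDir = part.getDataLocation();        // was: part.getPartitionPath()
    FileSystem fs = partDir.getFileSystem(conf);  // resolve the owning filesystem
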
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Tue Jan 28 05:48:03 2014
@@ -54,7 +54,11 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl;
 import org.apache.hadoop.hive.ql.util.DosToUnix;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -116,8 +120,14 @@ public class SessionState {
    */
   private HiveOperation commandType;
 
+  private String lastCommand;
+
   private HiveAuthorizationProvider authorizer;
 
+  private HiveAuthorizer authorizerV2;
+
+  public enum AuthorizationMode { V1, V2 }
+
   private HiveAuthenticationProvider authenticator;
 
   private CreateTableAutomaticGrant createTableGrants;
@@ -297,15 +307,33 @@ public class SessionState {
       // that would cause ClassNotFoundException otherwise
       throw new RuntimeException(e);
     }
+    setupAuth(startSs);
+    return startSs;
+  }
 
+  /**
+   * Set up the authentication and authorization plugins for this session.
+   * @param startSs the session being initialized
+   */
+  private static void setupAuth(SessionState startSs) {
     try {
       startSs.authenticator = HiveUtils.getAuthenticator(
           startSs.getConf(),HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
       startSs.authorizer = HiveUtils.getAuthorizeProviderManager(
           startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
-          startSs.authenticator);
-      startSs.createTableGrants = CreateTableAutomaticGrant.create(startSs
-          .getConf());
+          startSs.authenticator, true);
+
+      if(startSs.authorizer == null){
+        // if authorizer is null, the new (V2) authorization plugin must be specified in the config
+        HiveAuthorizerFactory authorizerFactory =
+            HiveUtils.getAuthorizerFactory(startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
+        startSs.authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(),
+            startSs.getConf(), startSs.authenticator.getUserName());
+      }
+      else{
+        startSs.createTableGrants = CreateTableAutomaticGrant.create(startSs
+            .getConf());
+      }
     } catch (HiveException e) {
       throw new RuntimeException(e);
     }
@@ -323,8 +351,7 @@ public class SessionState {
     } else {
        LOG.info("No Tez session required at this point. hive.execution.engine=mr.");
     }
-
-    return startSs;
+    return;
   }
 
   /**
@@ -379,6 +406,14 @@ public class SessionState {
     return UUID.randomUUID().toString();
   }
 
+  public String getLastCommand() {
+    return lastCommand;
+  }
+
+  public void setLastCommand(String lastCommand) {
+    this.lastCommand = lastCommand;
+  }
+
   /**
    * This class provides helper routines to emit informational and error
    * messages to the user and log4j files while obeying the current session's
@@ -749,6 +784,10 @@ public class SessionState {
     this.authorizer = authorizer;
   }
 
+  public HiveAuthorizer getAuthorizerV2() {
+    return authorizerV2;
+  }
+
   public HiveAuthenticationProvider getAuthenticator() {
     return authenticator;
   }
@@ -842,6 +881,20 @@ public class SessionState {
     }
   }
 
+  public AuthorizationMode getAuthorizationMode(){
+    if(authorizer != null){
+      return AuthorizationMode.V1;
+    }else if(authorizerV2 != null){
+      return AuthorizationMode.V2;
+    }
+    // should not happen - this must not be called before start() has initialized the plugins
+    throw new AssertionError("Authorization plugins not initialized!");
+  }
+
+  public boolean isAuthorizationModeV2(){
+    return getAuthorizationMode() == AuthorizationMode.V2;
+  }
+
   /**
    * @param resetPerfLogger
    * @return  Tries to return an instance of the class whose name is configured in

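setupAuth() establishes an either/or invariant: after start(), exactly one of
authorizer (V1) or authorizerV2 is non-null, which is what
getAuthorizationMode() relies on. A hypothetical call site branching on the
mode (the comments stand in for the actual privilege checks):

    SessionState ss = SessionState.get();
    if (ss.isAuthorizationModeV2()) {
      HiveAuthorizer v2 = ss.getAuthorizerV2();
      // delegate privilege checks to the V2 plugin interface...
    } else {
      HiveAuthorizationProvider v1 = ss.getAuthorizer();
      // fall back to the legacy authorization provider...
    }
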
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java Tue Jan 28 05:48:03 2014
@@ -30,6 +30,7 @@ import org.apache.hadoop.util.Reflection
 
 import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS;
 import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_STATS_KEY_PREFIX_MAX_LENGTH;
+import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_STATS_KEY_PREFIX_RESERVE_LENGTH;
 
 /**
  * A factory of stats publisher and aggregator implementations of the
@@ -51,6 +52,10 @@ public final class StatsFactory {
       maxPrefixLength = maxPrefixLength < 0 ? groupNameMax :
           Math.min(maxPrefixLength, groupNameMax);
     }
+    if (maxPrefixLength > 0) {
+      int reserve = HiveConf.getIntVar(conf, HIVE_STATS_KEY_PREFIX_RESERVE_LENGTH);
+      return reserve < 0 ? maxPrefixLength : maxPrefixLength - reserve;
+    }
     return maxPrefixLength;
   }
 

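The new reserve keeps the effective key prefix short enough that a suffix
appended at runtime still fits under the backend's limit. A worked example
with illustrative numbers (property names assumed to follow the usual ConfVars
naming):

    int maxPrefixLength = 200;  // e.g. hive.stats.key.prefix.max.length
    int reserve = 24;           // e.g. hive.stats.key.prefix.reserve.length
    int effective = reserve < 0 ? maxPrefixLength : maxPrefixLength - reserve;
    // effective == 176: publishers keep 24 characters of headroom
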
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java Tue Jan 28 05:48:03 2014
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hive.ql.stats;
 
 import java.util.List;
@@ -305,7 +323,7 @@ public class StatsUtils {
   public static List<Long> getFileSizeForPartitions(HiveConf conf, List<Partition> parts) {
     List<Long> sizes = Lists.newArrayList();
     for (Partition part : parts) {
-      Path path = part.getPartitionPath();
+      Path path = part.getDataLocation();
       long size = 0;
       try {
         FileSystem fs = path.getFileSystem(conf);

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java Tue Jan 28 05:48:03 2014
@@ -187,4 +187,15 @@ public abstract class GenericUDF impleme
    */
   public void close() throws IOException {
   }
+
+  /**
+   * Some functions are sensitive to the order of their arguments (comparisons, for example); they can override this to return the order-flipped equivalent.
+   */
+  public GenericUDF flip() {
+    return this;
+  }
+
+  public String getUdfName() {
+    return getClass().getName();
+  }
 }

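flip() gives optimizers a hook for rewriting a comparison when they reverse
its operands, e.g. normalizing a predicate so the column lands on the left;
the comparison UDFs later in this commit supply the overrides. An illustrative
sketch (the caller is presumably expected to swap the argument expressions as
well):

    GenericUDF gt = new GenericUDFOPGreaterThan();  // 10 > col
    GenericUDF lt = gt.flip();                      // a GenericUDFOPLessThan
    // swapping the two children alongside flip() preserves the predicate:
    // 10 > col  <=>  col < 10
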
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java Tue Jan 28 05:48:03 2014
@@ -87,7 +87,7 @@ public class GenericUDFBridge extends Ge
    *
    * @param udfName
    *          The name of the corresponding udf.
-   * @param isOperator
+   * @param isOperator true if the function is a built-in operator (affects how the expression is displayed)
    * @param udfClassName java class name of UDF
    */
   public GenericUDFBridge(String udfName, boolean isOperator,
@@ -105,6 +105,7 @@ public class GenericUDFBridge extends Ge
     this.udfName = udfName;
   }
 
+  @Override
   public String getUdfName() {
     return udfName;
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java Tue Jan 28 05:48:03 2014
@@ -52,7 +52,6 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarGreaterEqualStringColumn;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -139,4 +138,9 @@ public class GenericUDFOPEqualOrGreaterT
     return result;
   }
 
+  @Override
+  public GenericUDF flip() {
+    return new GenericUDFOPEqualOrLessThan();
+  }
+
 }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java Tue Jan 28 05:48:03 2014
@@ -52,7 +52,6 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarLessEqualStringColumn;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -139,5 +138,10 @@ public class GenericUDFOPEqualOrLessThan
     return result;
   }
 
+  @Override
+  public GenericUDF flip() {
+    return new GenericUDFOPEqualOrGreaterThan();
+  }
+
 }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java Tue Jan 28 05:48:03 2014
@@ -52,7 +52,6 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarGreaterStringColumn;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -139,5 +138,10 @@ public class GenericUDFOPGreaterThan ext
     return result;
   }
 
+  @Override
+  public GenericUDF flip() {
+    return new GenericUDFOPLessThan();
+  }
+
 }
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java Tue Jan 28 05:48:03 2014
@@ -23,7 +23,6 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -110,4 +109,9 @@ public class GenericUDFOPLessThan extend
     return result;
   }
 
+  @Override
+  public GenericUDF flip() {
+    return new GenericUDFOPGreaterThan();
+  }
+
 }

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Tue Jan 28 05:48:03 2014
@@ -196,8 +196,8 @@ public class TestExecDriver extends Test
   @SuppressWarnings("unchecked")
   private void populateMapPlan1(Table src) {
 
-    Operator<FileSinkDesc> op2 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
-        + "mapplan1.out", Utilities.defaultTd, true));
+    Operator<FileSinkDesc> op2 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+        + "mapplan1.out"), Utilities.defaultTd, true));
     Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"),
         op2);
 
@@ -207,8 +207,8 @@ public class TestExecDriver extends Test
   @SuppressWarnings("unchecked")
   private void populateMapPlan2(Table src) {
 
-    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
-        + "mapplan2.out", Utilities.defaultTd, false));
+    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+        + "mapplan2.out"), Utilities.defaultTd, false));
 
     Operator<ScriptDesc> op2 = OperatorFactory.get(new ScriptDesc("cat",
         PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
@@ -243,8 +243,8 @@ public class TestExecDriver extends Test
     mr.setReduceWork(rWork);
 
     // reduce side work
-    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
-        + "mapredplan1.out", Utilities.defaultTd, false));
+    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+        + "mapredplan1.out"), Utilities.defaultTd, false));
 
     Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
         getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
@@ -273,8 +273,8 @@ public class TestExecDriver extends Test
     mr.setReduceWork(rWork);
 
     // reduce side work
-    Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
-        + "mapredplan2.out", Utilities.defaultTd, false));
+    Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+        + "mapredplan2.out"), Utilities.defaultTd, false));
 
     Operator<FilterDesc> op3 = OperatorFactory.get(getTestFilterDesc("0"), op4);
 
@@ -317,8 +317,8 @@ public class TestExecDriver extends Test
     rWork.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
-        + "mapredplan3.out", Utilities.defaultTd, false));
+    Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+        + "mapredplan3.out"), Utilities.defaultTd, false));
 
     Operator<SelectDesc> op5 = OperatorFactory.get(new SelectDesc(Utilities
         .makeList(new ExprNodeFieldDesc(TypeInfoFactory.stringTypeInfo,
@@ -360,8 +360,8 @@ public class TestExecDriver extends Test
     mr.setReduceWork(rWork);
 
     // reduce side work
-    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
-        + "mapredplan4.out", Utilities.defaultTd, false));
+    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+        + "mapredplan4.out"), Utilities.defaultTd, false));
 
     Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
         getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
@@ -399,8 +399,8 @@ public class TestExecDriver extends Test
     rWork.getTagToValueDesc().add(op0.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
-        + "mapredplan5.out", Utilities.defaultTd, false));
+    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+        + "mapredplan5.out"), Utilities.defaultTd, false));
 
     Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
         getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
@@ -440,8 +440,8 @@ public class TestExecDriver extends Test
     rWork.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
 
     // reduce side work
-    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir + File.separator
-        + "mapredplan6.out", Utilities.defaultTd, false));
+    Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
+        + "mapredplan6.out"), Utilities.defaultTd, false));
 
     Operator<FilterDesc> op2 = OperatorFactory.get(getTestFilterDesc("0"), op3);
 

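The mechanical String -> Path migration above wraps the concatenated strings
as-is; the two-argument Path constructor would be an equivalent, slightly
tidier alternative that avoids manual File.separator handling:

    Path out = new Path(tmpdir, "mapplan1.out");
    Operator<FileSinkDesc> op2 =
        OperatorFactory.get(new FileSinkDesc(out, Utilities.defaultTd, true));
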
Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Tue Jan 28 05:48:03 2014
@@ -25,6 +25,7 @@ import java.util.LinkedHashMap;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -89,8 +90,8 @@ public class TestPlan extends TestCase {
       // store into configuration
 
       job.set("fs.default.name", "file:///");
-      Utilities.setMapRedWork(job, mrwork, System.getProperty("java.io.tmpdir") + File.separator +
-        System.getProperty("user.name") + File.separator + "hive");
+      Utilities.setMapRedWork(job, mrwork, new Path(System.getProperty("java.io.tmpdir") + File.separator +
+        System.getProperty("user.name") + File.separator + "hive"));
       MapredWork mrwork2 = Utilities.getMapRedWork(job);
       Utilities.clearWork(job);
 

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java?rev=1561947&r1=1561946&r2=1561947&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java Tue Jan 28 05:48:03 2014
@@ -26,10 +26,13 @@ import java.util.List;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.JobConf;
@@ -74,4 +77,33 @@ public class TestUtilities extends TestC
     assertEquals(desc.getExprString(), Utilities.deserializeExpression(
       Utilities.serializeExpression(desc)).getExprString());
   }
+
+  public void testgetDbTableName() throws HiveException{
+    String tablename;
+    String [] dbtab;
+    SessionState.start(new HiveConf(this.getClass()));
+    String curDefaultdb = SessionState.get().getCurrentDatabase();
+
+    //test table without db portion
+    tablename = "tab1";
+    dbtab = Utilities.getDbTableName(tablename);
+    assertEquals("db name", curDefaultdb, dbtab[0]);
+    assertEquals("table name", tablename, dbtab[1]);
+
+    //test table with db portion
+    tablename = "dab1.tab1";
+    dbtab = Utilities.getDbTableName(tablename);
+    assertEquals("db name", "dab1", dbtab[0]);
+    assertEquals("table name", "tab1", dbtab[1]);
+
+    //test invalid table name
+    tablename = "dab1.tab1.x1";
+    try {
+      dbtab = Utilities.getDbTableName(tablename);
+      fail("exception was expected for invalid table name");
+    } catch(HiveException ex){
+      assertEquals("Invalid table name " + tablename, ex.getMessage());
+    }
+  }
+
 }
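
The new test pins down the contract of Utilities.getDbTableName(): an
unqualified name resolves against the session's current database, a db.table
name splits in two, and anything with more than one dot is rejected. A sketch
consistent with those assertions (not necessarily the actual implementation):

    public static String[] getDbTableName(String tableName) throws HiveException {
      String[] names = tableName.split("\\.");
      switch (names.length) {
        case 1:
          return new String[] { SessionState.get().getCurrentDatabase(), tableName };
        case 2:
          return names;
        default:
          throw new HiveException("Invalid table name " + tableName);
      }
    }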