Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/02 21:57:07 UTC

svn commit: r1622108 [17/27] - in /hive/branches/tez: ./ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/ bin/ bin/ext/ checkstyle/ common/src/java/...

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java Tue Sep  2 19:56:56 2014
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.ql.securi
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.security.AccessControlException;
-import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.List;
 
@@ -34,12 +34,9 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.hive.common.FileUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -48,7 +45,6 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.shims.ShimLoader;
 
 /**
  * StorageBasedAuthorizationProvider is an implementation of
@@ -141,28 +137,77 @@ public class StorageBasedAuthorizationPr
   public void authorize(Database db, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
       throws HiveException, AuthorizationException {
     Path path = getDbLocation(db);
+
+    // extract drop privileges
+    DropPrivilegeExtractor privExtractor = new DropPrivilegeExtractor(readRequiredPriv,
+        writeRequiredPriv);
+    readRequiredPriv = privExtractor.getReadReqPriv();
+    writeRequiredPriv = privExtractor.getWriteReqPriv();
+
+    // authorize drops if there was a drop privilege requirement
+    if(privExtractor.hasDropPrivilege()) {
+      checkDeletePermission(path, getConf(), authenticator.getUserName());
+    }
+
     authorize(path, readRequiredPriv, writeRequiredPriv);
   }
 
   @Override
   public void authorize(Table table, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
       throws HiveException, AuthorizationException {
-
-    // To create/drop/alter a table, the owner should have WRITE permission on the database directory
-    authorize(hive_db.getDatabase(table.getDbName()), readRequiredPriv, writeRequiredPriv);
-
-    // If the user has specified a location - external or not, check if the user has the
     try {
       initWh();
-      String location = table.getTTable().getSd().getLocation();
-      if (location != null && !location.isEmpty()) {
-        authorize(new Path(location), readRequiredPriv, writeRequiredPriv);
-      }
     } catch (MetaException ex) {
       throw hiveException(ex);
     }
+
+    // extract any drop privileges out of required privileges
+    DropPrivilegeExtractor privExtractor = new DropPrivilegeExtractor(readRequiredPriv,
+        writeRequiredPriv);
+    readRequiredPriv = privExtractor.getReadReqPriv();
+    writeRequiredPriv = privExtractor.getWriteReqPriv();
+
+    // if CREATE or DROP priv requirement is there, the owner should have WRITE permission on
+    // the database directory
+    if (privExtractor.hasDropPrivilege || requireCreatePrivilege(readRequiredPriv)
+        || requireCreatePrivilege(writeRequiredPriv)) {
+      authorize(hive_db.getDatabase(table.getDbName()), new Privilege[] {},
+          new Privilege[] { Privilege.ALTER_DATA });
+    }
+
+    Path path = table.getDataLocation();
+    // authorize drops if there was a drop privilege requirement, and
+    // table is not external (external table data is not dropped)
+    if (privExtractor.hasDropPrivilege() && table.getTableType() != TableType.EXTERNAL_TABLE) {
+      checkDeletePermission(path, getConf(), authenticator.getUserName());
+    }
+
+    // If the user has specified a location - external or not, check if the user
+    // has the permissions on the table dir
+    if (path != null) {
+      authorize(path, readRequiredPriv, writeRequiredPriv);
+    }
   }
 
+
+  /**
+   * Check whether the given privileges include the CREATE privilege.
+   * @param privs privileges to check (may be null)
+   * @return true if the given set of privileges contains the CREATE privilege
+   */
+  private boolean requireCreatePrivilege(Privilege[] privs) {
+    if(privs == null) {
+      return false;
+    }
+    for (Privilege priv : privs) {
+      if (priv.equals(Privilege.CREATE)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+
   @Override
   public void authorize(Partition part, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
       throws HiveException, AuthorizationException {
@@ -173,17 +218,39 @@ public class StorageBasedAuthorizationPr
       Privilege[] writeRequiredPriv)
       throws HiveException, AuthorizationException {
 
+    // extract drop privileges
+    DropPrivilegeExtractor privExtractor = new DropPrivilegeExtractor(readRequiredPriv,
+        writeRequiredPriv);
+    readRequiredPriv = privExtractor.getReadReqPriv();
+    writeRequiredPriv = privExtractor.getWriteReqPriv();
+
+    // authorize drops if there was a drop privilege requirement
+    if(privExtractor.hasDropPrivilege()) {
+      checkDeletePermission(part.getDataLocation(), getConf(), authenticator.getUserName());
+    }
+
     // Partition path can be null in the case of a new create partition - in this case,
     // we try to default to checking the permissions of the parent table.
     // Partition itself can also be null, in cases where this gets called as a generic
     // catch-all call in cases like those with CTAS onto an unpartitioned table (see HIVE-1887)
     if ((part == null) || (part.getLocation() == null)) {
-      authorize(table, readRequiredPriv, writeRequiredPriv);
+      // this should be the case only if this is a create partition.
+      // The privilege needed on the table should be ALTER_DATA, and not CREATE
+      authorize(table, new Privilege[]{}, new Privilege[]{Privilege.ALTER_DATA});
     } else {
       authorize(part.getDataLocation(), readRequiredPriv, writeRequiredPriv);
     }
   }
 
+  private void checkDeletePermission(Path dataLocation, Configuration conf, String userName)
+      throws HiveException {
+    try {
+      FileUtils.checkDeletePermission(dataLocation, conf, userName);
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
   @Override
   public void authorize(Table table, Partition part, List<String> columns,
       Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException,
@@ -191,11 +258,7 @@ public class StorageBasedAuthorizationPr
     // In a simple storage-based auth, we have no information about columns
     // living in different files, so we do simple partition-auth and ignore
     // the columns parameter.
-    if ((part != null) && (part.getTable() != null)) {
-      authorize(part.getTable(), part, readRequiredPriv, writeRequiredPriv);
-    } else {
-      authorize(table, part, readRequiredPriv, writeRequiredPriv);
-    }
+    authorize(table, part, readRequiredPriv, writeRequiredPriv);
   }
 
   @Override
@@ -373,4 +436,48 @@ public class StorageBasedAuthorizationPr
     // no-op - SBA does not attempt to authorize auth api call. Allow it
   }
 
+  public class DropPrivilegeExtractor {
+
+    private boolean hasDropPrivilege = false;
+    private final Privilege[] readReqPriv;
+    private final Privilege[] writeReqPriv;
+
+    public DropPrivilegeExtractor(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) {
+      this.readReqPriv = extractDropPriv(readRequiredPriv);
+      this.writeReqPriv = extractDropPriv(writeRequiredPriv);
+    }
+
+    private Privilege[] extractDropPriv(Privilege[] requiredPrivs) {
+      if (requiredPrivs == null) {
+        return null;
+      }
+      List<Privilege> privList = new ArrayList<Privilege>();
+      for (Privilege priv : requiredPrivs) {
+        if (priv.equals(Privilege.DROP)) {
+          hasDropPrivilege = true;
+        } else {
+          privList.add(priv);
+        }
+      }
+      return privList.toArray(new Privilege[0]);
+    }
+
+    public boolean hasDropPrivilege() {
+      return hasDropPrivilege;
+    }
+
+    public void setHasDropPrivilege(boolean hasDropPrivilege) {
+      this.hasDropPrivilege = hasDropPrivilege;
+    }
+
+    public Privilege[] getReadReqPriv() {
+      return readReqPriv;
+    }
+
+    public Privilege[] getWriteReqPriv() {
+      return writeReqPriv;
+    }
+
+  }
+
 }
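
The pattern running through this file: DROP is stripped out of the generic
read/write privilege arrays and routed to an explicit filesystem
delete-permission check on the data path, while the remaining privileges go
through the usual authorize() path. A minimal standalone sketch of that
extract-then-check flow, with plain strings standing in for Hive's Privilege
objects:

    import java.util.ArrayList;
    import java.util.List;

    // Simplified model of DropPrivilegeExtractor: "DROP" stands in for
    // Privilege.DROP, the array for readRequiredPriv/writeRequiredPriv.
    public class DropExtractDemo {
      public static void main(String[] args) {
        String[] writeRequired = { "DROP", "ALTER_DATA" };
        List<String> remaining = new ArrayList<String>();
        boolean hasDrop = false;
        for (String priv : writeRequired) {
          if (priv.equals("DROP")) {
            hasDrop = true;        // would trigger checkDeletePermission on the path
          } else {
            remaining.add(priv);   // goes through the normal permission check
          }
        }
        // prints: hasDrop=true, remaining=[ALTER_DATA]
        System.out.println("hasDrop=" + hasDrop + ", remaining=" + remaining);
      }
    }

The same extraction runs for databases, tables, and partitions above; only
the path handed to checkDeletePermission differs, and external tables are
exempted because their data is not dropped.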

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java Tue Sep  2 19:56:56 2014
@@ -114,6 +114,7 @@ public enum HiveOperationType {
   ALTERTABLE_SKEWED,
   ALTERTBLPART_SKEWED_LOCATION,
   ALTERVIEW_RENAME,
+  ALTERVIEW_AS,
   ALTERTABLE_COMPACT,
   SHOW_COMPACTIONS,
   SHOW_TRANSACTIONS,

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java Tue Sep  2 19:56:56 2014
@@ -50,16 +50,9 @@ public class HivePrincipal implements Co
 
   public HivePrincipal(String name, HivePrincipalType type){
     this.type = type;
-    if (type == HivePrincipalType.ROLE) {
-      // lower case role to make operations on it case insensitive
-      // when the old default authorization gets deprecated, this can move
-      // to ObjectStore code base
-      this.name = name.toLowerCase();
-    } else {
-      this.name = name;
-    }
-
+    this.name = name;
   }
+
   public String getName() {
     return name;
   }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java Tue Sep  2 19:56:56 2014
@@ -246,6 +246,8 @@ public class Operation2Privilege {
 (OWNER_PRIV_AR, OWNER_PRIV_AR));
     op2Priv.put(HiveOperationType.ALTERVIEW_RENAME, PrivRequirement.newIOPrivRequirement
 (OWNER_PRIV_AR, OWNER_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERVIEW_AS, PrivRequirement.newIOPrivRequirement
+(OWNER_PRIV_AR, OWNER_PRIV_AR));
     op2Priv.put(HiveOperationType.DROPVIEW, PrivRequirement.newIOPrivRequirement
 (OWNER_PRIV_AR, OWNER_PRIV_AR));
 
@@ -276,8 +278,9 @@ public class Operation2Privilege {
 (SEL_NOGRANT_AR, null));
     op2Priv.put(HiveOperationType.SHOW_TBLPROPERTIES, PrivRequirement.newIOPrivRequirement
 (SEL_NOGRANT_AR, null));
-    op2Priv.put(HiveOperationType.CREATETABLE_AS_SELECT, PrivRequirement.newIOPrivRequirement
-(SEL_NOGRANT_AR, null));
+    op2Priv.put(HiveOperationType.CREATETABLE_AS_SELECT, PrivRequirement.newPrivRequirementList(
+        new PrivRequirement(SEL_NOGRANT_AR, IOType.INPUT),
+        new PrivRequirement(OWNER_PRIV_AR, HivePrivilegeObjectType.DATABASE)));
 
     // QUERY,LOAD op can contain an insert & overwrite,
     // require delete privilege if this is an insert-overwrite
@@ -300,8 +303,9 @@ public class Operation2Privilege {
 
     // for now allow only create-view with 'select with grant'
     // the owner will also have select with grant privileges on new view
-    op2Priv.put(HiveOperationType.CREATEVIEW, PrivRequirement.newIOPrivRequirement
-(SEL_GRANT_AR, null));
+    op2Priv.put(HiveOperationType.CREATEVIEW, PrivRequirement.newPrivRequirementList(
+        new PrivRequirement(SEL_GRANT_AR, IOType.INPUT),
+        new PrivRequirement(OWNER_PRIV_AR, HivePrivilegeObjectType.DATABASE)));
 
     op2Priv.put(HiveOperationType.SHOWFUNCTIONS, PrivRequirement.newIOPrivRequirement
 (null, null));
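
With this change CREATETABLE_AS_SELECT and CREATEVIEW no longer use the
single input/output form: each now carries a list of per-scope requirements,
SELECT on the INPUT objects plus ownership of the target DATABASE. A
simplified model of such a requirement list (the Requirement class here is
illustrative, not Hive's PrivRequirement):

    import java.util.Arrays;
    import java.util.List;

    public class CtasRequirementDemo {
      static class Requirement {
        final String privilege, scope;
        Requirement(String privilege, String scope) {
          this.privilege = privilege;
          this.scope = scope;
        }
      }

      public static void main(String[] args) {
        List<Requirement> ctas = Arrays.asList(
            new Requirement("SELECT", "INPUT"),     // on every input table or view
            new Requirement("OWNER", "DATABASE"));  // on the database being written to
        for (Requirement r : ctas) {
          System.out.println("need " + r.privilege + " on " + r.scope);
        }
      }
    }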

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java Tue Sep  2 19:56:56 2014
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -25,6 +24,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.ListIterator;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
@@ -416,4 +416,43 @@ public class SQLAuthorizationUtils {
     return new HiveAuthzPluginException(prefix + ": " + e.getMessage(), e);
   }
 
+  /**
+   * Validate the principal type, and convert role name to lower case
+   * @param hPrincipal
+   * @return validated principal
+   * @throws HiveAuthzPluginException
+   */
+  public static HivePrincipal getValidatedPrincipal(HivePrincipal hPrincipal)
+      throws HiveAuthzPluginException {
+    if (hPrincipal == null || hPrincipal.getType() == null) {
+      // null principal
+      return hPrincipal;
+    }
+    switch (hPrincipal.getType()) {
+    case USER:
+      return hPrincipal;
+    case ROLE:
+      // lower case role names, for case insensitive behavior
+      return new HivePrincipal(hPrincipal.getName().toLowerCase(), hPrincipal.getType());
+    default:
+      throw new HiveAuthzPluginException("Invalid principal type in principal " + hPrincipal);
+    }
+  }
+
+  /**
+   * Calls getValidatedPrincipal on each principal in the list and updates the list in place
+   * @param hivePrincipals
+   * @return the list with each principal validated
+   * @throws HiveAuthzPluginException
+   */
+  public static List<HivePrincipal> getValidatedPrincipals(List<HivePrincipal> hivePrincipals)
+      throws HiveAuthzPluginException {
+    ListIterator<HivePrincipal> it = hivePrincipals.listIterator();
+    while(it.hasNext()){
+      it.set(getValidatedPrincipal(it.next()));
+    }
+    return hivePrincipals;
+  }
+
 }
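
With role-name lowercasing removed from the HivePrincipal constructor (see
the change above), getValidatedPrincipal becomes the single normalization
point, and getValidatedPrincipals rewrites the list in place through
ListIterator.set. A small sketch of that idiom, with plain strings standing
in for HivePrincipal:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.ListIterator;

    public class ValidateDemo {
      public static void main(String[] args) {
        List<String> roleNames = new ArrayList<String>(Arrays.asList("Admin", "ETL_Role"));
        ListIterator<String> it = roleNames.listIterator();
        while (it.hasNext()) {
          it.set(it.next().toLowerCase());  // replaces the element just returned by next()
        }
        System.out.println(roleNames);      // [admin, etl_role]
      }
    }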

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java Tue Sep  2 19:56:56 2014
@@ -43,17 +43,17 @@ public class SQLStdHiveAuthorizationVali
   private final HiveMetastoreClientFactory metastoreClientFactory;
   private final HiveConf conf;
   private final HiveAuthenticationProvider authenticator;
-  private final SQLStdHiveAccessController privController;
+  private final SQLStdHiveAccessControllerWrapper privController;
   public static final Log LOG = LogFactory.getLog(SQLStdHiveAuthorizationValidator.class);
 
   public SQLStdHiveAuthorizationValidator(HiveMetastoreClientFactory metastoreClientFactory,
       HiveConf conf, HiveAuthenticationProvider authenticator,
-      SQLStdHiveAccessController privController) {
+      SQLStdHiveAccessControllerWrapper privilegeManager) {
 
     this.metastoreClientFactory = metastoreClientFactory;
     this.conf = conf;
     this.authenticator = authenticator;
-    this.privController = privController;
+    this.privController = privilegeManager;
   }
 
   @Override

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java Tue Sep  2 19:56:56 2014
@@ -32,8 +32,8 @@ public class SQLStdHiveAuthorizerFactory
   @Override
   public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
       HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
-    SQLStdHiveAccessController privilegeManager =
-        new SQLStdHiveAccessController(metastoreClientFactory, conf, authenticator, ctx);
+    SQLStdHiveAccessControllerWrapper privilegeManager =
+        new SQLStdHiveAccessControllerWrapper(metastoreClientFactory, conf, authenticator, ctx);
     return new HiveAuthorizerImpl(
         privilegeManager,
         new SQLStdHiveAuthorizationValidator(metastoreClientFactory, conf, authenticator,

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Tue Sep  2 19:56:56 2014
@@ -45,6 +45,8 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.ql.MapRedStats;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
@@ -86,6 +88,8 @@ public class SessionState {
   private static final String HDFS_SESSION_PATH_KEY = "_hive.hdfs.session.path";
   private static final String TMP_TABLE_SPACE_KEY = "_hive.tmp_table_space";
   private final Map<String, Map<String, Table>> tempTables = new HashMap<String, Map<String, Table>>();
+  private final Map<String, Map<String, ColumnStatisticsObj>> tempTableColStats =
+      new HashMap<String, Map<String, ColumnStatisticsObj>>();
 
   protected ClassLoader parentLoader;
 
@@ -1145,6 +1149,10 @@ public class SessionState {
     return tempTables;
   }
 
+  public Map<String, Map<String, ColumnStatisticsObj>> getTempTableColStats() {
+    return tempTableColStats;
+  }
+
   /**
    * @return ip address for user running the query
    */
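
The new tempTableColStats field mirrors tempTables: a per-session two-level
map keyed by qualified table name, then column name. A sketch of the lookup
shape, with an illustrative table name and a String placeholder where Hive
stores a ColumnStatisticsObj:

    import java.util.HashMap;
    import java.util.Map;

    public class TempStatsDemo {
      public static void main(String[] args) {
        Map<String, Map<String, String>> colStats =
            new HashMap<String, Map<String, String>>();
        colStats.put("default.tmp_orders", new HashMap<String, String>());
        colStats.get("default.tmp_orders").put("amount", "numNulls=0, numDVs=42");
        // per-table, per-column retrieval, scoped to the session
        System.out.println(colStats.get("default.tmp_orders").get("amount"));
      }
    }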

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsAggregator.java Tue Sep  2 19:56:56 2014
@@ -60,8 +60,8 @@ public class CounterStatsAggregator impl
   @Override
   public String aggregateStats(String counterGrpName, String statType) {
     // In case of counters, aggregation is done by JobTracker / MR AM itself
-	// so no need to aggregate, simply return the counter value for requested stat.
-	return String.valueOf(counters.getGroup(counterGrpName).getCounter(statType));
+    // so no need to aggregate, simply return the counter value for requested stat.
+    return String.valueOf(counters.getGroup(counterGrpName).getCounter(statType));
   }
 
   @Override

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java Tue Sep  2 19:56:56 2014
@@ -224,7 +224,7 @@ public class StatsUtils {
           if (aggrStats.getPartsFound() != partNames.size() && colState != State.NONE) {
             LOG.debug("Column stats requested for : " + partNames.size() +" partitions. "
               + "Able to retrieve for " + aggrStats.getPartsFound() + " partitions");
-            stats.updateColumnStatsState(State.PARTIAL);
+            colState = State.PARTIAL;
           }
           stats.setColumnStatsState(colState);
         }
@@ -1166,6 +1166,10 @@ public class StatsUtils {
     return getFullyQualifiedName(dbName, tabName, partName, colName);
   }
 
+  public static String getFullyQualifiedTableName(String dbName, String tabName) {
+    return getFullyQualifiedName(dbName, tabName);
+  }
+
   private static String getFullyQualifiedName(String... names) {
     List<String> nonNullAndEmptyNames = Lists.newArrayList();
     for (String name : names) {
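
The first StatsUtils hunk fixes what looks like an ordering bug: the old
code marked the stats object PARTIAL and then immediately overwrote that in
the following stats.setColumnStatsState(colState) call, which still carried
the stale local state; updating the local colState first lets the final set
stick. A condensed illustration with made-up partition counts:

    public class StatsStateDemo {
      enum State { NONE, PARTIAL, COMPLETE }

      public static void main(String[] args) {
        int partsRequested = 10, partsFound = 7;  // illustrative counts
        State colState = State.COMPLETE;          // what the per-column checks produced
        if (partsFound != partsRequested && colState != State.NONE) {
          colState = State.PARTIAL;               // the fix: adjust the local first
        }
        System.out.println("final column stats state: " + colState);  // PARTIAL
      }
    }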

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java Tue Sep  2 19:56:56 2014
@@ -27,6 +27,7 @@ import java.sql.SQLRecoverableException;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -46,7 +47,8 @@ public class JDBCStatsAggregator impleme
   private final Log LOG = LogFactory.getLog(this.getClass().getName());
   private int timeout = 30;
   private final String comment = "Hive stats aggregation: " + this.getClass().getName();
-  private int maxRetries, waitWindow;
+  private int maxRetries;
+  private long waitWindow;
   private final Random r;
 
   public JDBCStatsAggregator() {
@@ -57,11 +59,14 @@ public class JDBCStatsAggregator impleme
   @Override
   public boolean connect(Configuration hiveconf, Task sourceTask) {
     this.hiveconf = hiveconf;
-    timeout = HiveConf.getIntVar(hiveconf, HiveConf.ConfVars.HIVE_STATS_JDBC_TIMEOUT);
+    timeout = (int) HiveConf.getTimeVar(
+        hiveconf, HiveConf.ConfVars.HIVE_STATS_JDBC_TIMEOUT, TimeUnit.SECONDS);
     connectionString = HiveConf.getVar(hiveconf, HiveConf.ConfVars.HIVESTATSDBCONNECTIONSTRING);
     String driver = HiveConf.getVar(hiveconf, HiveConf.ConfVars.HIVESTATSJDBCDRIVER);
     maxRetries = HiveConf.getIntVar(hiveconf, HiveConf.ConfVars.HIVE_STATS_RETRIES_MAX);
-    waitWindow = HiveConf.getIntVar(hiveconf, HiveConf.ConfVars.HIVE_STATS_RETRIES_WAIT);
+    waitWindow = HiveConf.getTimeVar(
+        hiveconf, HiveConf.ConfVars.HIVE_STATS_RETRIES_WAIT, TimeUnit.MILLISECONDS);
+    this.sourceTask = sourceTask;
 
     try {
       Class.forName(driver).newInstance();
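
connect() now reads HIVE_STATS_JDBC_TIMEOUT and HIVE_STATS_RETRIES_WAIT
through HiveConf.getTimeVar, which resolves unit-suffixed configuration
values into the TimeUnit the caller asks for (the hunk also assigns the
sourceTask parameter, which was previously dropped). A tiny reminder of the
java.util.concurrent.TimeUnit conversions this relies on; the 30-second
value is illustrative:

    import java.util.concurrent.TimeUnit;

    public class TimeVarDemo {
      public static void main(String[] args) {
        long timeoutSeconds = 30;  // e.g. a "30s" config value
        long asMillis = TimeUnit.SECONDS.toMillis(timeoutSeconds);
        long backToSeconds = TimeUnit.MILLISECONDS.toSeconds(asMillis);
        System.out.println(asMillis + " ms == " + backToSeconds + " s");
      }
    }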

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java Tue Sep  2 19:56:56 2014
@@ -30,6 +30,7 @@ import java.sql.Statement;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -48,7 +49,8 @@ public class JDBCStatsPublisher implemen
   private int timeout; // default timeout in sec. for JDBC connection and statements
   // SQL comment that identifies where the SQL statement comes from
   private final String comment = "Hive stats publishing: " + this.getClass().getName();
-  private int maxRetries, waitWindow;
+  private int maxRetries;
+  private long waitWindow;
   private final Random r;
 
   public JDBCStatsPublisher() {
@@ -59,9 +61,11 @@ public class JDBCStatsPublisher implemen
   public boolean connect(Configuration hiveconf) {
     this.hiveconf = hiveconf;
     maxRetries = HiveConf.getIntVar(hiveconf, HiveConf.ConfVars.HIVE_STATS_RETRIES_MAX);
-    waitWindow = HiveConf.getIntVar(hiveconf, HiveConf.ConfVars.HIVE_STATS_RETRIES_WAIT);
+    waitWindow = HiveConf.getTimeVar(
+        hiveconf, HiveConf.ConfVars.HIVE_STATS_RETRIES_WAIT, TimeUnit.MILLISECONDS);
     connectionString = HiveConf.getVar(hiveconf, HiveConf.ConfVars.HIVESTATSDBCONNECTIONSTRING);
-    timeout = HiveConf.getIntVar(hiveconf, HiveConf.ConfVars.HIVE_STATS_JDBC_TIMEOUT);
+    timeout = (int) HiveConf.getTimeVar(
+        hiveconf, HiveConf.ConfVars.HIVE_STATS_JDBC_TIMEOUT, TimeUnit.SECONDS);
     String driver = HiveConf.getVar(hiveconf, HiveConf.ConfVars.HIVESTATSJDBCDRIVER);
 
     try {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java Tue Sep  2 19:56:56 2014
@@ -120,7 +120,7 @@ public class CompactorMR {
     job.setBoolean(IS_MAJOR, isMajor);
     job.setBoolean(IS_COMPRESSED, sd.isCompressed());
     job.set(TABLE_PROPS, new StringableMap(t.getParameters()).toString());
-    job.setInt(NUM_BUCKETS, sd.getBucketColsSize());
+    job.setInt(NUM_BUCKETS, sd.getNumBuckets());
     job.set(ValidTxnList.VALID_TXNS_KEY, txns.toString());
     setColumnTypes(job, sd.getCols());
 

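The one-line fix distinguishes the number of bucketing columns from the
number of buckets: for a table declared CLUSTERED BY (user_id) INTO 32
BUCKETS, getBucketColsSize() is 1 while getNumBuckets() is 32, so the
compactor job was being configured with the wrong bucket count. Illustrated
with plain fields standing in for StorageDescriptor:

    import java.util.Arrays;
    import java.util.List;

    public class BucketDemo {
      public static void main(String[] args) {
        List<String> bucketCols = Arrays.asList("user_id");  // CLUSTERED BY (user_id)
        int numBuckets = 32;                                 // INTO 32 BUCKETS
        System.out.println("getBucketColsSize() -> " + bucketCols.size()); // 1 (wrong value)
        System.out.println("getNumBuckets()     -> " + numBuckets);        // 32 (what the job needs)
      }
    }
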
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java Tue Sep  2 19:56:56 2014
@@ -43,6 +43,7 @@ import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.util.List;
 import java.util.Set;
+import java.util.concurrent.TimeUnit;
 
 /**
  * A class to initiate compactions.  This will run in a separate thread.
@@ -50,7 +51,6 @@ import java.util.Set;
 public class Initiator extends CompactorThread {
   static final private String CLASS_NAME = Initiator.class.getName();
   static final private Log LOG = LogFactory.getLog(CLASS_NAME);
-  static final private int threadId = 10000;
 
   static final private String NO_COMPACTION = "NO_AUTO_COMPACTION";
 
@@ -63,7 +63,7 @@ public class Initiator extends Compactor
     try {
       recoverFailedCompactions(false);
 
-      int abortedThreashold = HiveConf.getIntVar(conf,
+      int abortedThreshold = HiveConf.getIntVar(conf,
           HiveConf.ConfVars.HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD);
 
       // Make sure we run through the loop once before checking to stop as this makes testing
@@ -77,7 +77,7 @@ public class Initiator extends Compactor
         try {
           ShowCompactResponse currentCompactions = txnHandler.showCompact(new ShowCompactRequest());
           ValidTxnList txns = TxnHandler.createValidTxnList(txnHandler.getOpenTxns());
-          Set<CompactionInfo> potentials = txnHandler.findPotentialCompactions(abortedThreashold);
+          Set<CompactionInfo> potentials = txnHandler.findPotentialCompactions(abortedThreshold);
           LOG.debug("Found " + potentials.size() + " potential compactions, " +
               "checking to see if we should compact any of them");
           for (CompactionInfo ci : potentials) {
@@ -140,13 +140,13 @@ public class Initiator extends Compactor
   public void init(BooleanPointer stop) throws MetaException {
     super.init(stop);
     checkInterval =
-        HiveConf.getLongVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_CHECK_INTERVAL) * 1000;
+        conf.getTimeVar(HiveConf.ConfVars.HIVE_COMPACTOR_CHECK_INTERVAL, TimeUnit.MILLISECONDS);
   }
 
   private void recoverFailedCompactions(boolean remoteOnly) throws MetaException {
     if (!remoteOnly) txnHandler.revokeFromLocalWorkers(Worker.hostname());
-    txnHandler.revokeTimedoutWorkers(HiveConf.getLongVar(conf,
-        HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_TIMEOUT));
+    txnHandler.revokeTimedoutWorkers(HiveConf.getTimeVar(conf,
+        HiveConf.ConfVars.HIVE_COMPACTOR_WORKER_TIMEOUT, TimeUnit.MILLISECONDS));
   }
 
   // Figure out if there are any currently running compactions on the same table or partition.

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java Tue Sep  2 19:56:56 2014
@@ -34,43 +34,38 @@ import org.apache.hadoop.io.IntWritable;
 
 @WindowFunctionDescription
 (
-		description = @Description(
-								name = "cume_dist",
-								value = "_FUNC_(x) - The CUME_DIST function (defined as the inverse of percentile in some " +
-									"statistical books) computes the position of a specified value relative to a set of values. " +
-									"To compute the CUME_DIST of a value x in a set S of size N, you use the formula: " +
-									"CUME_DIST(x) =  number of values in S coming before " +
-									"   and including x in the specified order/ N"
-								),
-		supportsWindow = false,
-		pivotResult = true,
-		rankingFunction = true,
-		impliesOrder = true
+    description = @Description(
+                name = "cume_dist",
+                value = "_FUNC_(x) - The CUME_DIST function (defined as the inverse of percentile in some " +
+                  "statistical books) computes the position of a specified value relative to a set of values. " +
+                  "To compute the CUME_DIST of a value x in a set S of size N, you use the formula: " +
+                  "CUME_DIST(x) =  number of values in S coming before " +
+                  "   and including x in the specified order/ N"
+                ),
+    supportsWindow = false,
+    pivotResult = true,
+    rankingFunction = true,
+    impliesOrder = true
 )
-public class GenericUDAFCumeDist extends GenericUDAFRank
-{
+public class GenericUDAFCumeDist extends GenericUDAFRank {
 
-	static final Log LOG = LogFactory.getLog(GenericUDAFCumeDist.class.getName());
+  static final Log LOG = LogFactory.getLog(GenericUDAFCumeDist.class.getName());
 
-	@Override
-  protected GenericUDAFAbstractRankEvaluator createEvaluator()
-	{
-		return new GenericUDAFCumeDistEvaluator();
-	}
+  @Override
+  protected GenericUDAFAbstractRankEvaluator createEvaluator() {
+    return new GenericUDAFCumeDistEvaluator();
+  }
 
-  public static class GenericUDAFCumeDistEvaluator extends GenericUDAFAbstractRankEvaluator
-  {
+  public static class GenericUDAFCumeDistEvaluator extends GenericUDAFAbstractRankEvaluator {
     @Override
-    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
-    {
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
       super.init(m, parameters);
       return ObjectInspectorFactory
           .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
     }
 
     @Override
-    public Object terminate(AggregationBuffer agg) throws HiveException
-    {
+    public Object terminate(AggregationBuffer agg) throws HiveException {
       List<IntWritable> ranks = ((RankBuffer) agg).rowNums;
       int ranksSize = ranks.size();
       double ranksSizeDouble = ranksSize;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFDenseRank.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFDenseRank.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFDenseRank.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFDenseRank.java Tue Sep  2 19:56:56 2014
@@ -23,41 +23,38 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription;
 
-@WindowFunctionDescription
-(
-		description = @Description(
-								name = "dense_rank",
-								value = "_FUNC_(x) The difference between RANK and DENSE_RANK is that DENSE_RANK leaves no " +
-										"gaps in ranking sequence when there are ties. That is, if you were " +
-										"ranking a competition using DENSE_RANK and had three people tie for " +
-										"second place, you would say that all three were in second place and " +
-										"that the next person came in third."
-								),
-		supportsWindow = false,
-		pivotResult = true,
-		rankingFunction = true,
-		impliesOrder = true
+@WindowFunctionDescription(
+  description = @Description(
+    name = "dense_rank",
+    value = "_FUNC_(x) The difference between RANK and DENSE_RANK is that DENSE_RANK leaves no " +
+            "gaps in ranking sequence when there are ties. That is, if you were " +
+            "ranking a competition using DENSE_RANK and had three people tie for " +
+            "second place, you would say that all three were in second place and " +
+            "that the next person came in third."
+  ),
+  supportsWindow = false,
+  pivotResult = true,
+  rankingFunction = true,
+  impliesOrder = true
 )
-public class GenericUDAFDenseRank extends GenericUDAFRank
-{
-	static final Log LOG = LogFactory.getLog(GenericUDAFDenseRank.class.getName());
-
-	@Override
-  protected GenericUDAFAbstractRankEvaluator createEvaluator()
-	{
-		return new GenericUDAFDenseRankEvaluator();
-	}
-
-	public static class GenericUDAFDenseRankEvaluator extends GenericUDAFRankEvaluator
-	{
-		/*
-		 * Called when the value in the partition has changed. Update the currentRank
-		 */
-		@Override
-    protected void nextRank(RankBuffer rb)
-		{
-			rb.currentRank++;
-		}
-	}
+public class GenericUDAFDenseRank extends GenericUDAFRank {
+
+  static final Log LOG = LogFactory.getLog(GenericUDAFDenseRank.class.getName());
+
+  @Override
+  protected GenericUDAFAbstractRankEvaluator createEvaluator() {
+    return new GenericUDAFDenseRankEvaluator();
+  }
+
+  public static class GenericUDAFDenseRankEvaluator extends GenericUDAFRankEvaluator {
+
+    /*
+     * Called when the value in the partition has changed. Update the currentRank
+     */
+    @Override
+    protected void nextRank(RankBuffer rb) {
+      rb.currentRank++;
+    }
+  }
 }
 

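The description text above contrasts RANK with DENSE_RANK; a worked example
over an already-sorted partition shows the gap RANK leaves after a
three-way tie while DENSE_RANK stays contiguous:

    import java.util.Arrays;
    import java.util.List;

    public class RankDemo {
      public static void main(String[] args) {
        List<Integer> scores = Arrays.asList(100, 95, 95, 95, 90);
        int rank = 0, denseRank = 0, rowNum = 0;
        Integer prev = null;
        for (Integer s : scores) {
          rowNum++;
          if (!s.equals(prev)) {
            rank = rowNum;   // RANK: position in row order, leaving gaps after ties
            denseRank++;     // DENSE_RANK: increments only when the value changes
            prev = s;
          }
          System.out.println(s + " -> rank=" + rank + ", dense_rank=" + denseRank);
        }
        // 90 ends up with rank=5 but dense_rank=3
      }
    }
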
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java Tue Sep  2 19:56:56 2014
@@ -41,147 +41,128 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
-@WindowFunctionDescription
-(
-		description = @Description(
-								name = "first_value",
-								value = "_FUNC_(x)"
-								),
-		supportsWindow = true,
-		pivotResult = false,
-		impliesOrder = true
+@WindowFunctionDescription(
+  description = @Description(
+    name = "first_value",
+    value = "_FUNC_(x)"
+  ),
+  supportsWindow = true,
+  pivotResult = false,
+  impliesOrder = true
 )
-public class GenericUDAFFirstValue extends AbstractGenericUDAFResolver
-{
-	static final Log LOG = LogFactory.getLog(GenericUDAFFirstValue.class.getName());
-
-	@Override
-	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException
-	{
-		if (parameters.length > 2)
-		{
-			throw new UDFArgumentTypeException(2, "At most 2 arguments expected");
-		}
-		if ( parameters.length > 1 && !parameters[1].equals(TypeInfoFactory.booleanTypeInfo) )
-		{
-			throw new UDFArgumentTypeException(1, "second argument must be a boolean expression");
-		}
-		return createEvaluator();
-	}
-
-	protected GenericUDAFFirstValueEvaluator createEvaluator()
-	{
-		return new GenericUDAFFirstValueEvaluator();
-	}
-
-	static class FirstValueBuffer implements AggregationBuffer
-	{
-		Object val;
-		boolean valSet;
-		boolean firstRow;
-		boolean skipNulls;
-
-		FirstValueBuffer()
-		{
-			init();
-		}
-
-		void init()
-		{
-			val = null;
-			valSet = false;
-			firstRow = true;
-			skipNulls = false;
-		}
-
-	}
-
-	public static class GenericUDAFFirstValueEvaluator extends GenericUDAFEvaluator
-	{
-		ObjectInspector inputOI;
-		ObjectInspector outputOI;
-
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
-		{
-			super.init(m, parameters);
-			if (m != Mode.COMPLETE)
-			{
-				throw new HiveException(
-						"Only COMPLETE mode supported for Rank function");
-			}
-			inputOI = parameters[0];
-			outputOI = ObjectInspectorUtils.getStandardObjectInspector(inputOI, ObjectInspectorCopyOption.WRITABLE);
-			return outputOI;
-		}
-
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException
-		{
-			return new FirstValueBuffer();
-		}
-
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException
-		{
-			((FirstValueBuffer) agg).init();
-		}
-
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException
-		{
-			FirstValueBuffer fb = (FirstValueBuffer) agg;
-
-			if (fb.firstRow )
-			{
-				fb.firstRow = false;
-				if ( parameters.length == 2  )
-				{
-					fb.skipNulls = PrimitiveObjectInspectorUtils.getBoolean(
-							parameters[1],
-							PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
-				}
-			}
-
-			if ( !fb.valSet )
-			{
-				fb.val = ObjectInspectorUtils.copyToStandardObject(parameters[0], inputOI, ObjectInspectorCopyOption.WRITABLE);
-				if ( !fb.skipNulls || fb.val != null )
-				{
-					fb.valSet = true;
-				}
-			}
-		}
-
-		@Override
-		public Object terminatePartial(AggregationBuffer agg) throws HiveException
-		{
-			throw new HiveException("terminatePartial not supported");
-		}
-
-		@Override
-		public void merge(AggregationBuffer agg, Object partial) throws HiveException
-		{
-			throw new HiveException("merge not supported");
-		}
-
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException
-		{
-			return ((FirstValueBuffer) agg).val;
-		}
-		
+public class GenericUDAFFirstValue extends AbstractGenericUDAFResolver {
+
+  static final Log LOG = LogFactory.getLog(GenericUDAFFirstValue.class.getName());
+
+  @Override
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+    if (parameters.length > 2) {
+      throw new UDFArgumentTypeException(2, "At most 2 arguments expected");
+    }
+    if (parameters.length > 1 && !parameters[1].equals(TypeInfoFactory.booleanTypeInfo)) {
+      throw new UDFArgumentTypeException(1, "second argument must be a boolean expression");
+    }
+    return createEvaluator();
+  }
+
+  protected GenericUDAFFirstValueEvaluator createEvaluator() {
+    return new GenericUDAFFirstValueEvaluator();
+  }
+
+  static class FirstValueBuffer implements AggregationBuffer {
+
+    Object val;
+    boolean valSet;
+    boolean firstRow;
+    boolean skipNulls;
+
+    FirstValueBuffer() {
+      init();
+    }
+
+    void init() {
+      val = null;
+      valSet = false;
+      firstRow = true;
+      skipNulls = false;
+    }
+
+  }
+
+  public static class GenericUDAFFirstValueEvaluator extends GenericUDAFEvaluator {
+
+    ObjectInspector inputOI;
+    ObjectInspector outputOI;
+
+    @Override
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+      super.init(m, parameters);
+      if (m != Mode.COMPLETE) {
+        throw new HiveException("Only COMPLETE mode supported for Rank function");
+      }
+      inputOI = parameters[0];
+      outputOI = ObjectInspectorUtils.getStandardObjectInspector(inputOI,
+        ObjectInspectorCopyOption.WRITABLE);
+      return outputOI;
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new FirstValueBuffer();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      ((FirstValueBuffer) agg).init();
+    }
+
+    @Override
+    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+      FirstValueBuffer fb = (FirstValueBuffer) agg;
+
+      if (fb.firstRow) {
+        fb.firstRow = false;
+        if (parameters.length == 2) {
+          fb.skipNulls = PrimitiveObjectInspectorUtils.getBoolean(parameters[1],
+            PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
+        }
+      }
+
+      if (!fb.valSet) {
+        fb.val = ObjectInspectorUtils.copyToStandardObject(parameters[0], inputOI,
+          ObjectInspectorCopyOption.WRITABLE);
+        if (!fb.skipNulls || fb.val != null) {
+          fb.valSet = true;
+        }
+      }
+    }
+
+    @Override
+    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+      throw new HiveException("terminatePartial not supported");
+    }
+
+    @Override
+    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+      throw new HiveException("merge not supported");
+    }
+
+    @Override
+    public Object terminate(AggregationBuffer agg) throws HiveException {
+      return ((FirstValueBuffer) agg).val;
+    }
+
     @Override
     public GenericUDAFEvaluator getWindowingEvaluator(WindowFrameDef wFrmDef) {
       BoundaryDef start = wFrmDef.getStart();
       BoundaryDef end = wFrmDef.getEnd();
-      return new FirstValStreamingFixedWindow(this, start.getAmt(),
-          end.getAmt());
+      return new FirstValStreamingFixedWindow(this, start.getAmt(), end.getAmt());
     }
 
-	}
-	
+  }
+
   static class ValIndexPair {
+
     Object val;
     int idx;
 
@@ -191,16 +172,15 @@ public class GenericUDAFFirstValue exten
     }
   }
 
-  static class FirstValStreamingFixedWindow extends
-      GenericUDAFStreamingEvaluator<Object> {
+  static class FirstValStreamingFixedWindow extends GenericUDAFStreamingEvaluator<Object> {
 
     class State extends GenericUDAFStreamingEvaluator<Object>.StreamingState {
+
       private final Deque<ValIndexPair> valueChain;
 
       public State(int numPreceding, int numFollowing, AggregationBuffer buf) {
         super(numPreceding, numFollowing, buf);
-        valueChain = new ArrayDeque<ValIndexPair>(numPreceding + numFollowing
-            + 1);
+        valueChain = new ArrayDeque<ValIndexPair>(numPreceding + numFollowing + 1);
       }
 
       @Override
@@ -222,8 +202,8 @@ public class GenericUDAFFirstValue exten
          */
 
         int wdwSz = numPreceding + numFollowing + 1;
-        return underlying + (underlying * wdwSz) + (underlying * wdwSz)
-            + (3 * JavaDataModel.PRIMITIVES1);
+        return underlying + (underlying * wdwSz) + (underlying * wdwSz)
+            + (3 * JavaDataModel.PRIMITIVES1);
       }
 
       protected void reset() {
@@ -232,8 +212,8 @@ public class GenericUDAFFirstValue exten
       }
     }
 
-    public FirstValStreamingFixedWindow(GenericUDAFEvaluator wrappedEval,
-        int numPreceding, int numFollowing) {
+    public FirstValStreamingFixedWindow(GenericUDAFEvaluator wrappedEval, int numPreceding,
+      int numFollowing) {
       super(wrappedEval, numPreceding, numFollowing);
     }
 
@@ -253,8 +233,7 @@ public class GenericUDAFFirstValue exten
     }
 
     @Override
-    public void iterate(AggregationBuffer agg, Object[] parameters)
-        throws HiveException {
+    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
 
       State s = (State) agg;
       FirstValueBuffer fb = (FirstValueBuffer) s.wrappedBuf;
@@ -266,15 +245,14 @@ public class GenericUDAFFirstValue exten
         wrappedEval.iterate(fb, parameters);
       }
 
-      Object o = ObjectInspectorUtils.copyToStandardObject(parameters[0],
-          inputOI(), ObjectInspectorCopyOption.WRITABLE);
+      Object o = ObjectInspectorUtils.copyToStandardObject(parameters[0], inputOI(),
+        ObjectInspectorCopyOption.WRITABLE);
 
       /*
        * add row to chain. except in case of UNB preceding: - only 1 firstVal
        * needs to be tracked.
        */
-      if (s.numPreceding != BoundarySpec.UNBOUNDED_AMOUNT
-          || s.valueChain.isEmpty()) {
+      if (s.numPreceding != BoundarySpec.UNBOUNDED_AMOUNT || s.valueChain.isEmpty()) {
         /*
          * add value to chain if it is not null or if skipNulls is false.
          */
@@ -309,8 +287,7 @@ public class GenericUDAFFirstValue exten
     public Object terminate(AggregationBuffer agg) throws HiveException {
       State s = (State) agg;
       FirstValueBuffer fb = (FirstValueBuffer) s.wrappedBuf;
-      ValIndexPair r = fb.skipNulls && s.valueChain.size() == 0 ? null
-          : s.valueChain.getFirst();
+      ValIndexPair r = fb.skipNulls && s.valueChain.size() == 0 ? null : s.valueChain.getFirst();
 
       for (int i = 0; i < s.numFollowing; i++) {
         s.results.add(r == null ? null : r.val);
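
FirstValStreamingFixedWindow keeps a Deque of (value, index) pairs and drops
entries from the front once their index slides out of the fixed window, so
the head of the chain is always the window's first value. A generic sketch
of that bookkeeping for a 2-PRECEDING window (skipNulls and the UNBOUNDED
case above are omitted; values are illustrative):

    import java.util.ArrayDeque;
    import java.util.Arrays;
    import java.util.Deque;
    import java.util.List;

    public class FirstValueWindowDemo {
      static class Pair {
        final int val, idx;
        Pair(int val, int idx) { this.val = val; this.idx = idx; }
      }

      public static void main(String[] args) {
        int numPreceding = 2;  // window: current row plus the 2 before it
        List<Integer> rows = Arrays.asList(7, 3, 9, 4, 8);
        Deque<Pair> chain = new ArrayDeque<Pair>();
        for (int i = 0; i < rows.size(); i++) {
          chain.addLast(new Pair(rows.get(i), i));
          while (chain.getFirst().idx < i - numPreceding) {
            chain.removeFirst();  // evict rows that fell out of the window
          }
          System.out.println("row " + i + ": first_value=" + chain.getFirst().val);
        }
      }
    }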

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java Tue Sep  2 19:56:56 2014
@@ -37,131 +37,107 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
-@WindowFunctionDescription(description = @Description(name = "last_value", value = "_FUNC_(x)"), supportsWindow = true, pivotResult = false, impliesOrder = true)
-public class GenericUDAFLastValue extends AbstractGenericUDAFResolver
-{
-	static final Log LOG = LogFactory.getLog(GenericUDAFLastValue.class
-			.getName());
-
-	@Override
-	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-			throws SemanticException
-	{
-		if (parameters.length > 2)
-		{
-			throw new UDFArgumentTypeException(2, "At most 2 arguments expected");
-		}
-		if ( parameters.length > 1 && !parameters[1].equals(TypeInfoFactory.booleanTypeInfo) )
-		{
-			throw new UDFArgumentTypeException(1, "second argument must be a boolean expression");
-		}
-		return createEvaluator();
-	}
-
-	protected GenericUDAFLastValueEvaluator createEvaluator()
-	{
-		return new GenericUDAFLastValueEvaluator();
-	}
-
-	static class LastValueBuffer implements AggregationBuffer
-	{
-		Object val;
-		boolean firstRow;
-		boolean skipNulls;
-
-		LastValueBuffer()
-		{
-			init();
-		}
-
-		void init()
-		{
-			val = null;
-			firstRow = true;
-			skipNulls = false;
-		}
-
-	}
-
-	public static class GenericUDAFLastValueEvaluator extends
-			GenericUDAFEvaluator
-	{
-		ObjectInspector inputOI;
-		ObjectInspector outputOI;
-
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-				throws HiveException
-		{
-			super.init(m, parameters);
-			if (m != Mode.COMPLETE)
-			{
-				throw new HiveException(
-						"Only COMPLETE mode supported for Rank function");
-			}
-			inputOI = parameters[0];
-			outputOI = ObjectInspectorUtils.getStandardObjectInspector(inputOI,
-					ObjectInspectorCopyOption.WRITABLE);
-			return outputOI;
-		}
-
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException
-		{
-			return new LastValueBuffer();
-		}
-
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException
-		{
-			((LastValueBuffer) agg).init();
-		}
-
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters)
-				throws HiveException
-		{
-			LastValueBuffer lb = (LastValueBuffer) agg;
-			if (lb.firstRow )
-			{
-				lb.firstRow = false;
-				if ( parameters.length == 2  )
-				{
-					lb.skipNulls = PrimitiveObjectInspectorUtils.getBoolean(
-							parameters[1],
-							PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
-				}
-			}
-			
-      Object o = ObjectInspectorUtils.copyToStandardObject(parameters[0],
-          inputOI, ObjectInspectorCopyOption.WRITABLE);
+@WindowFunctionDescription(description = @Description(name = "last_value", value = "_FUNC_(x)"),
+  supportsWindow = true, pivotResult = false, impliesOrder = true)
+public class GenericUDAFLastValue extends AbstractGenericUDAFResolver {
+
+  static final Log LOG = LogFactory.getLog(GenericUDAFLastValue.class.getName());
+
+  @Override
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+    if (parameters.length > 2) {
+      throw new UDFArgumentTypeException(2, "At most 2 arguments expected");
+    }
+    if (parameters.length > 1 && !parameters[1].equals(TypeInfoFactory.booleanTypeInfo)) {
+      throw new UDFArgumentTypeException(1, "second argument must be a boolean expression");
+    }
+    return createEvaluator();
+  }
+
+  protected GenericUDAFLastValueEvaluator createEvaluator() {
+    return new GenericUDAFLastValueEvaluator();
+  }
+
+  static class LastValueBuffer implements AggregationBuffer {
+
+    Object val;
+    boolean firstRow;
+    boolean skipNulls;
+
+    LastValueBuffer() {
+      init();
+    }
+
+    void init() {
+      val = null;
+      firstRow = true;
+      skipNulls = false;
+    }
+
+  }
+
+  public static class GenericUDAFLastValueEvaluator extends GenericUDAFEvaluator {
+
+    ObjectInspector inputOI;
+    ObjectInspector outputOI;
+
+    @Override
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+      super.init(m, parameters);
+      if (m != Mode.COMPLETE) {
+        throw new HiveException("Only COMPLETE mode supported for Rank function");
+      }
+      inputOI = parameters[0];
+      outputOI = ObjectInspectorUtils.getStandardObjectInspector(inputOI,
+        ObjectInspectorCopyOption.WRITABLE);
+      return outputOI;
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new LastValueBuffer();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      ((LastValueBuffer) agg).init();
+    }
+
+    @Override
+    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+      LastValueBuffer lb = (LastValueBuffer) agg;
+      if (lb.firstRow) {
+        lb.firstRow = false;
+        if (parameters.length == 2) {
+          lb.skipNulls = PrimitiveObjectInspectorUtils.getBoolean(parameters[1],
+            PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
+        }
+      }
+
+      Object o = ObjectInspectorUtils.copyToStandardObject(parameters[0], inputOI,
+        ObjectInspectorCopyOption.WRITABLE);
 
       if (!lb.skipNulls || o != null) {
         lb.val = o;
       }
-		}
+    }
 
-		@Override
-		public Object terminatePartial(AggregationBuffer agg)
-				throws HiveException
-		{
-			throw new HiveException("terminatePartial not supported");
-		}
-
-		@Override
-		public void merge(AggregationBuffer agg, Object partial)
-				throws HiveException
-		{
-			throw new HiveException("merge not supported");
-		}
-
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException
-		{
-			LastValueBuffer lb = (LastValueBuffer) agg;
-			return lb.val;
+    @Override
+    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+      throw new HiveException("terminatePartial not supported");
+    }
 
-		}
+    @Override
+    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+      throw new HiveException("merge not supported");
+    }
+
+    @Override
+    public Object terminate(AggregationBuffer agg) throws HiveException {
+      LastValueBuffer lb = (LastValueBuffer) agg;
+      return lb.val;
+    }
 
     @Override
     public GenericUDAFEvaluator getWindowingEvaluator(WindowFrameDef wFrmDef) {
@@ -169,12 +145,12 @@ public class GenericUDAFLastValue extend
       BoundaryDef end = wFrmDef.getEnd();
       return new LastValStreamingFixedWindow(this, start.getAmt(), end.getAmt());
     }
-	}
+  }
 
-  static class LastValStreamingFixedWindow extends
-      GenericUDAFStreamingEvaluator<Object> {
+  static class LastValStreamingFixedWindow extends GenericUDAFStreamingEvaluator<Object> {
 
     class State extends GenericUDAFStreamingEvaluator<Object>.StreamingState {
+
       private Object lastValue;
       private int lastIdx;
 
@@ -203,8 +179,8 @@ public class GenericUDAFLastValue extend
       }
     }
 
-    public LastValStreamingFixedWindow(GenericUDAFEvaluator wrappedEval,
-        int numPreceding, int numFollowing) {
+    public LastValStreamingFixedWindow(GenericUDAFEvaluator wrappedEval, int numPreceding,
+      int numFollowing) {
       super(wrappedEval, numPreceding, numFollowing);
     }
 
@@ -224,8 +200,7 @@ public class GenericUDAFLastValue extend
     }
 
     @Override
-    public void iterate(AggregationBuffer agg, Object[] parameters)
-        throws HiveException {
+    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
 
       State s = (State) agg;
       LastValueBuffer lb = (LastValueBuffer) s.wrappedBuf;
@@ -237,8 +212,8 @@ public class GenericUDAFLastValue extend
         wrappedEval.iterate(lb, parameters);
       }
 
-      Object o = ObjectInspectorUtils.copyToStandardObject(parameters[0],
-          inputOI(), ObjectInspectorCopyOption.WRITABLE);
+      Object o = ObjectInspectorUtils.copyToStandardObject(parameters[0], inputOI(),
+        ObjectInspectorCopyOption.WRITABLE);
 
       if (!lb.skipNulls || o != null) {
         s.lastValue = o;
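
For reference, the last_value evaluator keeps only the most recently seen row
value, optionally skipping nulls when the second argument is true. Below is a
minimal standalone sketch of that buffer behaviour in plain Java; it is not
part of the patch, and the class and method names are invented for
illustration only.

  // Standalone sketch of last_value(x[, skipNulls]) buffer semantics.
  import java.util.Arrays;
  import java.util.List;

  public class LastValueSketch {
    static Object lastValue(List<Object> column, boolean skipNulls) {
      Object val = null;
      for (Object o : column) {
        // Mirrors LastValueBuffer: overwrite unless nulls are being skipped
        // and the incoming value is null.
        if (!skipNulls || o != null) {
          val = o;
        }
      }
      return val;
    }

    public static void main(String[] args) {
      List<Object> col = Arrays.asList("a", "b", null);
      System.out.println(lastValue(col, false)); // null (last row wins)
      System.out.println(lastValue(col, true));  // b    (nulls skipped)
    }
  }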

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java Tue Sep  2 19:56:56 2014
@@ -38,144 +38,129 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.IntWritable;
 
-@WindowFunctionDescription
-(
-		description = @Description(
-								name = "rank",
-								value = "_FUNC_(x) NTILE allows easy calculation of tertiles, quartiles, deciles and other " +
-									"common summary statistics. This function divides an ordered partition into a specified " +
-									"number of groups called buckets and assigns a bucket number to each row in the partition."
-								),
-		supportsWindow = false,
-		pivotResult = true
+@WindowFunctionDescription(
+  description = @Description(
+    name = "rank",
+    value = "_FUNC_(x) NTILE allows easy calculation of tertiles, quartiles, deciles and other "
+            +"common summary statistics. This function divides an ordered partition into a "
+            + "specified number of groups called buckets and assigns a bucket number to each row "
+            + "in the partition."
+  ),
+  supportsWindow = false,
+  pivotResult = true
 )
-public class GenericUDAFNTile  extends AbstractGenericUDAFResolver
-{
-	static final Log LOG = LogFactory.getLog(GenericUDAFNTile.class.getName());
-
-	@Override
-	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException
-	{
-		if (parameters.length != 1)
-		{
-			throw new UDFArgumentTypeException(parameters.length - 1, "Exactly one argument is expected.");
-		}
-		ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[0]);
-
-		boolean c = ObjectInspectorUtils.compareTypes(oi, PrimitiveObjectInspectorFactory.writableIntObjectInspector);
-		if (!c)
-		{
-			throw new UDFArgumentTypeException(0, "Number of tiles must be an int expression");
-		}
-
-		return new GenericUDAFNTileEvaluator();
-	}
-
-	static class NTileBuffer implements AggregationBuffer
-	{
-		Integer numBuckets;
-		int numRows;
-
-		void init()
-		{
-			numBuckets = null;
-			numRows = 0;
-		}
-
-		NTileBuffer()
-		{
-			init();
-		}
-	}
-
-	public static class GenericUDAFNTileEvaluator extends GenericUDAFEvaluator
-	{
-		private transient PrimitiveObjectInspector inputOI;
-
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
-		{
-			assert (parameters.length == 1);
-			super.init(m, parameters);
-			if (m != Mode.COMPLETE)
-			{
-				throw new HiveException(
-						"Only COMPLETE mode supported for NTile function");
-			}
-			inputOI = (PrimitiveObjectInspector) parameters[0];
-			return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
-		}
-
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException
-		{
-			return new NTileBuffer();
-		}
-
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException
-		{
-			((NTileBuffer) agg).init();
-		}
-
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException
-		{
-			NTileBuffer rb = (NTileBuffer) agg;
-			if ( rb.numBuckets == null)
-			{
-				rb.numBuckets = PrimitiveObjectInspectorUtils.getInt(parameters[0], inputOI);
-			}
-			rb.numRows++;
-		}
-
-		@Override
-		public Object terminatePartial(AggregationBuffer agg) throws HiveException
-		{
-			throw new HiveException("terminatePartial not supported");
-		}
-
-		@Override
-		public void merge(AggregationBuffer agg, Object partial) throws HiveException
-		{
-			throw new HiveException("merge not supported");
-		}
-
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException
-		{
-			NTileBuffer rb = (NTileBuffer) agg;
-			ArrayList<IntWritable> res = new ArrayList<IntWritable>(rb.numRows);
-
-			/*
-			 * if there is a remainder from numRows/numBuckets; then distribute increase the size of the first 'rem' buckets by 1.
-			 */
-
-			int bucketsz = rb.numRows / rb.numBuckets;
-			int rem = rb.numRows % rb.numBuckets;
-			int start = 0;
-			int bucket = 1;
-			while ( start < rb.numRows)
-			{
-				int end = start + bucketsz;
-				if (rem > 0)
-				{
-					end++; rem--;
-				}
-				end = Math.min(rb.numRows, end);
-				for(int i = start; i < end; i++)
-				{
-					res.add(new IntWritable(bucket));
-				}
-				start = end;
-				bucket++;
-			}
+public class GenericUDAFNTile extends AbstractGenericUDAFResolver {
 
-			return res;
-		}
+  static final Log LOG = LogFactory.getLog(GenericUDAFNTile.class.getName());
 
-	}
+  @Override
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+    if (parameters.length != 1) {
+      throw new UDFArgumentTypeException(parameters.length - 1,
+        "Exactly one argument is expected.");
+    }
+    ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[0]);
+
+    boolean c = ObjectInspectorUtils.compareTypes(oi,
+      PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+    if (!c) {
+      throw new UDFArgumentTypeException(0, "Number of tiles must be an int expression");
+    }
+
+    return new GenericUDAFNTileEvaluator();
+  }
+
+  static class NTileBuffer implements AggregationBuffer {
+
+    Integer numBuckets;
+    int numRows;
+
+    void init() {
+      numBuckets = null;
+      numRows = 0;
+    }
+
+    NTileBuffer() {
+      init();
+    }
+  }
+
+  public static class GenericUDAFNTileEvaluator extends GenericUDAFEvaluator {
+
+    private transient PrimitiveObjectInspector inputOI;
+
+    @Override
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+      assert (parameters.length == 1);
+      super.init(m, parameters);
+      if (m != Mode.COMPLETE) {
+        throw new HiveException("Only COMPLETE mode supported for NTile function");
+      }
+      inputOI = (PrimitiveObjectInspector) parameters[0];
+      return ObjectInspectorFactory.getStandardListObjectInspector(
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new NTileBuffer();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      ((NTileBuffer) agg).init();
+    }
+
+    @Override
+    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+      NTileBuffer rb = (NTileBuffer) agg;
+      if (rb.numBuckets == null) {
+        rb.numBuckets = PrimitiveObjectInspectorUtils.getInt(parameters[0], inputOI);
+      }
+      rb.numRows++;
+    }
+
+    @Override
+    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+      throw new HiveException("terminatePartial not supported");
+    }
+
+    @Override
+    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+      throw new HiveException("merge not supported");
+    }
+
+    @Override
+    public Object terminate(AggregationBuffer agg) throws HiveException {
+      NTileBuffer rb = (NTileBuffer) agg;
+      ArrayList<IntWritable> res = new ArrayList<IntWritable>(rb.numRows);
+
+      /*
+       * If there is a remainder from numRows / numBuckets, increase the size of the first 'rem' buckets by 1.
+       */
+
+      int bucketsz = rb.numRows / rb.numBuckets;
+      int rem = rb.numRows % rb.numBuckets;
+      int start = 0;
+      int bucket = 1;
+      while (start < rb.numRows) {
+        int end = start + bucketsz;
+        if (rem > 0) {
+          end++;
+          rem--;
+        }
+        end = Math.min(rb.numRows, end);
+        for (int i = start; i < end; i++) {
+          res.add(new IntWritable(bucket));
+        }
+        start = end;
+        bucket++;
+      }
 
+      return res;
+    }
+
+  }
 
 }
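
The terminate() loop above sizes each bucket as numRows / numBuckets and gives
the first numRows % numBuckets buckets one extra row. For example, 10 rows
into 3 buckets yields bucketsz = 3 and rem = 1, so the bucket sizes are 4, 3,
3. A self-contained sketch of the same assignment logic follows (plain Java;
not part of the patch, names invented for illustration).

  // Standalone sketch of NTile's bucket assignment.
  public class NTileSketch {
    static int[] assignBuckets(int numRows, int numBuckets) {
      int[] res = new int[numRows];
      int bucketsz = numRows / numBuckets;
      int rem = numRows % numBuckets; // first 'rem' buckets get one extra row
      int start = 0;
      int bucket = 1;
      while (start < numRows) {
        int end = start + bucketsz;
        if (rem > 0) {
          end++;
          rem--;
        }
        end = Math.min(numRows, end);
        for (int i = start; i < end; i++) {
          res[i] = bucket;
        }
        start = end;
        bucket++;
      }
      return res;
    }

    public static void main(String[] args) {
      // 10 rows, 3 buckets -> 1 1 1 1 2 2 2 3 3 3
      for (int b : assignBuckets(10, 3)) {
        System.out.print(b + " ");
      }
    }
  }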
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java Tue Sep  2 19:56:56 2014
@@ -31,56 +31,52 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
 
-@WindowFunctionDescription
-(
-		description = @Description(
-								name = "percent_rank",
-								value = "_FUNC_(x) PERCENT_RANK is similar to CUME_DIST, but it uses rank values rather " +
-									"than row counts in its numerator. PERCENT_RANK of a row is calculated as: " +
-									"(rank of row in its partition - 1) / (number of rows in the partition - 1)"
-								),
-		supportsWindow = false,
-		pivotResult = true,
-		rankingFunction = true,
-		impliesOrder = true
+@WindowFunctionDescription(
+  description = @Description(
+    name = "percent_rank",
+    value = "_FUNC_(x) PERCENT_RANK is similar to CUME_DIST, but it uses rank values rather " +
+            "than row counts in its numerator. PERCENT_RANK of a row is calculated as: " +
+            "(rank of row in its partition - 1) / (number of rows in the partition - 1)"
+  ),
+  supportsWindow = false,
+  pivotResult = true,
+  rankingFunction = true,
+  impliesOrder = true
 )
-public class GenericUDAFPercentRank extends GenericUDAFRank
-{
-	static final Log LOG = LogFactory.getLog(GenericUDAFPercentRank.class.getName());
-
-	@Override
-  protected GenericUDAFAbstractRankEvaluator createEvaluator()
-	{
-		return new GenericUDAFPercentRankEvaluator();
-	}
-
-	public static class GenericUDAFPercentRankEvaluator extends GenericUDAFAbstractRankEvaluator
-	{
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
-		{
-			super.init(m, parameters);
-			return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
-		}
-
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException
-		{
-			ArrayList<IntWritable> ranks =  ((RankBuffer) agg).rowNums;
-			double sz = ranks.size();
-			if ( sz > 1 ) {
+public class GenericUDAFPercentRank extends GenericUDAFRank {
+
+  static final Log LOG = LogFactory.getLog(GenericUDAFPercentRank.class.getName());
+
+  @Override
+  protected GenericUDAFAbstractRankEvaluator createEvaluator() {
+    return new GenericUDAFPercentRankEvaluator();
+  }
+
+  public static class GenericUDAFPercentRankEvaluator extends GenericUDAFAbstractRankEvaluator {
+
+    @Override
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+      super.init(m, parameters);
+      return ObjectInspectorFactory.getStandardListObjectInspector(
+        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+    }
+
+    @Override
+    public Object terminate(AggregationBuffer agg) throws HiveException {
+      ArrayList<IntWritable> ranks = ((RankBuffer) agg).rowNums;
+      double sz = ranks.size();
+      if (sz > 1) {
         sz = sz - 1;
       }
-			ArrayList<DoubleWritable> pranks = new ArrayList<DoubleWritable>(ranks.size());
+      ArrayList<DoubleWritable> pranks = new ArrayList<DoubleWritable>(ranks.size());
+
+      for (IntWritable i : ranks) {
+        double pr = ((double) i.get() - 1) / sz;
+        pranks.add(new DoubleWritable(pr));
+      }
 
-			for(IntWritable i : ranks)
-			{
-				double pr = ((double)i.get() - 1)/sz;
-				pranks.add(new DoubleWritable(pr));
-			}
-
-			return pranks;
-		}
-	}
+      return pranks;
+    }
+  }
 }
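
terminate() above maps each rank r in a partition of n rows to
(r - 1) / (n - 1), clamping the denominator to 1 so a single-row partition
yields 0 rather than dividing by zero. For ranks 1, 2, 2, 4 over 4 rows the
results are 0.0, 0.333..., 0.333..., 1.0. A compact sketch of the same
computation (plain Java; not part of the patch):

  // Standalone sketch of PERCENT_RANK: (rank - 1) / (n - 1).
  public class PercentRankSketch {
    static double[] percentRank(int[] ranks) {
      double sz = ranks.length;
      if (sz > 1) {
        sz = sz - 1; // denominator is n - 1 for multi-row partitions
      }
      double[] out = new double[ranks.length];
      for (int i = 0; i < ranks.length; i++) {
        out[i] = (ranks[i] - 1) / sz;
      }
      return out;
    }

    public static void main(String[] args) {
      for (double pr : percentRank(new int[] { 1, 2, 2, 4 })) {
        System.out.print(pr + " "); // 0.0 0.333... 0.333... 1.0
      }
    }
  }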
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java Tue Sep  2 19:56:56 2014
@@ -38,170 +38,150 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.IntWritable;
 
-@WindowFunctionDescription
-(
-		description = @Description(
-								name = "rank",
-								value = "_FUNC_(x)"
-								),
-		supportsWindow = false,
-		pivotResult = true,
-		rankingFunction = true,
-		impliesOrder = true
-)
-public class GenericUDAFRank extends AbstractGenericUDAFResolver
-{
-	static final Log LOG = LogFactory.getLog(GenericUDAFRank.class.getName());
-
-	@Override
-	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException
-	{
-		if (parameters.length < 1)
-		{
-			throw new UDFArgumentTypeException(parameters.length - 1, "One or more arguments are expected.");
-		}
-		for(int i=0; i<parameters.length; i++)
-		{
-			ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[i]);
-			if (!ObjectInspectorUtils.compareSupported(oi))
-			{
-				throw new UDFArgumentTypeException(i,
-					"Cannot support comparison of map<> type or complex type containing map<>.");
-			}
-		}
-		return createEvaluator();
-	}
-
-	protected GenericUDAFAbstractRankEvaluator createEvaluator()
-	{
-		return new GenericUDAFRankEvaluator();
-	}
-
-	static class RankBuffer implements AggregationBuffer
-	{
-		ArrayList<IntWritable> rowNums;
-		int currentRowNum;
-		Object[] currVal;
-		int currentRank;
-		int numParams;
-		boolean supportsStreaming;
-
-		RankBuffer(int numParams, boolean supportsStreaming)
-		{
-			this.numParams = numParams;
-			this.supportsStreaming = supportsStreaming;
-			init();
-		}
-
-		void init()
-		{
-			rowNums = new ArrayList<IntWritable>();
-			currentRowNum = 0;
-			currentRank = 0;
-			currVal = new Object[numParams];
-			if ( supportsStreaming ) {
-			  /* initialize rowNums to have 1 row */
-			  rowNums.add(null);
-			}
-		}
-		
-		void incrRowNum() { currentRowNum++; }
-
-		void addRank()
-		{
-		  if ( supportsStreaming ) {
-		    rowNums.set(0, new IntWritable(currentRank));
-		  } else {
-		    rowNums.add(new IntWritable(currentRank));
-		  }
-		}
-	}
-
-	public static abstract class GenericUDAFAbstractRankEvaluator extends GenericUDAFEvaluator 
-	{
-		ObjectInspector[] inputOI;
-		ObjectInspector[] outputOI;
-		boolean isStreamingMode = false;
-
-		protected boolean isStreaming() {
-		  return isStreamingMode;
-		}
-
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException
-		{
-			super.init(m, parameters);
-			if (m != Mode.COMPLETE)
-			{
-				throw new HiveException(
-						"Only COMPLETE mode supported for Rank function");
-			}
-			inputOI = parameters;
-			outputOI = new ObjectInspector[inputOI.length];
-			for(int i=0; i < inputOI.length; i++)
-			{
-				outputOI[i] = ObjectInspectorUtils.getStandardObjectInspector(inputOI[i], ObjectInspectorCopyOption.JAVA);
-			}
-			return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
-		}
-
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException
-		{
-			return new RankBuffer(inputOI.length, isStreamingMode);
-		}
-
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException
-		{
-			((RankBuffer) agg).init();
-		}
-
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException
-		{
-			RankBuffer rb = (RankBuffer) agg;
-			 int c = GenericUDAFRank.compare(rb.currVal, outputOI, parameters, inputOI);
-			 rb.incrRowNum();
-			if ( rb.currentRowNum == 1 || c != 0 )
-			{
-				nextRank(rb);
-				rb.currVal = GenericUDAFRank.copyToStandardObject(parameters, inputOI, ObjectInspectorCopyOption.JAVA);
-			}
-			rb.addRank();
-		}
-
-		/*
-		 * Called when the value in the partition has changed. Update the currentRank
-		 */
-		protected void nextRank(RankBuffer rb)
-		{
-			rb.currentRank = rb.currentRowNum;
-		}
-
-		@Override
-		public Object terminatePartial(AggregationBuffer agg) throws HiveException
-		{
-			throw new HiveException("terminatePartial not supported");
-		}
-
-		@Override
-		public void merge(AggregationBuffer agg, Object partial) throws HiveException
-		{
-			throw new HiveException("merge not supported");
-		}
-
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException
-		{
-			return ((RankBuffer) agg).rowNums;
-		}
-
-	}
-
-  public static class GenericUDAFRankEvaluator extends
-      GenericUDAFAbstractRankEvaluator implements
-      ISupportStreamingModeForWindowing {
+@WindowFunctionDescription(
+  description = @Description(
+    name = "rank",
+    value = "_FUNC_(x)"),
+  supportsWindow = false,
+  pivotResult = true,
+  rankingFunction = true,
+  impliesOrder = true)
+public class GenericUDAFRank extends AbstractGenericUDAFResolver {
+
+  static final Log LOG = LogFactory.getLog(GenericUDAFRank.class.getName());
+
+  @Override
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+    if (parameters.length < 1) {
+      throw new UDFArgumentTypeException(parameters.length - 1,
+        "One or more arguments are expected.");
+    }
+    for (int i = 0; i < parameters.length; i++) {
+      ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[i]);
+      if (!ObjectInspectorUtils.compareSupported(oi)) {
+        throw new UDFArgumentTypeException(i,
+          "Cannot support comparison of map<> type or complex type containing map<>.");
+      }
+    }
+    return createEvaluator();
+  }
+
+  protected GenericUDAFAbstractRankEvaluator createEvaluator() {
+    return new GenericUDAFRankEvaluator();
+  }
+
+  static class RankBuffer implements AggregationBuffer {
+
+    ArrayList<IntWritable> rowNums;
+    int currentRowNum;
+    Object[] currVal;
+    int currentRank;
+    int numParams;
+    boolean supportsStreaming;
+
+    RankBuffer(int numParams, boolean supportsStreaming) {
+      this.numParams = numParams;
+      this.supportsStreaming = supportsStreaming;
+      init();
+    }
+
+    void init() {
+      rowNums = new ArrayList<IntWritable>();
+      currentRowNum = 0;
+      currentRank = 0;
+      currVal = new Object[numParams];
+      if (supportsStreaming) {
+        /* initialize rowNums to have 1 row */
+        rowNums.add(null);
+      }
+    }
+
+    void incrRowNum() { currentRowNum++; }
+
+    void addRank() {
+      if (supportsStreaming) {
+        rowNums.set(0, new IntWritable(currentRank));
+      } else {
+        rowNums.add(new IntWritable(currentRank));
+      }
+    }
+  }
+
+  public static abstract class GenericUDAFAbstractRankEvaluator extends GenericUDAFEvaluator {
+
+    ObjectInspector[] inputOI;
+    ObjectInspector[] outputOI;
+    boolean isStreamingMode = false;
+
+    protected boolean isStreaming() {
+      return isStreamingMode;
+    }
+
+    @Override
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+      super.init(m, parameters);
+      if (m != Mode.COMPLETE) {
+        throw new HiveException("Only COMPLETE mode supported for Rank function");
+      }
+      inputOI = parameters;
+      outputOI = new ObjectInspector[inputOI.length];
+      for (int i = 0; i < inputOI.length; i++) {
+        outputOI[i] = ObjectInspectorUtils.getStandardObjectInspector(inputOI[i],
+          ObjectInspectorCopyOption.JAVA);
+      }
+      return ObjectInspectorFactory.getStandardListObjectInspector(
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new RankBuffer(inputOI.length, isStreamingMode);
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      ((RankBuffer) agg).init();
+    }
+
+    @Override
+    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+      RankBuffer rb = (RankBuffer) agg;
+      int c = GenericUDAFRank.compare(rb.currVal, outputOI, parameters, inputOI);
+      rb.incrRowNum();
+      if (rb.currentRowNum == 1 || c != 0) {
+        nextRank(rb);
+        rb.currVal =
+          GenericUDAFRank.copyToStandardObject(parameters, inputOI, ObjectInspectorCopyOption.JAVA);
+      }
+      rb.addRank();
+    }
+
+    /*
+     * Called when the value in the partition has changed. Update the currentRank
+     */
+    protected void nextRank(RankBuffer rb) {
+      rb.currentRank = rb.currentRowNum;
+    }
+
+    @Override
+    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+      throw new HiveException("terminatePartial not supported");
+    }
+
+    @Override
+    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+      throw new HiveException("merge not supported");
+    }
+
+    @Override
+    public Object terminate(AggregationBuffer agg) throws HiveException {
+      return ((RankBuffer) agg).rowNums;
+    }
+
+  }
+
+  public static class GenericUDAFRankEvaluator extends GenericUDAFAbstractRankEvaluator
+    implements ISupportStreamingModeForWindowing {
 
     @Override
     public Object getNextResult(AggregationBuffer agg) throws HiveException {
@@ -215,18 +195,15 @@ public class GenericUDAFRank extends Abs
     }
 
     @Override
-    public int getRowsRemainingAfterTerminate()
-        throws HiveException {
+    public int getRowsRemainingAfterTerminate() throws HiveException {
       return 0;
     }
   }
 
   public static int compare(Object[] o1, ObjectInspector[] oi1, Object[] o2,
-      ObjectInspector[] oi2)
-  {
+    ObjectInspector[] oi2) {
     int c = 0;
-    for (int i = 0; i < oi1.length; i++)
-    {
+    for (int i = 0; i < oi1.length; i++) {
       c = ObjectInspectorUtils.compare(o1[i], oi1[i], o2[i], oi2[i]);
       if (c != 0) {
         return c;
@@ -235,15 +212,11 @@ public class GenericUDAFRank extends Abs
     return c;
   }
 
-  public static Object[] copyToStandardObject(Object[] o,
-      ObjectInspector[] oi,
-      ObjectInspectorCopyOption objectInspectorOption)
-  {
+  public static Object[] copyToStandardObject(Object[] o, ObjectInspector[] oi,
+    ObjectInspectorCopyOption objectInspectorOption) {
     Object[] out = new Object[o.length];
-    for (int i = 0; i < oi.length; i++)
-    {
-      out[i] = ObjectInspectorUtils.copyToStandardObject(o[i], oi[i],
-          objectInspectorOption);
+    for (int i = 0; i < oi.length; i++) {
+      out[i] = ObjectInspectorUtils.copyToStandardObject(o[i], oi[i], objectInspectorOption);
     }
     return out;
   }
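
In iterate() above the rank advances only when the ORDER BY values change, so
tied rows share a rank and the next distinct value jumps to its row number
(standard RANK semantics, with gaps). A small sketch over a pre-sorted column
(plain Java; not part of the patch, names invented for illustration):

  // Standalone sketch of RANK over a pre-sorted column.
  import java.util.Arrays;
  import java.util.List;
  import java.util.Objects;

  public class RankSketch {
    static int[] rank(List<String> sortedVals) {
      int[] out = new int[sortedVals.size()];
      int currentRank = 0;
      String prev = null;
      for (int row = 1; row <= sortedVals.size(); row++) {
        String cur = sortedVals.get(row - 1);
        // Advance the rank only on the first row or when the value changes,
        // mirroring nextRank() in GenericUDAFAbstractRankEvaluator.
        if (row == 1 || !Objects.equals(cur, prev)) {
          currentRank = row;
          prev = cur;
        }
        out[row - 1] = currentRank;
      }
      return out;
    }

    public static void main(String[] args) {
      // a a b b b c -> [1, 1, 3, 3, 3, 6]
      System.out.println(
        Arrays.toString(rank(Arrays.asList("a", "a", "b", "b", "b", "c"))));
    }
  }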

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java Tue Sep  2 19:56:56 2014
@@ -34,110 +34,89 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.io.IntWritable;
 
-@WindowFunctionDescription
-(
-		description = @Description(
-								name = "row_number",
-								value = "_FUNC_() - The ROW_NUMBER function assigns a unique number (sequentially, starting from 1, as defined by ORDER BY) to each row within the partition."
-								),
-		supportsWindow = false,
-		pivotResult = true
+@WindowFunctionDescription(
+  description = @Description(
+    name = "row_number",
+    value = "_FUNC_() - The ROW_NUMBER function assigns a unique number (sequentially, starting "
+            + "from 1, as defined by ORDER BY) to each row within the partition."
+  ),
+  supportsWindow = false,
+  pivotResult = true
 )
-public class GenericUDAFRowNumber extends AbstractGenericUDAFResolver
-{
-	static final Log LOG = LogFactory.getLog(GenericUDAFRowNumber.class.getName());
-
-	@Override
-	public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-			throws SemanticException
-	{
-		if (parameters.length != 0)
-		{
-			throw new UDFArgumentTypeException(parameters.length - 1,
-					"No argument is expected.");
-		}
-		return new GenericUDAFRowNumberEvaluator();
-	}
-
-	static class RowNumberBuffer implements AggregationBuffer
-	{
-		ArrayList<IntWritable> rowNums;
-		int nextRow;
-
-		void init()
-		{
-			rowNums = new ArrayList<IntWritable>();
-		}
-
-		RowNumberBuffer()
-		{
-			init();
-			nextRow = 1;
-		}
-
-		void incr()
-		{
-			rowNums.add(new IntWritable(nextRow++));
-		}
-	}
-
-	public static class GenericUDAFRowNumberEvaluator extends
-			GenericUDAFEvaluator
-	{
-
-		@Override
-		public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-				throws HiveException
-		{
-			super.init(m, parameters);
-			if (m != Mode.COMPLETE)
-			{
-				throw new HiveException("Only COMPLETE mode supported for row_number function");
-			}
-
-			return ObjectInspectorFactory.getStandardListObjectInspector(
-					PrimitiveObjectInspectorFactory.writableIntObjectInspector);
-		}
-
-		@Override
-		public AggregationBuffer getNewAggregationBuffer() throws HiveException
-		{
-			return new RowNumberBuffer();
-		}
-
-		@Override
-		public void reset(AggregationBuffer agg) throws HiveException
-		{
-			((RowNumberBuffer) agg).init();
-		}
-
-		@Override
-		public void iterate(AggregationBuffer agg, Object[] parameters)
-				throws HiveException
-		{
-			((RowNumberBuffer) agg).incr();
-		}
-
-		@Override
-		public Object terminatePartial(AggregationBuffer agg)
-				throws HiveException
-		{
-			throw new HiveException("terminatePartial not supported");
-		}
-
-		@Override
-		public void merge(AggregationBuffer agg, Object partial)
-				throws HiveException
-		{
-			throw new HiveException("merge not supported");
-		}
-
-		@Override
-		public Object terminate(AggregationBuffer agg) throws HiveException
-		{
-			return ((RowNumberBuffer) agg).rowNums;
-		}
+public class GenericUDAFRowNumber extends AbstractGenericUDAFResolver {
 
-	}
+  static final Log LOG = LogFactory.getLog(GenericUDAFRowNumber.class.getName());
+
+  @Override
+  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
+    if (parameters.length != 0) {
+      throw new UDFArgumentTypeException(parameters.length - 1, "No argument is expected.");
+    }
+    return new GenericUDAFRowNumberEvaluator();
+  }
+
+  static class RowNumberBuffer implements AggregationBuffer {
+
+    ArrayList<IntWritable> rowNums;
+    int nextRow;
+
+    void init() {
+      rowNums = new ArrayList<IntWritable>();
+    }
+
+    RowNumberBuffer() {
+      init();
+      nextRow = 1;
+    }
+
+    void incr() {
+      rowNums.add(new IntWritable(nextRow++));
+    }
+  }
+
+  public static class GenericUDAFRowNumberEvaluator extends GenericUDAFEvaluator {
+
+    @Override
+    public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
+      super.init(m, parameters);
+      if (m != Mode.COMPLETE) {
+        throw new HiveException("Only COMPLETE mode supported for row_number function");
+      }
+
+      return ObjectInspectorFactory.getStandardListObjectInspector(
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new RowNumberBuffer();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      ((RowNumberBuffer) agg).init();
+    }
+
+    @Override
+    public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
+      ((RowNumberBuffer) agg).incr();
+    }
+
+    @Override
+    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
+      throw new HiveException("terminatePartial not supported");
+    }
+
+    @Override
+    public void merge(AggregationBuffer agg, Object partial) throws HiveException {
+      throw new HiveException("merge not supported");
+    }
+
+    @Override
+    public Object terminate(AggregationBuffer agg) throws HiveException {
+      return ((RowNumberBuffer) agg).rowNums;
+    }
+
+  }
 }
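
Unlike rank, row_number() ignores ties entirely: incr() simply appends
nextRow++ for every row, so the output over any partition is 1, 2, 3, ....
On the tied column from the rank sketch above, row_number yields 1 2 3 4 5 6
where rank yields 1 1 3 3 3 6. A minimal sketch (plain Java; not part of the
patch):

  // Standalone sketch of ROW_NUMBER.
  import java.util.ArrayList;
  import java.util.List;

  public class RowNumberSketch {
    public static void main(String[] args) {
      int nextRow = 1;
      List<Integer> rowNums = new ArrayList<Integer>();
      for (int i = 0; i < 6; i++) {
        rowNums.add(nextRow++); // one sequential number per row; ties ignored
      }
      System.out.println(rowNums); // [1, 2, 3, 4, 5, 6]
    }
  }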
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java Tue Sep  2 19:56:56 2014
@@ -46,7 +46,7 @@ public abstract class GenericUDFBasePad 
   public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
     if (arguments.length != 3) {
       throw new UDFArgumentException(udfName + " requires three arguments. Found :"
-	  + arguments.length);
+        + arguments.length);
     }
     converter1 = checkTextArguments(arguments, 0);
     converter2 = checkIntArguments(arguments, 1);