Posted to commits@hive.apache.org by ha...@apache.org on 2014/11/27 02:07:35 UTC

svn commit: r1641980 [1/4] - in /hive/trunk: ./ beeline/src/test/org/apache/hive/beeline/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/ hcatalog/...

Author: hashutosh
Date: Thu Nov 27 01:07:32 2014
New Revision: 1641980

URL: http://svn.apache.org/r1641980
Log:
HIVE-8828 : Remove hadoop 20 shims (Ashutosh Chauhan via Thejas Nair & Brock Noland)
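
The bulk of this patch is a mechanical migration: with the 0.20 shim gone, call
sites no longer need the reflective ShimLoader indirection for security APIs and
can call UserGroupInformation (and the new static shims Utils class, added
below) directly. A representative before/after, distilled from the hunks that
follow:

    // Before: every UGI operation went through the shim layer.
    UserGroupInformation ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
    String user = ShimLoader.getHadoopShims().getShortUserName(ugi);

    // After: direct calls, valid on all remaining (1.x / 2.x) Hadoop lines.
    UserGroupInformation ugi = Utils.getUGIForConf(conf);
    String user = ugi.getShortUserName();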

Added:
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/thrift/client/
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/thrift/client/TUGIAssumingTransport.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/security/
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/security/token/
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/security/token/delegation/
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/security/token/delegation/DelegationTokenSelector.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/security/token/delegation/HiveDelegationTokenSupport.java
Removed:
    hive/trunk/ql/src/test/queries/clientpositive/archive.q
    hive/trunk/ql/src/test/queries/clientpositive/archive_corrupt.q
    hive/trunk/ql/src/test/queries/clientpositive/split_sample.q
    hive/trunk/ql/src/test/results/clientpositive/archive.q.out
    hive/trunk/ql/src/test/results/clientpositive/archive_corrupt.q.out
    hive/trunk/ql/src/test/results/clientpositive/split_sample.q.out
    hive/trunk/shims/0.20/
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/DelegationTokenStore.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/client/
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/security/
Modified:
    hive/trunk/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java
    hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java
    hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
    hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
    hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java
    hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java
    hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
    hive/trunk/pom.xml
    hive/trunk/ql/pom.xml
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java
    hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table.q
    hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table2.q
    hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table2_h23.q
    hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table_h23.q
    hive/trunk/ql/src/test/queries/clientpositive/archive_excludeHadoop20.q
    hive/trunk/ql/src/test/queries/clientpositive/auto_join14.q
    hive/trunk/ql/src/test/queries/clientpositive/auto_join14_hadoop20.q
    hive/trunk/ql/src/test/queries/clientpositive/combine2.q
    hive/trunk/ql/src/test/queries/clientpositive/combine2_hadoop20.q
    hive/trunk/ql/src/test/queries/clientpositive/combine2_win.q
    hive/trunk/ql/src/test/queries/clientpositive/ctas.q
    hive/trunk/ql/src/test/queries/clientpositive/ctas_hadoop20.q
    hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_1.q
    hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_1_23.q
    hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_skew_1.q
    hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_skew_1_23.q
    hive/trunk/ql/src/test/queries/clientpositive/input12.q
    hive/trunk/ql/src/test/queries/clientpositive/input12_hadoop20.q
    hive/trunk/ql/src/test/queries/clientpositive/input39.q
    hive/trunk/ql/src/test/queries/clientpositive/join14.q
    hive/trunk/ql/src/test/queries/clientpositive/loadpart_err.q
    hive/trunk/ql/src/test/queries/clientpositive/sample_islocalmode_hook.q
    hive/trunk/ql/src/test/queries/clientpositive/sample_islocalmode_hook_hadoop20.q
    hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1.q
    hive/trunk/ql/src/test/queries/clientpositive/uber_reduce.q
    hive/trunk/ql/src/test/queries/clientpositive/udaf_percentile_approx_20.q
    hive/trunk/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2_h23.q.out
    hive/trunk/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table_h23.q.out
    hive/trunk/ql/src/test/results/clientpositive/archive_excludeHadoop20.q.out
    hive/trunk/ql/src/test/results/clientpositive/auto_join14.q.out
    hive/trunk/ql/src/test/results/clientpositive/combine2.q.out
    hive/trunk/ql/src/test/results/clientpositive/ctas.q.out
    hive/trunk/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
    hive/trunk/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
    hive/trunk/ql/src/test/results/clientpositive/input12.q.out
    hive/trunk/ql/src/test/results/clientpositive/input39.q.out
    hive/trunk/ql/src/test/results/clientpositive/join14.q.out
    hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out
    hive/trunk/ql/src/test/results/clientpositive/tez/ctas.q.out
    hive/trunk/ql/src/test/results/clientpositive/uber_reduce.q.out
    hive/trunk/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
    hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java
    hive/trunk/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
    hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
    hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionProxy.java
    hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java
    hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java
    hive/trunk/shims/aggregator/pom.xml
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
    hive/trunk/shims/common/pom.xml
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/ShimLoader.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
    hive/trunk/shims/pom.xml

Modified: hive/trunk/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
URL: http://svn.apache.org/viewvc/hive/trunk/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java (original)
+++ hive/trunk/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java Thu Nov 27 01:07:32 2014
@@ -31,6 +31,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hive.jdbc.HiveConnection;
 import org.apache.hive.beeline.BeeLine;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hive.service.auth.HiveAuthFactory;
 
 /**
@@ -201,7 +202,7 @@ public class ProxyAuthTest {
   }
 
   private static void storeTokenInJobConf(String tokenStr) throws Exception {
-    ShimLoader.getHadoopShims().setTokenStr(ShimLoader.getHadoopShims().getUGIForConf(new Configuration()),
+    Utils.setTokenStr(Utils.getUGIForConf(new Configuration()),
           tokenStr, HiveAuthFactory.HS2_CLIENT_TOKEN);
     System.out.println("Stored token " + tokenStr);
   }

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java Thu Nov 27 01:07:32 2014
@@ -30,6 +30,7 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.DefaultFileAccess;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -41,6 +42,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatus;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Shell;
 
@@ -373,8 +375,8 @@ public final class FileUtils {
   public static void checkFileAccessWithImpersonation(final FileSystem fs,
       final FileStatus stat, final FsAction action, final String user)
           throws IOException, AccessControlException, InterruptedException, Exception {
-    UserGroupInformation ugi = ShimLoader.getHadoopShims().getUGIForConf(fs.getConf());
-    String currentUser = ShimLoader.getHadoopShims().getShortUserName(ugi);
+    UserGroupInformation ugi = Utils.getUGIForConf(fs.getConf());
+    String currentUser = ugi.getShortUserName();
 
     if (user == null || currentUser.equals(user)) {
       // No need to impersonate user, do the checks as the currently configured user.
@@ -383,8 +385,9 @@ public final class FileUtils {
     }
 
     // Otherwise, try user impersonation. Current user must be configured to do user impersonation.
-    UserGroupInformation proxyUser = ShimLoader.getHadoopShims().createProxyUser(user);
-    ShimLoader.getHadoopShims().doAs(proxyUser, new PrivilegedExceptionAction<Object>() {
+    UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(
+        user, UserGroupInformation.getLoginUser());
+    proxyUser.doAs(new PrivilegedExceptionAction<Object>() {
       @Override
       public Object run() throws Exception {
         FileSystem fsAsUser = FileSystem.get(fs.getUri(), fs.getConf());

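The FileUtils change above inlines the impersonation idiom the secure shim used
to wrap: build a proxy UGI for the target user on top of the login user, then
doAs. Note that the cluster must authorize the login user as a proxy user for
the doAs to be honored; the MiniDFS-based tests further down grant that
explicitly, along the lines of:

    // Allow the current (login) user to impersonate any user from any host.
    conf.set("hadoop.proxyuser." + currentUserName + ".groups", "*");
    conf.set("hadoop.proxyuser." + currentUserName + ".hosts", "*");
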
Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Thu Nov 27 01:07:32 2014
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.conf.Valid
 import org.apache.hadoop.hive.conf.Validator.StringSet;
 import org.apache.hadoop.hive.conf.Validator.TimeValidator;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Shell;
@@ -2760,8 +2761,7 @@ public class HiveConf extends Configurat
    */
   public String getUser() throws IOException {
     try {
-      UserGroupInformation ugi = ShimLoader.getHadoopShims()
-        .getUGIForConf(this);
+      UserGroupInformation ugi = Utils.getUGIForConf(this);
       return ugi.getUserName();
     } catch (LoginException le) {
       throw new IOException(le);

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java Thu Nov 27 01:07:32 2014
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.thrift.TException;
 import org.slf4j.Logger;
@@ -254,7 +255,7 @@ class HiveClientCache {
 
     private HiveClientCacheKey(HiveConf hiveConf, final int threadId) throws IOException, LoginException {
       this.metaStoreURIs = hiveConf.getVar(HiveConf.ConfVars.METASTOREURIS);
-      ugi = ShimLoader.getHadoopShims().getUGIForConf(hiveConf);
+      ugi = Utils.getUGIForConf(hiveConf);
       this.hiveConf = hiveConf;
       this.threadId = threadId;
     }

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java Thu Nov 27 01:07:32 2014
@@ -29,7 +29,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
@@ -38,6 +37,7 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenSelector;
+import org.apache.hadoop.security.token.delegation.DelegationTokenSelector;
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.thrift.TException;

Modified: hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java (original)
+++ hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java Thu Nov 27 01:07:32 2014
@@ -30,7 +30,7 @@ import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.GroupMappingServiceProvider;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -129,9 +129,9 @@ public class MiniHiveKdc {
    */
   public UserGroupInformation loginUser(String principal)
       throws Exception {
-    ShimLoader.getHadoopShims().loginUserFromKeytab(principal,
+    UserGroupInformation.loginUserFromKeytab(principal,
         getKeyTabFile(principal));
-    return ShimLoader.getHadoopShims().getUGIForConf(conf);
+    return Utils.getUGIForConf(conf);
   }
 
   public Properties getKdcConf() {

Modified: hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java (original)
+++ hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java Thu Nov 27 01:07:32 2014
@@ -32,7 +32,7 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hive.jdbc.HiveConnection;
 import org.apache.hive.jdbc.miniHS2.MiniHS2;
@@ -231,7 +231,7 @@ public class TestJdbcWithMiniKdc {
   // Store the given token in the UGI
   private void storeToken(String tokenStr, UserGroupInformation ugi)
       throws Exception {
-    ShimLoader.getHadoopShims().setTokenStr(ugi,
+    Utils.setTokenStr(ugi,
         tokenStr, HiveAuthFactory.HS2_CLIENT_TOKEN);
   }
 

Modified: hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java (original)
+++ hive/trunk/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestMiniHiveKdc.java Thu Nov 27 01:07:32 2014
@@ -23,7 +23,7 @@ import static org.junit.Assert.*;
 import java.io.File;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -61,9 +61,8 @@ public class TestMiniHiveKdc {
     String servicePrinc = miniHiveKdc.getHiveServicePrincipal();
     assertNotNull(servicePrinc);
     miniHiveKdc.loginUser(servicePrinc);
-    assertTrue(ShimLoader.getHadoopShims().isLoginKeytabBased());
-    UserGroupInformation ugi =
-        ShimLoader.getHadoopShims().getUGIForConf(hiveConf);
+    assertTrue(UserGroupInformation.isLoginKeytabBased());
+    UserGroupInformation ugi = Utils.getUGIForConf(hiveConf);
     assertEquals(MiniHiveKdc.HIVE_SERVICE_PRINCIPAL, ugi.getShortUserName());
   }
 

Modified: hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java (original)
+++ hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProviderWithACL.java Thu Nov 27 01:07:32 2014
@@ -19,6 +19,7 @@ import org.apache.hadoop.fs.permission.A
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
 import org.apache.hadoop.security.UserGroupInformation;
 
@@ -56,7 +57,7 @@ public class TestStorageBasedMetastoreAu
 
     // Hadoop FS ACLs do not work with LocalFileSystem, so set up MiniDFS.
     HiveConf conf = super.createHiveConf();
-    String currentUserName = ShimLoader.getHadoopShims().getUGIForConf(conf).getShortUserName();
+    String currentUserName = Utils.getUGIForConf(conf).getShortUserName();
     conf.set("dfs.namenode.acls.enabled", "true");
     conf.set("hadoop.proxyuser." + currentUserName + ".groups", "*");
     conf.set("hadoop.proxyuser." + currentUserName + ".hosts", "*");

Modified: hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java (original)
+++ hive/trunk/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java Thu Nov 27 01:07:32 2014
@@ -67,7 +67,7 @@ public class TestHadoop20SAuthBridge ext
    */
   static volatile boolean isMetastoreTokenManagerInited;
 
-  private static class MyHadoopThriftAuthBridge20S extends HadoopThriftAuthBridge20S {
+  private static class MyHadoopThriftAuthBridge20S extends HadoopThriftAuthBridge {
     @Override
     public Server createServer(String keytabFile, String principalConf)
     throws TTransportException {
@@ -75,7 +75,7 @@ public class TestHadoop20SAuthBridge ext
       return new Server();
     }
 
-    static class Server extends HadoopThriftAuthBridge20S.Server {
+    static class Server extends HadoopThriftAuthBridge.Server {
       public Server() throws TTransportException {
         super();
       }
@@ -312,9 +312,9 @@ public class TestHadoop20SAuthBridge ext
 
     waitForMetastoreTokenInit();
 
-    HadoopThriftAuthBridge20S.Server.authenticationMethod
+    HadoopThriftAuthBridge.Server.authenticationMethod
                              .set(AuthenticationMethod.KERBEROS);
-    HadoopThriftAuthBridge20S.Server.remoteAddress.set(InetAddress.getLocalHost());
+    HadoopThriftAuthBridge.Server.remoteAddress.set(InetAddress.getLocalHost());
     return
         HiveMetaStore.getDelegationToken(ownerUgi.getShortUserName(),
             realUgi.getShortUserName());

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java Thu Nov 27 01:07:32 2014
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.WindowsPathUtil;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Shell;
 import org.junit.After;
@@ -90,7 +91,7 @@ public class StorageBasedMetastoreTestBa
     clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
 
-    ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf);
+    ugi = Utils.getUGIForConf(clientHiveConf);
 
     SessionState.start(new CliSessionState(clientHiveConf));
     msc = new HiveMetaStoreClient(clientHiveConf, null);

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java Thu Nov 27 01:07:32 2014
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.process
 import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /**
@@ -82,7 +83,7 @@ public class TestClientSideAuthorization
     clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
 
-    ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf);
+    ugi = Utils.getUGIForConf(clientHiveConf);
 
     SessionState.start(new CliSessionState(clientHiveConf));
     msc = new HiveMetaStoreClient(clientHiveConf, null);

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java Thu Nov 27 01:07:32 2014
@@ -45,6 +45,7 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /**
@@ -109,7 +110,7 @@ public class TestMetastoreAuthorizationP
     clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
 
-    ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf);
+    ugi = Utils.getUGIForConf(clientHiveConf);
 
     SessionState.start(new CliSessionState(clientHiveConf));
     msc = new HiveMetaStoreClient(clientHiveConf, null);

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java Thu Nov 27 01:07:32 2014
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
+import org.apache.hadoop.hive.shims.Utils;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -42,7 +43,7 @@ public class TestStorageBasedMetastoreAu
     // Hadoop FS ACLs do not work with LocalFileSystem, so set up MiniDFS.
     HiveConf conf = super.createHiveConf();
 
-    String currentUserName = ShimLoader.getHadoopShims().getUGIForConf(conf).getShortUserName();
+    String currentUserName = Utils.getUGIForConf(conf).getShortUserName();
     conf.set("hadoop.proxyuser." + currentUserName + ".groups", "*");
     conf.set("hadoop.proxyuser." + currentUserName + ".hosts", "*");
     dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java Thu Nov 27 01:07:32 2014
@@ -70,9 +70,9 @@ public class TestZooKeeperTokenStore ext
 
   private Configuration createConf(String zkPath) {
     Configuration conf = new Configuration();
-    conf.set(HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR, "localhost:"
+    conf.set(HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_CONNECT_STR, "localhost:"
         + this.zkPort);
-    conf.set(HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ZNODE, zkPath);
+    conf.set(HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_ZNODE, zkPath);
     return conf;
   }
 
@@ -80,7 +80,7 @@ public class TestZooKeeperTokenStore ext
     String ZK_PATH = "/zktokenstore-testTokenStorage";
     ts = new ZooKeeperTokenStore();
     Configuration conf = createConf(ZK_PATH);
-    conf.set(HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ACL, "world:anyone:cdrwa");
+    conf.set(HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_ACL, "world:anyone:cdrwa");
     ts.setConf(conf);
     ts.init(null, ServerMode.METASTORE);
 
@@ -128,7 +128,7 @@ public class TestZooKeeperTokenStore ext
     String ZK_PATH = "/zktokenstore-testAclNoAuth";
     Configuration conf = createConf(ZK_PATH);
     conf.set(
-        HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
+        HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
         "ip:127.0.0.1:r");
 
     ts = new ZooKeeperTokenStore();
@@ -146,7 +146,7 @@ public class TestZooKeeperTokenStore ext
     String aclString = "sasl:hive/host@TEST.DOMAIN:cdrwa, fail-parse-ignored";
     Configuration conf = createConf(ZK_PATH);
     conf.set(
-        HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
+        HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
         aclString);
 
     List<ACL> aclList = ZooKeeperTokenStore.parseACLs(aclString);
@@ -166,7 +166,7 @@ public class TestZooKeeperTokenStore ext
     String ZK_PATH = "/zktokenstore-testAcl";
     Configuration conf = createConf(ZK_PATH);
     conf.set(
-        HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
+        HadoopThriftAuthBridge.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
         "ip:127.0.0.1:cdrwa,world:anyone:cdrwa");
     ts = new ZooKeeperTokenStore();
     ts.setConf(conf);

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java Thu Nov 27 01:07:32 2014
@@ -125,12 +125,10 @@ public class QTestUtil {
   protected HiveConf conf;
   private Driver drv;
   private BaseSemanticAnalyzer sem;
-  private FileSystem fs;
   protected final boolean overWrite;
   private CliDriver cliDriver;
   private HadoopShims.MiniMrShim mr = null;
   private HadoopShims.MiniDFSShim dfs = null;
-  private boolean miniMr = false;
   private String hadoopVer = null;
   private QTestSetup setup = null;
   private boolean isSessionStateStarted = false;
@@ -309,7 +307,6 @@ public class QTestUtil {
       System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
     }
     conf = new HiveConf(Driver.class);
-    this.miniMr = (clusterType == MiniClusterType.mr);
     this.hadoopVer = getHadoopMainVersion(hadoopVer);
     qMap = new TreeMap<String, String>();
     qSkipSet = new HashSet<String>();
@@ -651,17 +648,6 @@ public class QTestUtil {
     FunctionRegistry.unregisterTemporaryUDF("test_error");
   }
 
-  private void runLoadCmd(String loadCmd) throws Exception {
-    int ecode = 0;
-    ecode = drv.run(loadCmd).getResponseCode();
-    drv.close();
-    if (ecode != 0) {
-      throw new Exception("load command: " + loadCmd
-          + " failed with exit code= " + ecode);
-    }
-    return;
-  }
-
   protected void runCreateTableCmd(String createTableCmd) throws Exception {
     int ecode = 0;
     ecode = drv.run(createTableCmd).getResponseCode();
@@ -712,7 +698,6 @@ public class QTestUtil {
     SessionState.start(conf);
     conf.set("hive.execution.engine", execEngine);
     db = Hive.get(conf);
-    fs = FileSystem.get(conf);
     drv = new Driver(conf);
     drv.init();
     pd = new ParseDriver();

Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Thu Nov 27 01:07:32 2014
@@ -414,8 +414,7 @@ public class HiveConnection implements j
     if (JdbcConnectionParams.AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(JdbcConnectionParams.AUTH_TYPE))) {
       // check delegation token in job conf if any
       try {
-        tokenStr = ShimLoader.getHadoopShims().
-            getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN);
+        tokenStr = org.apache.hadoop.hive.shims.Utils.getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN);
       } catch (IOException e) {
         throw new SQLException("Error reading token ", e);
       }

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Thu Nov 27 01:07:32 2014
@@ -192,6 +192,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
 import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
 import org.apache.hadoop.hive.thrift.TUGIContainingTransport;
@@ -330,7 +331,7 @@ public class HiveMetaStore extends Thrif
 
       UserGroupInformation ugi;
       try {
-        ugi = ShimLoader.getHadoopShims().getUGIForConf(getConf());
+        ugi = Utils.getUGIForConf(getConf());
       } catch (Exception ex) {
         throw new RuntimeException(ex);
       }

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Thu Nov 27 01:07:32 2014
@@ -125,6 +125,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.txn.TxnHandler;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
@@ -342,7 +343,6 @@ public class HiveMetaStoreClient impleme
   private void open() throws MetaException {
     isConnected = false;
     TTransportException tte = null;
-    HadoopShims shim = ShimLoader.getHadoopShims();
     boolean useSasl = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL);
     boolean useFramedTransport = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT);
     int clientSocketTimeout = (int) conf.getTimeVar(
@@ -366,7 +366,7 @@ public class HiveMetaStoreClient impleme
               // submission.
               String tokenSig = conf.get("hive.metastore.token.signature");
               // tokenSig could be null
-              tokenStrForm = shim.getTokenStrForm(tokenSig);
+              tokenStrForm = Utils.getTokenStrForm(tokenSig);
               if(tokenStrForm != null) {
                 // authenticate using delegation tokens via the "DIGEST" mechanism
                 transport = authBridge.createClientTransport(null, store.getHost(),
@@ -404,7 +404,7 @@ public class HiveMetaStoreClient impleme
           if (isConnected && !useSasl && conf.getBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI)){
             // Call set_ugi, only in unsecure mode.
             try {
-              UserGroupInformation ugi = shim.getUGIForConf(conf);
+              UserGroupInformation ugi = Utils.getUGIForConf(conf);
               client.set_ugi(ugi.getUserName(), Arrays.asList(ugi.getGroupNames()));
             } catch (LoginException e) {
               LOG.warn("Failed to do login. set_ugi() is not successful, " +
@@ -1208,7 +1208,7 @@ public class HiveMetaStoreClient impleme
   @Override
   public List<String> listPartitionNames(String dbName, String tblName,
       short max) throws MetaException, TException {
-    return filterHook.filterPartitionNames(dbName, tblName, 
+    return filterHook.filterPartitionNames(dbName, tblName,
         client.get_partition_names(dbName, tblName, max));
   }
 
@@ -1216,7 +1216,7 @@ public class HiveMetaStoreClient impleme
   public List<String> listPartitionNames(String db_name, String tbl_name,
       List<String> part_vals, short max_parts)
       throws MetaException, TException, NoSuchObjectException {
-    return filterHook.filterPartitionNames(db_name, tbl_name, 
+    return filterHook.filterPartitionNames(db_name, tbl_name,
         client.get_partition_names_ps(db_name, tbl_name, part_vals, max_parts));
   }
 

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RetryingMetaStoreClient.java Thu Nov 27 01:07:32 2014
@@ -31,6 +31,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.thrift.TApplicationException;
 import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TProtocolException;
@@ -122,11 +123,16 @@ public class RetryingMetaStoreClient imp
    * @throws MetaException
    */
   private void reloginExpiringKeytabUser() throws MetaException {
-    if(!ShimLoader.getHadoopShims().isSecurityEnabled()){
+    if(!UserGroupInformation.isSecurityEnabled()){
       return;
     }
     try {
-      ShimLoader.getHadoopShims().reLoginUserFromKeytab();
+      UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+      //checkTGT calls ugi.relogin only after checking if it is close to tgt expiry
+      //hadoop relogin is actually done only every x minutes (x=10 in hadoop 1.x)
+      if(ugi.isFromKeytab()){
+        ugi.checkTGTAndReloginFromKeytab();
+      }
     } catch (IOException e) {
       String msg = "Error doing relogin using keytab " + e.getMessage();
       LOG.error(msg, e);

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/TUGIBasedProcessor.java Thu Nov 27 01:07:32 2014
@@ -25,11 +25,12 @@ import java.security.PrivilegedException
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface;
 import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.set_ugi_args;
 import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.set_ugi_result;
-import org.apache.hadoop.hive.shims.HadoopShims;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.thrift.TUGIContainingTransport;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.thrift.ProcessFunction;
@@ -56,7 +57,7 @@ public class TUGIBasedProcessor<I extend
   private final I iface;
   private final Map<String,  org.apache.thrift.ProcessFunction<Iface, ? extends  TBase>>
     functions;
-  private final HadoopShims shim;
+  static final Log LOG = LogFactory.getLog(TUGIBasedProcessor.class);
 
   public TUGIBasedProcessor(I iface) throws SecurityException, NoSuchFieldException,
     IllegalArgumentException, IllegalAccessException, NoSuchMethodException,
@@ -64,7 +65,6 @@ public class TUGIBasedProcessor<I extend
     super(iface);
     this.iface = iface;
     this.functions = getProcessMapView();
-    shim = ShimLoader.getHadoopShims();
   }
 
   @SuppressWarnings("unchecked")
@@ -115,7 +115,7 @@ public class TUGIBasedProcessor<I extend
         }
       };
       try {
-        shim.doAs(clientUgi, pvea);
+        clientUgi.doAs(pvea);
         return true;
       } catch (RuntimeException rte) {
         if (rte.getCause() instanceof TException) {
@@ -127,7 +127,11 @@ public class TUGIBasedProcessor<I extend
       } catch (IOException ioe) {
         throw new RuntimeException(ioe); // unexpected!
       } finally {
-          shim.closeAllForUGI(clientUgi);
+          try {
+            FileSystem.closeAllForUGI(clientUgi);
+          } catch (IOException e) {
+            LOG.error("Could not clean up file-system handles for UGI: " + clientUgi, e);
+          }
       }
     }
   }
@@ -160,8 +164,7 @@ public class TUGIBasedProcessor<I extend
     set_ugi_result result = fn.getResult(iface, args);
     List<String> principals = result.getSuccess();
     // Store the ugi in transport and then continue as usual.
-    ugiTrans.setClientUGI(shim.createRemoteUser(principals.remove(principals.size()-1),
-        principals));
+    ugiTrans.setClientUGI(UserGroupInformation.createRemoteUser(principals.remove(principals.size()-1)));
     oprot.writeMessageBegin(new TMessage(msg.name, TMessageType.REPLY, msg.seqid));
     result.write(oprot);
     oprot.writeMessageEnd();

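TUGIBasedProcessor likewise drops its HadoopShims field in favor of direct
UserGroupInformation and FileSystem calls. The closeAllForUGI call in the
finally block matters: FileSystem.get caches instances keyed by (scheme,
authority, UGI), so a metastore serving many distinct client UGIs must drop
each client's cached handles after the doAs or the cache grows without bound.
Condensed from the hunk above:

    try {
      clientUgi.doAs(pvea);  // run the Thrift call as the client user
    } finally {
      try {
        FileSystem.closeAllForUGI(clientUgi);  // release this UGI's cached FS handles
      } catch (IOException e) {
        LOG.error("Could not clean up file-system handles for UGI: " + clientUgi, e);
      }
    }
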
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java Thu Nov 27 01:07:32 2014
@@ -55,6 +55,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -261,11 +262,11 @@ public class Warehouse {
     }
     final UserGroupInformation ugi;
     try {
-      ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
+      ugi = Utils.getUGIForConf(conf);
     } catch (LoginException le) {
       throw new IOException(le);
     }
-    String user = ShimLoader.getHadoopShims().getShortUserName(ugi);
+    String user = ugi.getShortUserName();
     //check whether owner can delete
     if (stat.getOwner().equals(user) &&
         stat.getPermission().getUserAction().implies(FsAction.WRITE)) {

Modified: hive/trunk/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/pom.xml?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/pom.xml (original)
+++ hive/trunk/pom.xml Thu Nov 27 01:07:32 2014
@@ -117,7 +117,6 @@
     <derby.version>10.11.1.1</derby.version>
     <guava.version>11.0.2</guava.version>
     <groovy.version>2.1.6</groovy.version>
-    <hadoop-20.version>0.20.2</hadoop-20.version>
     <hadoop-20S.version>1.2.1</hadoop-20S.version>
     <hadoop-23.version>2.5.0</hadoop-23.version>
     <hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>

Modified: hive/trunk/ql/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/pom.xml?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/pom.xml (original)
+++ hive/trunk/ql/pom.xml Thu Nov 27 01:07:32 2014
@@ -449,6 +449,12 @@
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-archives</artifactId>
+          <version>${hadoop-23.version}</version>
+          <optional>true</optional>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-core</artifactId>
           <version>${hadoop-23.version}</version>
           <optional>true</optional>

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Thu Nov 27 01:07:32 2014
@@ -115,6 +115,7 @@ import org.apache.hadoop.hive.ql.session
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
@@ -1348,7 +1349,7 @@ public class Driver implements CommandPr
           perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.PRE_HOOK + peh.getClass().getName());
 
           ((PreExecute) peh).run(SessionState.get(), plan.getInputs(), plan.getOutputs(),
-              ShimLoader.getHadoopShims().getUGIForConf(conf));
+              Utils.getUGIForConf(conf));
 
           perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PRE_HOOK + peh.getClass().getName());
         }
@@ -1517,7 +1518,7 @@ public class Driver implements CommandPr
 
           ((PostExecute) peh).run(SessionState.get(), plan.getInputs(), plan.getOutputs(),
               (SessionState.get() != null ? SessionState.get().getLineageState().getLineageInfo()
-                  : null), ShimLoader.getHadoopShims().getUGIForConf(conf));
+                  : null), Utils.getUGIForConf(conf));
 
           perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.POST_HOOK + peh.getClass().getName());
         }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java Thu Nov 27 01:07:32 2014
@@ -164,15 +164,17 @@ public final class ArchiveUtils {
       }
     }
 
-    public URI getHarUri(URI original, HadoopShims shim) throws HiveException {
-      URI harUri = null;
-      try {
-        harUri = shim.getHarUri(original, base, originalBase);
-      } catch (URISyntaxException e) {
-        throw new HiveException("Couldn't create har URI for location", e);
+    public URI getHarUri(URI original) throws URISyntaxException {
+      URI relative = originalBase.relativize(original);
+      if (relative.isAbsolute()) {
+        throw new URISyntaxException("Couldn't create URI for location.",
+                                     "Relative: " + relative + " Base: "
+                                     + base + " OriginalBase: " + originalBase);
       }
 
-      return harUri;
+      return base.resolve(relative);
+
+
     }
   }
 

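The new getHarUri is plain java.net.URI arithmetic: relativize the original
location against the original base, then resolve the result against the har
base, failing if the location turns out not to live under the base (relativize
returns its argument unchanged, still absolute, in that case). A worked example
with made-up locations:

    URI originalBase = URI.create("hdfs://nn/warehouse/tbl/");
    URI base         = URI.create("har:/warehouse/tbl/data.har/");
    URI original     = URI.create("hdfs://nn/warehouse/tbl/ds=1/");

    URI relative = originalBase.relativize(original);  // "ds=1/"
    // relative.isAbsolute() here would mean original was outside originalBase
    URI harUri = base.resolve(relative);  // "har:/warehouse/tbl/data.har/ds=1/"
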
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Thu Nov 27 01:07:32 2014
@@ -172,6 +172,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.tools.HadoopArchives;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hive.common.util.AnnotationUtils;
@@ -1297,7 +1298,6 @@ public class DDLTask extends Task<DDLWor
     // ARCHIVE_INTERMEDIATE_DIR_SUFFIX that's the same level as the partition,
     // if it does not already exist. If it does exist, we assume the dir is good
     // to use as the move operation that created it is atomic.
-    HadoopShims shim = ShimLoader.getHadoopShims();
     if (!pathExists(intermediateArchivedDir) &&
         !pathExists(intermediateOriginalDir)) {
 
@@ -1319,7 +1319,16 @@ public class DDLTask extends Task<DDLWor
             tbl.getTableName(), partSpecInfo.getName());
         jobname = Utilities.abbreviate(jobname, maxJobNameLen - 6);
         conf.setVar(HiveConf.ConfVars.HADOOPJOBNAME, jobname);
-        ret = shim.createHadoopArchive(conf, originalDir, tmpPath, archiveName);
+        HadoopArchives har = new HadoopArchives(conf);
+        List<String> args = new ArrayList<String>();
+
+        args.add("-archiveName");
+        args.add(archiveName);
+        args.add("-p");
+        args.add(originalDir.toString());
+        args.add(tmpPath.toString());
+
+        ret = ToolRunner.run(har, args.toArray(new String[0]));
       } catch (Exception e) {
         throw new HiveException(e);
       }
@@ -1380,8 +1389,7 @@ public class DDLTask extends Task<DDLWor
     try {
       for(Partition p: partitions) {
         URI originalPartitionUri = ArchiveUtils.addSlash(p.getDataLocation().toUri());
-        URI test = p.getDataLocation().toUri();
-        URI harPartitionDir = harHelper.getHarUri(originalPartitionUri, shim);
+        URI harPartitionDir = harHelper.getHarUri(originalPartitionUri);
         StringBuilder authority = new StringBuilder();
         if(harPartitionDir.getUserInfo() != null) {
           authority.append(harPartitionDir.getUserInfo()).append("@");
@@ -1414,7 +1422,7 @@ public class DDLTask extends Task<DDLWor
   }
 
   private int unarchive(Hive db, AlterTableSimpleDesc simpleDesc)
-      throws HiveException {
+      throws HiveException, URISyntaxException {
 
     Table tbl = db.getTable(simpleDesc.getTableName());
 
@@ -1489,8 +1497,7 @@ public class DDLTask extends Task<DDLWor
     URI archiveUri = archivePath.toUri();
     ArchiveUtils.HarPathHelper harHelper = new ArchiveUtils.HarPathHelper(conf,
         archiveUri, originalUri);
-    HadoopShims shim = ShimLoader.getHadoopShims();
-    URI sourceUri = harHelper.getHarUri(originalUri, shim);
+    URI sourceUri = harHelper.getHarUri(originalUri);
     Path sourceDir = new Path(sourceUri.getScheme(), sourceUri.getAuthority(), sourceUri.getPath());
 
     if(!pathExists(intermediateArchivedDir) && !pathExists(archivePath)) {

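With the shim's createHadoopArchive() gone, DDLTask drives the stock HadoopArchives tool through ToolRunner, passing the same arguments the hadoop archive command accepts. The invocation shape, pulled out into a stand-alone sketch (class and method names here are illustrative):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.tools.HadoopArchives;
    import org.apache.hadoop.util.ToolRunner;

    public class ArchiveRunner {
      // Equivalent of: hadoop archive -archiveName <name> -p <parent> <dest>
      static int createArchive(Configuration conf, Path parentDir,
          Path destDir, String archiveName) throws Exception {
        HadoopArchives har = new HadoopArchives(conf);
        List<String> args = new ArrayList<String>();
        args.add("-archiveName");
        args.add(archiveName);
        args.add("-p");
        args.add(parentDir.toString());
        args.add(destDir.toString());
        return ToolRunner.run(har, args.toArray(new String[0]));
      }
    }
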
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SecureCmdDoAs.java Thu Nov 27 01:07:32 2014
@@ -17,13 +17,18 @@
  */
 package org.apache.hadoop.hive.ql.exec;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.Map;
 
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
 
 /**
  * SecureCmdDoAs - Helper class for setting parameters and env necessary for
@@ -35,11 +40,23 @@ public class SecureCmdDoAs {
   private final Path tokenPath;
 
   public SecureCmdDoAs(HiveConf conf) throws HiveException, IOException{
-    tokenPath = ShimLoader.getHadoopShims().createDelegationTokenFile(conf);
+    // Get a delegation token for the user from the filesystem and write it
+    // into a token file that child processes can pick up
+    String uname = UserGroupInformation.getLoginUser().getShortUserName();
+    FileSystem fs = FileSystem.get(conf);
+    Token<?> fsToken = fs.getDelegationToken(uname);
+
+    File t = File.createTempFile("hive_hadoop_delegation_token", null);
+    tokenPath = new Path(t.toURI());
+
+    //write credential with token to file
+    Credentials cred = new Credentials();
+    cred.addToken(fsToken.getService(), fsToken);
+    cred.writeTokenStorageFile(tokenPath, conf);
   }
 
   public void addEnv(Map<String, String> env){
-    env.put(ShimLoader.getHadoopShims().getTokenFileLocEnvName(),
+    env.put(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION,
         tokenPath.toUri().getPath());
   }
 

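The rewritten SecureCmdDoAs above is the standard Hadoop recipe for handing a delegation token to a child process: obtain a token from the FileSystem, persist it with Credentials.writeTokenStorageFile(), and export the file's path under HADOOP_TOKEN_FILE_LOCATION. Condensed into a sketch (the temp-file prefix and method name are illustrative):

    import java.io.File;
    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.token.Token;

    public class TokenFileDemo {
      static void passTokenToChild(Configuration conf, Map<String, String> env)
          throws Exception {
        String uname = UserGroupInformation.getLoginUser().getShortUserName();
        FileSystem fs = FileSystem.get(conf);
        Token<?> fsToken = fs.getDelegationToken(uname);

        File t = File.createTempFile("delegation_token", null);
        Path tokenPath = new Path(t.toURI());

        // serialize the credentials so a child JVM can read them back
        Credentials cred = new Credentials();
        cred.addToken(fsToken.getService(), fsToken);
        cred.writeTokenStorageFile(tokenPath, conf);

        // child processes locate the credentials file through this env var
        env.put(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION,
            tokenPath.toUri().getPath());
      }
    }
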
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java Thu Nov 27 01:07:32 2014
@@ -34,6 +34,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.mapred.JobConf;
 
 /**
@@ -215,8 +216,7 @@ public class TaskLogProcessor {
             break;
           }
           
-          inputLine =
-              ShimLoader.getHadoopShims().unquoteHtmlChars(inputLine);
+          inputLine = HtmlQuoting.unquoteHtmlChars(inputLine);
           
           if (stackTracePattern.matcher(inputLine).matches() ||
               endStackTracePattern.matcher(inputLine).matches()) {

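HtmlQuoting.unquoteHtmlChars() is a static utility in hadoop-common, so the shim call it replaces appears to have been pure indirection. For reference, a one-line round trip (the sample string is made up):

    import org.apache.hadoop.http.HtmlQuoting;

    public class UnquoteDemo {
      public static void main(String[] args) {
        String quoted = "java.io.IOException: &lt;spill failed&gt;";
        // prints: java.io.IOException: <spill failed>
        System.out.println(HtmlQuoting.unquoteHtmlChars(quoted));
      }
    }
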
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java Thu Nov 27 01:07:32 2014
@@ -59,6 +59,7 @@ import org.apache.hadoop.hive.ql.exec.Ut
 import org.apache.hadoop.hive.ql.exec.tez.TezSessionState;
 import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
 import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
 import org.apache.hadoop.hive.ql.io.IOPrepareCache;
@@ -85,6 +86,7 @@ import org.apache.hadoop.mapred.JobClien
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Partitioner;
 import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.log4j.Appender;
 import org.apache.log4j.BasicConfigurator;
 import org.apache.log4j.FileAppender;
@@ -227,7 +229,7 @@ public class ExecDriver extends Task<Map
       return 5;
     }
 
-    ShimLoader.getHadoopShims().prepareJobOutput(job);
+    HiveFileFormatUtils.prepareJobOutput(job);
 //See the javadoc on HiveOutputFormatImpl and HiveFileFormatUtils.prepareJobOutput()
     job.setOutputFormat(HiveOutputFormatImpl.class);
 
@@ -276,9 +278,6 @@ public class ExecDriver extends Task<Map
         useSpeculativeExecReducers);
 
     String inpFormat = HiveConf.getVar(job, HiveConf.ConfVars.HIVEINPUTFORMAT);
-    if ((inpFormat == null) || (!StringUtils.isNotBlank(inpFormat))) {
-      inpFormat = ShimLoader.getHadoopShims().getInputFormatClassName();
-    }
 
     if (mWork.isUseBucketizedHiveInputFormat()) {
       inpFormat = BucketizedHiveInputFormat.class.getName();
@@ -665,9 +664,8 @@ public class ExecDriver extends Task<Map
       conf.set("tmpfiles", files);
     }
 
-    if(ShimLoader.getHadoopShims().isSecurityEnabled()){
-      String hadoopAuthToken =
-          System.getenv(ShimLoader.getHadoopShims().getTokenFileLocEnvName());
+    if(UserGroupInformation.isSecurityEnabled()){
+      String hadoopAuthToken = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
       if(hadoopAuthToken != null){
         conf.set("mapreduce.job.credentials.binary", hadoopAuthToken);
       }

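One subtlety in the hunk above: UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION is the name of the environment variable, not its value, so the token file path must be fetched through System.getenv() before being handed to mapreduce.job.credentials.binary; otherwise the null check below it could never trigger. A stand-alone sketch of the intended propagation:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;

    public class CredentialsBinaryDemo {
      static void propagateTokenFile(Configuration conf) {
        if (UserGroupInformation.isSecurityEnabled()) {
          // resolve the env var's value, not the constant naming it
          String tokenFile =
              System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
          if (tokenFile != null) {
            conf.set("mapreduce.job.credentials.binary", tokenFile);
          }
        }
      }
    }
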
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java Thu Nov 27 01:07:32 2014
@@ -66,7 +66,9 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hive.common.util.StreamPrinter;
 
@@ -237,8 +239,7 @@ public class MapredLocalTask extends Tas
       //Set HADOOP_USER_NAME env variable for child process, so that
       // it also runs with hadoop permissions for the user the job is running as
       // This will be used by hadoop only in unsecure (non-Kerberos) mode
-      HadoopShims shim = ShimLoader.getHadoopShims();
-      String endUserName = shim.getShortUserName(shim.getUGIForConf(job));
+      String endUserName = Utils.getUGIForConf(job).getShortUserName();
       LOG.debug("setting HADOOP_USER_NAME\t" + endUserName);
       variables.put("HADOOP_USER_NAME", endUserName);
 
@@ -265,8 +266,8 @@ public class MapredLocalTask extends Tas
       }
 
 
-      if(ShimLoader.getHadoopShims().isSecurityEnabled() &&
-          ShimLoader.getHadoopShims().isLoginKeytabBased()) {
+      if(UserGroupInformation.isSecurityEnabled() &&
+           UserGroupInformation.isLoginKeytabBased()) {
         //If kerberos security is enabled, and HS2 doAs is enabled,
         // then additional params need to be set so that the command is run as
         // intended user

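The pair of UserGroupInformation predicates above singles out a kerberized service that logged in from a keytab, the one case where the child command needs extra doAs parameters. The gate in isolation (the surrounding command handling is elided):

    import org.apache.hadoop.security.UserGroupInformation;

    public class DoAsGateDemo {
      static boolean needsProxyUserArgs() throws Exception {
        // true only for a kerberized service principal started from a
        // keytab; plain users and unsecure clusters skip the extra params
        return UserGroupInformation.isSecurityEnabled()
            && UserGroupInformation.isLoginKeytabBased();
      }
    }
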
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java Thu Nov 27 01:07:32 2014
@@ -20,7 +20,9 @@ package org.apache.hadoop.hive.ql.exec.t
 import com.google.common.base.Function;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
+
 import javax.security.auth.login.LoginException;
+
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
@@ -54,6 +56,7 @@ import org.apache.hadoop.hive.ql.exec.mr
 import org.apache.hadoop.hive.ql.exec.tez.tools.TezMergedLogicalInput;
 import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
 import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.NullOutputCommitter;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
 import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
@@ -72,8 +75,7 @@ import org.apache.hadoop.hive.ql.plan.Te
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.stats.StatsFactory;
 import org.apache.hadoop.hive.ql.stats.StatsPublisher;
-import org.apache.hadoop.hive.shims.HadoopShimsSecure.NullOutputCommitter;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.mapred.FileOutputFormat;
@@ -203,9 +205,6 @@ public class DagUtils {
     Utilities.setInputAttributes(conf, mapWork);
 
     String inpFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVETEZINPUTFORMAT);
-    if ((inpFormat == null) || (!StringUtils.isNotBlank(inpFormat))) {
-      inpFormat = ShimLoader.getHadoopShims().getInputFormatClassName();
-    }
 
     if (mapWork.isUseBucketizedHiveInputFormat()) {
       inpFormat = BucketizedHiveInputFormat.class.getName();
@@ -761,8 +760,8 @@ public class DagUtils {
    */
   @SuppressWarnings("deprecation")
   public Path getDefaultDestDir(Configuration conf) throws LoginException, IOException {
-    UserGroupInformation ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
-    String userName = ShimLoader.getHadoopShims().getShortUserName(ugi);
+    UserGroupInformation ugi = Utils.getUGIForConf(conf);
+    String userName = ugi.getShortUserName();
     String userPathStr = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_USER_INSTALL_DIR);
     Path userPath = new Path(userPathStr);
     FileSystem fs = userPath.getFileSystem(conf);
@@ -1125,8 +1124,8 @@ public class DagUtils {
     UserGroupInformation ugi;
     String userName = System.getProperty("user.name");
     try {
-      ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
-      userName = ShimLoader.getHadoopShims().getShortUserName(ugi);
+      ugi = Utils.getUGIForConf(conf);
+      userName = ugi.getShortUserName();
     } catch (LoginException e) {
       throw new IOException(e);
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolManager.java Thu Nov 27 01:07:32 2014
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /**
@@ -212,8 +213,8 @@ public class TezSessionPoolManager {
     }
 
     try {
-      UserGroupInformation ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
-      String userName = ShimLoader.getHadoopShims().getShortUserName(ugi);
+      UserGroupInformation ugi = Utils.getUGIForConf(conf);
+      String userName = ugi.getShortUserName();
       LOG.info("The current user: " + userName + ", session user: " + session.getUser());
       if (userName.equals(session.getUser()) == false) {
         LOG.info("Different users incoming: " + userName + " existing: " + session.getUser());

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java Thu Nov 27 01:07:32 2014
@@ -46,6 +46,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.LocalResource;
 import org.apache.hadoop.yarn.api.records.LocalResourceType;
@@ -136,9 +137,8 @@ public class TezSessionState {
     this.queueName = conf.get("tez.queue.name");
     this.doAsEnabled = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
 
-    UserGroupInformation ugi;
-    ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
-    user = ShimLoader.getHadoopShims().getShortUserName(ugi);
+    UserGroupInformation ugi = Utils.getUGIForConf(conf);
+    user = ugi.getShortUserName();
     LOG.info("User of session id " + sessionId + " is " + user);
 
     // create the tez tmp dir

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/HookContext.java Thu Nov 27 01:07:32 2014
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.QueryPl
 import org.apache.hadoop.hive.ql.exec.TaskRunner;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 /**
  * Hook Context keeps all the necessary information for all the hooks.
@@ -61,7 +62,7 @@ public class HookContext {
     completeTaskList = new ArrayList<TaskRunner>();
     inputs = queryPlan.getInputs();
     outputs = queryPlan.getOutputs();
-    ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
+    ugi = Utils.getUGIForConf(conf);
     linfo= null;
     if(SessionState.get() != null){
       linfo = SessionState.get().getLineageState().getLineageInfo();

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java Thu Nov 27 01:07:32 2014
@@ -20,43 +20,12 @@ package org.apache.hadoop.hive.ql.io;
 
 import java.io.DataInput;
 import java.io.DataOutput;
-import java.io.File;
 import java.io.IOException;
-import java.io.Serializable;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
+
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat.HiveInputSplit;
-import org.apache.hadoop.hive.ql.plan.MapredWork;
-import org.apache.hadoop.hive.ql.plan.PartitionDesc;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
-import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.hive.shims.HadoopShims.CombineFileInputFormatShim;
-import org.apache.hadoop.hive.shims.HadoopShims.InputSplitShim;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapred.FileInputFormat;
 import org.apache.hadoop.mapred.FileSplit;
-import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobConfigurable;
-import org.apache.hadoop.mapred.RecordReader;
-import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
@@ -101,6 +70,7 @@ public class BucketizedHiveInputSplit ex
     return inputSplits[idx];
   }
 
+  @Override
   public String inputFormatClassName() {
     return inputFormatClassName;
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java Thu Nov 27 01:07:32 2014
@@ -41,15 +41,13 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
-import org.apache.hadoop.hive.ql.io.orc.OrcRecordUpdater;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.parse.SplitSample;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.shims.HadoopShims.CombineFileInputFormatShim;
-import org.apache.hadoop.hive.shims.HadoopShims.InputSplitShim;
+import org.apache.hadoop.hive.shims.HadoopShimsSecure.InputSplitShim;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -61,6 +59,7 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.mapred.lib.CombineFileSplit;
 
 
 /**
@@ -81,21 +80,21 @@ public class CombineHiveInputFormat<K ex
   * from different files. Since they belong to a single directory, there is a
    * single inputformat for all the chunks.
    */
-  public static class CombineHiveInputSplit implements InputSplitShim {
+  public static class CombineHiveInputSplit extends InputSplitShim {
 
     String inputFormatClassName;
-    InputSplitShim inputSplitShim;
+    CombineFileSplit inputSplitShim;
 
     public CombineHiveInputSplit() throws IOException {
       this(ShimLoader.getHadoopShims().getCombineFileInputFormat()
           .getInputSplitShim());
     }
 
-    public CombineHiveInputSplit(InputSplitShim inputSplitShim) throws IOException {
+    public CombineHiveInputSplit(CombineFileSplit inputSplitShim) throws IOException {
       this(inputSplitShim.getJob(), inputSplitShim);
     }
 
-    public CombineHiveInputSplit(JobConf job, InputSplitShim inputSplitShim)
+    public CombineHiveInputSplit(JobConf job, CombineFileSplit inputSplitShim)
         throws IOException {
       this.inputSplitShim = inputSplitShim;
       if (job != null) {
@@ -114,7 +113,7 @@ public class CombineHiveInputFormat<K ex
       }
     }
 
-    public InputSplitShim getInputSplitShim() {
+    public CombineFileSplit getInputSplitShim() {
       return inputSplitShim;
     }
 
@@ -129,50 +128,60 @@ public class CombineHiveInputFormat<K ex
       this.inputFormatClassName = inputFormatClassName;
     }
 
+    @Override
     public JobConf getJob() {
       return inputSplitShim.getJob();
     }
 
+    @Override
     public long getLength() {
       return inputSplitShim.getLength();
     }
 
    /** Returns an array containing the start offsets of the files in the split. */
+    @Override
     public long[] getStartOffsets() {
       return inputSplitShim.getStartOffsets();
     }
 
     /** Returns an array containing the lengths of the files in the split. */
+    @Override
     public long[] getLengths() {
       return inputSplitShim.getLengths();
     }
 
     /** Returns the start offset of the i<sup>th</sup> Path. */
+    @Override
     public long getOffset(int i) {
       return inputSplitShim.getOffset(i);
     }
 
     /** Returns the length of the i<sup>th</sup> Path. */
+    @Override
     public long getLength(int i) {
       return inputSplitShim.getLength(i);
     }
 
     /** Returns the number of Paths in the split. */
+    @Override
     public int getNumPaths() {
       return inputSplitShim.getNumPaths();
     }
 
     /** Returns the i<sup>th</sup> Path. */
+    @Override
     public Path getPath(int i) {
       return inputSplitShim.getPath(i);
     }
 
     /** Returns all the Paths in the split. */
+    @Override
     public Path[] getPaths() {
       return inputSplitShim.getPaths();
     }
 
     /** Returns all the Paths where this input-split resides. */
+    @Override
     public String[] getLocations() throws IOException {
       return inputSplitShim.getLocations();
     }
@@ -192,6 +201,7 @@ public class CombineHiveInputFormat<K ex
     /**
      * Writable interface.
      */
+    @Override
     public void readFields(DataInput in) throws IOException {
       inputSplitShim.readFields(in);
       inputFormatClassName = in.readUTF();
@@ -200,6 +210,7 @@ public class CombineHiveInputFormat<K ex
     /**
      * Writable interface.
      */
+    @Override
     public void write(DataOutput out) throws IOException {
       inputSplitShim.write(out);
 
@@ -219,11 +230,6 @@ public class CombineHiveInputFormat<K ex
 
       out.writeUTF(inputFormatClassName);
     }
-
-    @Override
-    public void shrinkSplit(long length) {
-      inputSplitShim.shrinkSplit(length);
-    }
   }
 
   // Splits are not shared across different partitions with different input formats.
@@ -245,9 +251,6 @@ public class CombineHiveInputFormat<K ex
     public boolean equals(Object o) {
       if (o instanceof CombinePathInputFormat) {
         CombinePathInputFormat mObj = (CombinePathInputFormat) o;
-        if (mObj == null) {
-          return false;
-        }
         return (opList.equals(mObj.opList)) &&
             (inputFormatClassName.equals(mObj.inputFormatClassName)) &&
             (deserializerClassName == null ? (mObj.deserializerClassName == null) :
@@ -410,7 +413,7 @@ public class CombineHiveInputFormat<K ex
     }
 
     // Processing directories
-    List<InputSplitShim> iss = new ArrayList<InputSplitShim>();
+    List<CombineFileSplit> iss = new ArrayList<CombineFileSplit>();
     if (!mrwork.isMapperCannotSpanPartns()) {
       //mapper can span partitions
       //combine into as few as one split, subject to the PathFilters set
@@ -434,7 +437,7 @@ public class CombineHiveInputFormat<K ex
       iss = sampleSplits(iss);
     }
 
-    for (InputSplitShim is : iss) {
+    for (CombineFileSplit is : iss) {
       CombineHiveInputSplit csplit = new CombineHiveInputSplit(job, is);
       result.add(csplit);
     }
@@ -450,9 +453,6 @@ public class CombineHiveInputFormat<K ex
   @Override
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
     init(job);
-    Map<String, ArrayList<String>> pathToAliases = mrwork.getPathToAliases();
-    Map<String, Operator<? extends OperatorDesc>> aliasToWork =
-        mrwork.getAliasToWork();
 
     ArrayList<InputSplit> result = new ArrayList<InputSplit>();
 
@@ -469,9 +469,8 @@ public class CombineHiveInputFormat<K ex
               IOPrepareCache.get().allocatePartitionDescMap());
 
       // Use HiveInputFormat if any of the paths is not splittable
-      Class inputFormatClass = part.getInputFileFormatClass();
-      String inputFormatClassName = inputFormatClass.getName();
-      InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
+      Class<? extends InputFormat> inputFormatClass = part.getInputFileFormatClass();
+      InputFormat<WritableComparable, Writable> inputFormat = getInputFormatFromCache(inputFormatClass, job);
       if (inputFormat instanceof AvoidSplitCombination &&
           ((AvoidSplitCombination) inputFormat).shouldSkipCombine(path, job)) {
         if (LOG.isDebugEnabled()) {
@@ -523,7 +522,7 @@ public class CombineHiveInputFormat<K ex
   }
 
   private void processPaths(JobConf job, CombineFileInputFormatShim combine,
-      List<InputSplitShim> iss, Path... path) throws IOException {
+      List<CombineFileSplit> iss, Path... path) throws IOException {
     JobConf currJob = new JobConf(job);
     FileInputFormat.setInputPaths(currJob, path);
     iss.addAll(Arrays.asList(combine.getSplits(currJob, 1)));
@@ -540,16 +539,16 @@ public class CombineHiveInputFormat<K ex
    * @param splits
    * @return the sampled splits
    */
-  private List<InputSplitShim> sampleSplits(List<InputSplitShim> splits) {
+  private List<CombineFileSplit> sampleSplits(List<CombineFileSplit> splits) {
     HashMap<String, SplitSample> nameToSamples = mrwork.getNameToSplitSample();
-    List<InputSplitShim> retLists = new ArrayList<InputSplitShim>();
-    Map<String, ArrayList<InputSplitShim>> aliasToSplitList = new HashMap<String, ArrayList<InputSplitShim>>();
+    List<CombineFileSplit> retLists = new ArrayList<CombineFileSplit>();
+    Map<String, ArrayList<CombineFileSplit>> aliasToSplitList = new HashMap<String, ArrayList<CombineFileSplit>>();
     Map<String, ArrayList<String>> pathToAliases = mrwork.getPathToAliases();
     Map<String, ArrayList<String>> pathToAliasesNoScheme = removeScheme(pathToAliases);
 
     // Populate list of exclusive splits for every sampled alias
     //
-    for (InputSplitShim split : splits) {
+    for (CombineFileSplit split : splits) {
       String alias = null;
       for (Path path : split.getPaths()) {
         boolean schemeless = path.toUri().getScheme() == null;
@@ -571,7 +570,7 @@ public class CombineHiveInputFormat<K ex
         // split exclusively serves alias, which needs to be sampled
         // add it to the split list of the alias.
         if (!aliasToSplitList.containsKey(alias)) {
-          aliasToSplitList.put(alias, new ArrayList<InputSplitShim>());
+          aliasToSplitList.put(alias, new ArrayList<CombineFileSplit>());
         }
         aliasToSplitList.get(alias).add(split);
       } else {
@@ -583,10 +582,10 @@ public class CombineHiveInputFormat<K ex
     // for every sampled alias, we figure out splits to be sampled and add
     // them to return list
     //
-    for (Map.Entry<String, ArrayList<InputSplitShim>> entry: aliasToSplitList.entrySet()) {
-      ArrayList<InputSplitShim> splitList = entry.getValue();
+    for (Map.Entry<String, ArrayList<CombineFileSplit>> entry: aliasToSplitList.entrySet()) {
+      ArrayList<CombineFileSplit> splitList = entry.getValue();
       long totalSize = 0;
-      for (InputSplitShim split : splitList) {
+      for (CombineFileSplit split : splitList) {
         totalSize += split.getLength();
       }
 
@@ -596,13 +595,13 @@ public class CombineHiveInputFormat<K ex
       int startIndex = splitSample.getSeedNum() % splitList.size();
       long size = 0;
       for (int i = 0; i < splitList.size(); i++) {
-        InputSplitShim split = splitList.get((startIndex + i) % splitList.size());
+        CombineFileSplit split = splitList.get((startIndex + i) % splitList.size());
         retLists.add(split);
         long splitLength = split.getLength();
         if (size + splitLength >= targetSize) {
           LOG.info("Sample alias " + entry.getKey() + " using " + (i + 1) + " splits");
           if (size + splitLength > targetSize) {
-            split.shrinkSplit(targetSize - size);
+            ((InputSplitShim)split).shrinkSplit(targetSize - size);
           }
           break;
         }
@@ -674,6 +673,7 @@ public class CombineHiveInputFormat<K ex
 
     // returns true if the specified path matches the prefix stored
     // in this TestFilter.
+    @Override
     public boolean accept(Path path) {
       boolean find = false;
       while (path != null && !find) {