Posted to commits@hive.apache.org by ha...@apache.org on 2014/11/27 02:07:35 UTC
svn commit: r1641980 [2/4] - in /hive/trunk: ./
beeline/src/test/org/apache/hive/beeline/
common/src/java/org/apache/hadoop/hive/common/
common/src/java/org/apache/hadoop/hive/conf/
hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/ hcatalog/...
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java Thu Nov 27 01:07:32 2014
@@ -23,7 +23,6 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.CombineHiveInputSplit;
-import org.apache.hadoop.hive.shims.HadoopShims.InputSplitShim;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.FileSplit;
@@ -31,6 +30,7 @@ import org.apache.hadoop.mapred.InputFor
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.lib.CombineFileSplit;
/**
* CombineHiveRecordReader.
@@ -45,7 +45,7 @@ public class CombineHiveRecordReader<K e
Reporter reporter, Integer partition) throws IOException {
super((JobConf)conf);
CombineHiveInputSplit hsplit = new CombineHiveInputSplit(jobConf,
- (InputSplitShim) split);
+ (CombineFileSplit) split);
String inputFormatClassName = hsplit.inputFormatClassName();
Class inputFormatClass = null;
try {
@@ -72,14 +72,17 @@ public class CombineHiveRecordReader<K e
recordReader.close();
}
+ @Override
public K createKey() {
return (K) recordReader.createKey();
}
+ @Override
public V createValue() {
return (V) recordReader.createValue();
}
+ @Override
public long getPos() throws IOException {
return recordReader.getPos();
}
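
For context, this hunk retires the 0.20-era InputSplitShim in favor of the stock org.apache.hadoop.mapred.lib.CombineFileSplit. A minimal sketch of the narrowing it enables, assuming the split comes from a combine-style input format (the wrapper class and method are hypothetical):

import java.io.IOException;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.lib.CombineFileSplit;

public class SplitNarrowingSketch {
  // With the shim interface gone, a combined split can be cast directly to
  // the stable mapred.lib type before wrapping it in a CombineHiveInputSplit.
  static CombineFileSplit narrow(InputSplit split) throws IOException {
    if (!(split instanceof CombineFileSplit)) {
      throw new IOException("Unexpected split type: " + split.getClass().getName());
    }
    return (CombineFileSplit) split;
  }
}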
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java Thu Nov 27 01:07:32 2014
@@ -42,16 +42,20 @@ import org.apache.hadoop.hive.ql.plan.Op
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobContext;
+import org.apache.hadoop.mapred.OutputCommitter;
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.hadoop.mapred.TaskAttemptContext;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.ReflectionUtils;
@@ -506,4 +510,42 @@ public final class HiveFileFormatUtils {
private HiveFileFormatUtils() {
// prevent instantiation
}
+
+ public static class NullOutputCommitter extends OutputCommitter {
+ @Override
+ public void setupJob(JobContext jobContext) { }
+ @Override
+ public void cleanupJob(JobContext jobContext) { }
+
+ @Override
+ public void setupTask(TaskAttemptContext taskContext) { }
+ @Override
+ public boolean needsTaskCommit(TaskAttemptContext taskContext) {
+ return false;
+ }
+ @Override
+ public void commitTask(TaskAttemptContext taskContext) { }
+ @Override
+ public void abortTask(TaskAttemptContext taskContext) { }
+ }
+
+ /**
+ * Hive uses side-effect files exclusively for its output. It also manages
+ * the setup/cleanup/commit of output from the Hive client, so it does not
+ * need the same support inside the MR framework.
+ *
+ * This routine sets the appropriate options to bypass setup/cleanup/commit
+ * support in the MR framework, but does not set the OutputFormat class.
+ */
+ public static void prepareJobOutput(JobConf conf) {
+ conf.setOutputCommitter(NullOutputCommitter.class);
+
+ // the option to bypass job setup and cleanup was introduced in Hadoop 0.21 (MAPREDUCE-463)
+ // but can be backported, so we disable job setup/cleanup in all versions >= 0.19
+ conf.setBoolean(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDSETUPCLEANUPNEEDED"), false);
+
+ // the option to bypass the task cleanup task was introduced in Hadoop 0.23 (MAPREDUCE-2206)
+ // but can be backported, so we disable task cleanup in all versions >= 0.19
+ conf.setBoolean(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDTASKCLEANUPNEEDED"), false);
+ }
}
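
The tasks patched below (MergeFileTask, PartialScanTask, ColumnTruncateTask) all switch to this utility. A minimal sketch of the calling pattern, assuming an already-populated JobConf (the helper class and method are hypothetical):

import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
import org.apache.hadoop.mapred.JobConf;

public class JobOutputSetupSketch {
  // Install the no-op committer and disable MR-side setup/cleanup/commit;
  // Hive commits its own side-effect files from the client.
  static void configureOutput(JobConf job) {
    HiveFileFormatUtils.prepareJobOutput(job);
    job.setOutputFormat(HiveOutputFormatImpl.class);
  }
}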
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/merge/MergeFileTask.java Thu Nov 27 01:07:32 2014
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.ql.exec.Ut
import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHook;
import org.apache.hadoop.hive.ql.exec.mr.Throttle;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.StageType;
@@ -86,7 +87,7 @@ public class MergeFileTask extends Task<
ctxCreated = true;
}
- ShimLoader.getHadoopShims().prepareJobOutput(job);
+ HiveFileFormatUtils.prepareJobOutput(job);
job.setInputFormat(work.getInputformatClass());
job.setOutputFormat(HiveOutputFormatImpl.class);
job.setMapperClass(MergeFileMapper.class);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java Thu Nov 27 01:07:32 2014
@@ -42,6 +42,7 @@ import org.apache.hadoop.hive.ql.exec.mr
import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHook;
import org.apache.hadoop.hive.ql.exec.mr.Throttle;
import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.MapredWork;
@@ -102,7 +103,7 @@ public class PartialScanTask extends Tas
HiveConf.setVar(job, HiveConf.ConfVars.HIVEINPUTFORMAT,
CombineHiveInputFormat.class.getName());
success = true;
- ShimLoader.getHadoopShims().prepareJobOutput(job);
+ HiveFileFormatUtils.prepareJobOutput(job);
job.setOutputFormat(HiveOutputFormatImpl.class);
job.setMapperClass(work.getMapperClass());
@@ -140,9 +141,6 @@ public class PartialScanTask extends Tas
}
String inpFormat = HiveConf.getVar(job, HiveConf.ConfVars.HIVEINPUTFORMAT);
- if ((inpFormat == null) || (!StringUtils.isNotBlank(inpFormat))) {
- inpFormat = ShimLoader.getHadoopShims().getInputFormatClassName();
- }
LOG.info("Using " + inpFormat);
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java Thu Nov 27 01:07:32 2014
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.exec.mr
import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHook;
import org.apache.hadoop.hive.ql.exec.mr.Throttle;
import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.api.StageType;
@@ -79,7 +80,7 @@ public class ColumnTruncateTask extends
HiveConf.setVar(job, HiveConf.ConfVars.HIVEINPUTFORMAT,
BucketizedHiveInputFormat.class.getName());
success = true;
- ShimLoader.getHadoopShims().prepareJobOutput(job);
+ HiveFileFormatUtils.prepareJobOutput(job);
job.setOutputFormat(HiveOutputFormatImpl.class);
job.setMapperClass(work.getMapperClass());
@@ -117,10 +118,6 @@ public class ColumnTruncateTask extends
}
String inpFormat = HiveConf.getVar(job, HiveConf.ConfVars.HIVEINPUTFORMAT);
- if ((inpFormat == null) || (!StringUtils.isNotBlank(inpFormat))) {
- inpFormat = ShimLoader.getHadoopShims().getInputFormatClassName();
- }
-
LOG.info("Using " + inpFormat);
try {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java Thu Nov 27 01:07:32 2014
@@ -25,6 +25,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
public class HadoopDefaultAuthenticator implements HiveAuthenticationProvider {
@@ -49,7 +50,7 @@ public class HadoopDefaultAuthenticator
this.conf = conf;
UserGroupInformation ugi = null;
try {
- ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
+ ugi = Utils.getUGIForConf(conf);
} catch (Exception e) {
throw new RuntimeException(e);
}
@@ -59,7 +60,7 @@ public class HadoopDefaultAuthenticator
"Can not initialize HadoopDefaultAuthenticator.");
}
- this.userName = ShimLoader.getHadoopShims().getShortUserName(ugi);
+ this.userName = ugi.getShortUserName();
if (ugi.getGroupNames() != null) {
this.groupNames = Arrays.asList(ugi.getGroupNames());
}
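
This and the following authenticator hunks replace shim indirection (getUGIForConf, getShortUserName) with direct UserGroupInformation calls, which are stable across the Hadoop versions Hive still supports. A minimal sketch of the resulting lookup pattern; getCurrentUser stands in here for Hive's Utils.getUGIForConf, and the class is hypothetical:

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiLookupSketch {
  // Resolve the short user name and group list straight from the UGI,
  // as the patched authenticators now do.
  static List<String> callerGroups() throws IOException {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    String userName = ugi.getShortUserName();
    List<String> groups = Arrays.asList(ugi.getGroupNames());
    System.out.println("user=" + userName + " groups=" + groups);
    return groups;
  }
}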
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java Thu Nov 27 01:07:32 2014
@@ -51,7 +51,7 @@ public class ProxyUserAuthenticator exte
// If we're here, proxy user is set.
try {
- ugi = ShimLoader.getHadoopShims().createRemoteUser(proxyUser,null);
+ ugi = UserGroupInformation.createRemoteUser(proxyUser);
} catch (Exception e) {
throw new RuntimeException(e);
}
@@ -61,7 +61,7 @@ public class ProxyUserAuthenticator exte
"Can not initialize ProxyUserAuthenticator for user ["+proxyUser+"]");
}
- this.userName = ShimLoader.getHadoopShims().getShortUserName(ugi);
+ this.userName = ugi.getShortUserName();
if (ugi.getGroupNames() != null) {
this.groupNames = Arrays.asList(ugi.getGroupNames());
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Thu Nov 27 01:07:32 2014
@@ -65,6 +65,7 @@ import org.apache.hadoop.hive.ql.securit
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl;
import org.apache.hadoop.hive.ql.util.DosToUnix;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;
@@ -233,7 +234,7 @@ public class SessionState {
* Whether we are in auto-commit state or not. Currently we are always in auto-commit,
* so there are not setters for this yet.
*/
- private boolean txnAutoCommit = true;
+ private final boolean txnAutoCommit = true;
/**
* store the jars loaded last time
@@ -423,7 +424,7 @@ public class SessionState {
// shared with SessionState, other parts of the code might update the config, but
// Hive.get(HiveConf) would not recognize the case when it needs refreshing
Hive.get(new HiveConf(startSs.conf)).getMSC();
- UserGroupInformation sessionUGI = ShimLoader.getHadoopShims().getUGIForConf(startSs.conf);
+ UserGroupInformation sessionUGI = Utils.getUGIForConf(startSs.conf);
FileSystem.get(startSs.conf);
// Create scratch dirs for this session
@@ -651,7 +652,7 @@ public class SessionState {
authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(),
conf, authenticator, authzContextBuilder.build());
- authorizerV2.applyAuthorizationConfigPolicy(conf);
+ authorizerV2.applyAuthorizationConfigPolicy(conf);
}
// create the create table grants with new config
createTableGrants = CreateTableAutomaticGrant.create(conf);
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java Thu Nov 27 01:07:32 2014
@@ -24,7 +24,7 @@ import java.net.URISyntaxException;
import javax.security.auth.login.LoginException;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.tez.dag.api.TezException;
@@ -60,9 +60,8 @@ public class SampleTezSessionState exten
public void open(HiveConf conf) throws IOException, LoginException, URISyntaxException,
TezException {
this.hiveConf = conf;
- UserGroupInformation ugi;
- ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
- user = ShimLoader.getHadoopShims().getShortUserName(ugi);
+ UserGroupInformation ugi = Utils.getUGIForConf(conf);
+ user = ugi.getShortUserName();
this.doAsEnabled = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
}
Modified: hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table.q Thu Nov 27 01:07:32 2014
@@ -1,4 +1,4 @@
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
create table tst1(key string, value string) partitioned by (ds string) clustered by (key) into 10 buckets;
alter table tst1 clustered by (key) into 8 buckets;
Modified: hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table2.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table2.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table2.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table2.q Thu Nov 27 01:07:32 2014
@@ -1,4 +1,4 @@
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
-- Tests that when overwriting a partition in a table after altering the bucketing/sorting metadata
-- the partition metadata is updated as well.
Modified: hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table2_h23.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table2_h23.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table2_h23.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table2_h23.q Thu Nov 27 01:07:32 2014
@@ -1,4 +1,4 @@
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
-- Tests that when overwriting a partition in a table after altering the bucketing/sorting metadata
-- the partition metadata is updated as well.
Modified: hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table_h23.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table_h23.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table_h23.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/alter_numbuckets_partitioned_table_h23.q Thu Nov 27 01:07:32 2014
@@ -1,4 +1,4 @@
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
create table tst1(key string, value string) partitioned by (ds string) clustered by (key) into 10 buckets;
alter table tst1 clustered by (key) into 8 buckets;
Modified: hive/trunk/ql/src/test/queries/clientpositive/archive_excludeHadoop20.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/archive_excludeHadoop20.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/archive_excludeHadoop20.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/archive_excludeHadoop20.q Thu Nov 27 01:07:32 2014
@@ -3,8 +3,6 @@ set hive.enforce.bucketing = true;
set hive.exec.submitviachild=true;
set hive.exec.submit.local.task.via.child=true;
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
-
drop table tstsrc;
drop table tstsrcpart;
Modified: hive/trunk/ql/src/test/queries/clientpositive/auto_join14.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/auto_join14.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/auto_join14.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/auto_join14.q Thu Nov 27 01:07:32 2014
@@ -1,7 +1,7 @@
set hive.auto.convert.join = true;
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE;
Modified: hive/trunk/ql/src/test/queries/clientpositive/auto_join14_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/auto_join14_hadoop20.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/auto_join14_hadoop20.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/auto_join14_hadoop20.q Thu Nov 27 01:07:32 2014
@@ -1,7 +1,7 @@
set hive.auto.convert.join = true;
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE;
Modified: hive/trunk/ql/src/test/queries/clientpositive/combine2.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/combine2.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/combine2.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/combine2.q Thu Nov 27 01:07:32 2014
@@ -17,7 +17,7 @@ set hive.merge.smallfiles.avgsize=0;
create table combine2(key string) partitioned by (value string);
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
-- This test sets mapred.max.split.size=256 and hive.merge.smallfiles.avgsize=0
-- in an attempt to force the generation of multiple splits and multiple output files.
-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
Modified: hive/trunk/ql/src/test/queries/clientpositive/combine2_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/combine2_hadoop20.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/combine2_hadoop20.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/combine2_hadoop20.q Thu Nov 27 01:07:32 2014
@@ -17,7 +17,7 @@ set hive.merge.smallfiles.avgsize=0;
create table combine2(key string) partitioned by (value string);
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
-- This test sets mapred.max.split.size=256 and hive.merge.smallfiles.avgsize=0
-- in an attempt to force the generation of multiple splits and multiple output files.
-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
Modified: hive/trunk/ql/src/test/queries/clientpositive/combine2_win.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/combine2_win.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/combine2_win.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/combine2_win.q Thu Nov 27 01:07:32 2014
@@ -11,7 +11,7 @@ set hive.merge.smallfiles.avgsize=0;
-- INCLUDE_OS_WINDOWS
-- included only on windows because of difference in file name encoding logic
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
create table combine2(key string) partitioned by (value string);
Modified: hive/trunk/ql/src/test/queries/clientpositive/ctas.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/ctas.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/ctas.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/ctas.q Thu Nov 27 01:07:32 2014
@@ -1,4 +1,4 @@
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
create table nzhang_Tmp(a int, b string);
select * from nzhang_Tmp;
Modified: hive/trunk/ql/src/test/queries/clientpositive/ctas_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/ctas_hadoop20.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/ctas_hadoop20.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/ctas_hadoop20.q Thu Nov 27 01:07:32 2014
@@ -1,4 +1,4 @@
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
create table nzhang_Tmp(a int, b string);
select * from nzhang_Tmp;
Modified: hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_1.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_1.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_1.q Thu Nov 27 01:07:32 2014
@@ -3,7 +3,7 @@ set hive.enforce.sorting = true;
set hive.exec.reducers.max = 10;
set hive.map.groupby.sorted=true;
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
-- SORT_QUERY_RESULTS
CREATE TABLE T1(key STRING, val STRING)
Modified: hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_1_23.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_1_23.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_1_23.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_1_23.q Thu Nov 27 01:07:32 2014
@@ -3,7 +3,7 @@ set hive.enforce.sorting = true;
set hive.exec.reducers.max = 10;
set hive.map.groupby.sorted=true;
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
-- SORT_QUERY_RESULTS
CREATE TABLE T1(key STRING, val STRING)
Modified: hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_skew_1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_skew_1.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_skew_1.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_skew_1.q Thu Nov 27 01:07:32 2014
@@ -4,7 +4,7 @@ set hive.exec.reducers.max = 10;
set hive.map.groupby.sorted=true;
set hive.groupby.skewindata=true;
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
-- SORT_QUERY_RESULTS
CREATE TABLE T1(key STRING, val STRING)
Modified: hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_skew_1_23.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_skew_1_23.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_skew_1_23.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/groupby_sort_skew_1_23.q Thu Nov 27 01:07:32 2014
@@ -4,7 +4,7 @@ set hive.exec.reducers.max = 10;
set hive.map.groupby.sorted=true;
set hive.groupby.skewindata=true;
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
-- SORT_QUERY_RESULTS
CREATE TABLE T1(key STRING, val STRING)
Modified: hive/trunk/ql/src/test/queries/clientpositive/input12.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/input12.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/input12.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/input12.q Thu Nov 27 01:07:32 2014
@@ -2,7 +2,7 @@ set mapreduce.framework.name=yarn;
set mapreduce.jobtracker.address=localhost:58;
set hive.exec.mode.local.auto=true;
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE;
Modified: hive/trunk/ql/src/test/queries/clientpositive/input12_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/input12_hadoop20.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/input12_hadoop20.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/input12_hadoop20.q Thu Nov 27 01:07:32 2014
@@ -1,7 +1,7 @@
set mapred.job.tracker=localhost:58;
set hive.exec.mode.local.auto=true;
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
CREATE TABLE dest2(key INT, value STRING) STORED AS TEXTFILE;
Modified: hive/trunk/ql/src/test/queries/clientpositive/input39.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/input39.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/input39.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/input39.q Thu Nov 27 01:07:32 2014
@@ -1,4 +1,4 @@
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
create table t1(key string, value string) partitioned by (ds string);
Modified: hive/trunk/ql/src/test/queries/clientpositive/join14.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/join14.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/join14.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/join14.q Thu Nov 27 01:07:32 2014
@@ -1,5 +1,5 @@
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
-- SORT_QUERY_RESULTS
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE;
Modified: hive/trunk/ql/src/test/queries/clientpositive/loadpart_err.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/loadpart_err.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/loadpart_err.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/loadpart_err.q Thu Nov 27 01:07:32 2014
@@ -2,7 +2,7 @@ set hive.cli.errors.ignore=true;
ADD FILE ../../data/scripts/error_script;
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.17, 0.18, 0.19, 0.20, 0.20S, 0.23)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S, 0.23)
-- (this test is flaky so it is currently disabled for all Hadoop versions)
CREATE TABLE loadpart1(a STRING, b STRING) PARTITIONED BY (ds STRING);
Modified: hive/trunk/ql/src/test/queries/clientpositive/sample_islocalmode_hook.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/sample_islocalmode_hook.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/sample_islocalmode_hook.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/sample_islocalmode_hook.q Thu Nov 27 01:07:32 2014
@@ -8,7 +8,7 @@ set mapred.min.split.size.per.rack=300;
set hive.exec.mode.local.auto=true;
set hive.merge.smallfiles.avgsize=1;
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
-- create file inputs
create table sih_i_part (key int, value string) partitioned by (p string);
Modified: hive/trunk/ql/src/test/queries/clientpositive/sample_islocalmode_hook_hadoop20.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/sample_islocalmode_hook_hadoop20.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/sample_islocalmode_hook_hadoop20.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/sample_islocalmode_hook_hadoop20.q Thu Nov 27 01:07:32 2014
@@ -8,7 +8,7 @@ set mapred.min.split.size.per.rack=300;
set hive.exec.mode.local.auto=true;
set hive.merge.smallfiles.avgsize=1;
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
-- This test sets mapred.max.split.size=300 and hive.merge.smallfiles.avgsize=1
-- in an attempt to force the generation of multiple splits and multiple output files.
-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
Modified: hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/stats_partscan_1.q Thu Nov 27 01:07:32 2014
@@ -7,7 +7,7 @@ set mapred.min.split.size.per.node=256;
set mapred.min.split.size.per.rack=256;
set mapred.max.split.size=256;
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
-- This test uses mapred.min.split.size/mapred.max.split.size for controlling
-- the number of input splits, which is not effective on Hadoop 0.20.
-- stats_partscan_1_23.q is the same test as this but has a different result.
Modified: hive/trunk/ql/src/test/queries/clientpositive/uber_reduce.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/uber_reduce.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/uber_reduce.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/uber_reduce.q Thu Nov 27 01:07:32 2014
@@ -3,7 +3,7 @@ SET mapreduce.job.ubertask.maxreduces=1;
SET mapred.reduce.tasks=1;
-- Uberized mode is a YARN option, ignore this test for non-YARN Hadoop versions
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
CREATE TABLE T1(key STRING, val STRING);
LOAD DATA LOCAL INPATH '../../data/files/T1.txt' INTO TABLE T1;
Modified: hive/trunk/ql/src/test/queries/clientpositive/udaf_percentile_approx_20.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/udaf_percentile_approx_20.q?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/udaf_percentile_approx_20.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/udaf_percentile_approx_20.q Thu Nov 27 01:07:32 2014
@@ -1,4 +1,4 @@
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+-- INCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
CREATE TABLE bucket (key double, value string) CLUSTERED BY (key) SORTED BY (key DESC) INTO 4 BUCKETS STORED AS TEXTFILE;
load data local inpath '../../data/files/srcsortbucket1outof4.txt' INTO TABLE bucket;
Modified: hive/trunk/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2_h23.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2_h23.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2_h23.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2_h23.q.out Thu Nov 27 01:07:32 2014
@@ -1,4 +1,4 @@
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
-- Tests that when overwriting a partition in a table after altering the bucketing/sorting metadata
-- the partition metadata is updated as well.
@@ -6,7 +6,7 @@ CREATE TABLE tst1(key STRING, value STRI
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@tst1
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
-- Tests that when overwriting a partition in a table after altering the bucketing/sorting metadata
-- the partition metadata is updated as well.
Modified: hive/trunk/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table_h23.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table_h23.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table_h23.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table_h23.q.out Thu Nov 27 01:07:32 2014
@@ -1,9 +1,9 @@
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
create table tst1(key string, value string) partitioned by (ds string) clustered by (key) into 10 buckets
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@tst1
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
create table tst1(key string, value string) partitioned by (ds string) clustered by (key) into 10 buckets
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
Modified: hive/trunk/ql/src/test/results/clientpositive/archive_excludeHadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/archive_excludeHadoop20.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/archive_excludeHadoop20.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/archive_excludeHadoop20.q.out Thu Nov 27 01:07:32 2014
@@ -1,10 +1,6 @@
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
-
-drop table tstsrc
+PREHOOK: query: drop table tstsrc
PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20)
-
-drop table tstsrc
+POSTHOOK: query: drop table tstsrc
POSTHOOK: type: DROPTABLE
PREHOOK: query: drop table tstsrcpart
PREHOOK: type: DROPTABLE
Modified: hive/trunk/ql/src/test/results/clientpositive/auto_join14.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/auto_join14.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/auto_join14.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/auto_join14.q.out Thu Nov 27 01:07:32 2014
@@ -1,10 +1,10 @@
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@dest1
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
Modified: hive/trunk/ql/src/test/results/clientpositive/combine2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/combine2.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/combine2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/combine2.q.out Thu Nov 27 01:07:32 2014
@@ -22,7 +22,7 @@ create table combine2(key string) partit
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@combine2
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
-- This test sets mapred.max.split.size=256 and hive.merge.smallfiles.avgsize=0
-- in an attempt to force the generation of multiple splits and multiple output files.
-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
@@ -40,7 +40,7 @@ select * from (
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@combine2
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
-- This test sets mapred.max.split.size=256 and hive.merge.smallfiles.avgsize=0
-- in an attempt to force the generation of multiple splits and multiple output files.
-- However, Hadoop 0.20 is incapable of generating splits smaller than the block size
Modified: hive/trunk/ql/src/test/results/clientpositive/ctas.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ctas.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ctas.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/ctas.q.out Thu Nov 27 01:07:32 2014
@@ -1,10 +1,10 @@
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
create table nzhang_Tmp(a int, b string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@nzhang_Tmp
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
create table nzhang_Tmp(a int, b string)
POSTHOOK: type: CREATETABLE
Modified: hive/trunk/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
Files hive/trunk/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out (original) and hive/trunk/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out Thu Nov 27 01:07:32 2014 differ
Modified: hive/trunk/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
Files hive/trunk/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out (original) and hive/trunk/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out Thu Nov 27 01:07:32 2014 differ
Modified: hive/trunk/ql/src/test/results/clientpositive/input12.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/input12.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/input12.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/input12.q.out Thu Nov 27 01:07:32 2014
@@ -1,10 +1,10 @@
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@dest1
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
Modified: hive/trunk/ql/src/test/results/clientpositive/input39.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/input39.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/input39.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/input39.q.out Thu Nov 27 01:07:32 2014
@@ -1,11 +1,11 @@
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
create table t1(key string, value string) partitioned by (ds string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@t1
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
create table t1(key string, value string) partitioned by (ds string)
Modified: hive/trunk/ql/src/test/results/clientpositive/join14.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/join14.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/join14.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/join14.q.out Thu Nov 27 01:07:32 2014
@@ -1,12 +1,12 @@
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
--- SORT_QUERY_RESULTS
+PREHOOK: query: -- SORT_QUERY_RESULTS
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@dest1
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
--- SORT_QUERY_RESULTS
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
CREATE TABLE dest1(c1 INT, c2 STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
Modified: hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out Thu Nov 27 01:07:32 2014
@@ -1,11 +1,11 @@
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
-- create file inputs
create table sih_i_part (key int, value string) partitioned by (p string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@sih_i_part
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
-- create file inputs
create table sih_i_part (key int, value string) partitioned by (p string)
Modified: hive/trunk/ql/src/test/results/clientpositive/tez/ctas.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/tez/ctas.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/tez/ctas.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/tez/ctas.q.out Thu Nov 27 01:07:32 2014
@@ -1,10 +1,10 @@
-PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
create table nzhang_Tmp(a int, b string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@nzhang_Tmp
-POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS( 0.20S)
create table nzhang_Tmp(a int, b string)
POSTHOOK: type: CREATETABLE
Modified: hive/trunk/ql/src/test/results/clientpositive/uber_reduce.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/uber_reduce.q.out?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/uber_reduce.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/uber_reduce.q.out Thu Nov 27 01:07:32 2014
@@ -1,12 +1,12 @@
PREHOOK: query: -- Uberized mode is a YARN option, ignore this test for non-YARN Hadoop versions
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
CREATE TABLE T1(key STRING, val STRING)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@T1
POSTHOOK: query: -- Uberized mode is a YARN option, ignore this test for non-YARN Hadoop versions
--- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20,0.20S)
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20S)
CREATE TABLE T1(key STRING, val STRING)
POSTHOOK: type: CREATETABLE
Modified: hive/trunk/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java (original)
+++ hive/trunk/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java Thu Nov 27 01:07:32 2014
@@ -37,7 +37,9 @@ import org.apache.hadoop.hive.shims.Hado
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
+import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.thrift.TProcessorFactory;
@@ -100,8 +102,7 @@ public class HiveAuthFactory {
if (authTypeStr == null) {
authTypeStr = AuthTypes.NONE.getAuthName();
}
- if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())
- && ShimLoader.getHadoopShims().isSecureShimImpl()) {
+ if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
saslServer = ShimLoader.getHadoopThriftAuthBridge()
.createServer(conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL));
@@ -180,7 +181,7 @@ public class HiveAuthFactory {
if (principal.isEmpty() || keyTabFile.isEmpty()) {
throw new IOException("HiveServer2 Kerberos principal or keytab is not correctly configured");
} else {
- ShimLoader.getHadoopShims().loginUserFromKeytab(principal, keyTabFile);
+ UserGroupInformation.loginUserFromKeytab(SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
}
}
@@ -192,7 +193,7 @@ public class HiveAuthFactory {
if (principal.isEmpty() || keyTabFile.isEmpty()) {
throw new IOException("HiveServer2 SPNEGO principal or keytab is not correctly configured");
} else {
- return ShimLoader.getHadoopShims().loginUserFromKeytabAndReturnUGI(principal, keyTabFile);
+ return UserGroupInformation.loginUserFromKeytabAndReturnUGI(SecurityUtil.getServerPrincipal(principal, "0.0.0.0"), keyTabFile);
}
}
@@ -328,16 +329,17 @@ public class HiveAuthFactory {
HiveConf hiveConf) throws HiveSQLException {
try {
UserGroupInformation sessionUgi;
- if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
+ if (UserGroupInformation.isSecurityEnabled()) {
KerberosNameShim kerbName = ShimLoader.getHadoopShims().getKerberosNameShim(realUser);
- String shortPrincipalName = kerbName.getServiceName();
- sessionUgi = ShimLoader.getHadoopShims().createProxyUser(shortPrincipalName);
+ sessionUgi = UserGroupInformation.createProxyUser(
+ kerbName.getServiceName(), UserGroupInformation.getLoginUser());
} else {
- sessionUgi = ShimLoader.getHadoopShims().createRemoteUser(realUser, null);
+ sessionUgi = UserGroupInformation.createRemoteUser(realUser);
}
if (!proxyUser.equalsIgnoreCase(realUser)) {
- ShimLoader.getHadoopShims().
- authorizeProxyAccess(proxyUser, sessionUgi, ipAddress, hiveConf);
+ ProxyUsers.refreshSuperUserGroupsConfiguration(hiveConf);
+ ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, sessionUgi),
+ ipAddress, hiveConf);
}
} catch (IOException e) {
throw new HiveSQLException(
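
The last hunk above inlines what the secure shim used to do: build a proxy UGI and let Hadoop's ProxyUsers check impersonation rights. The same check in isolation, assuming hadoop.proxyuser.* settings are present in the Configuration (the wrapper method is hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.ProxyUsers;

public class ProxyAuthSketch {
  // Throws AuthorizationException if realUgi may not impersonate proxyUser
  // from ipAddress under the hadoop.proxyuser.* rules in conf.
  static void checkProxyAccess(String proxyUser, UserGroupInformation realUgi,
      String ipAddress, Configuration conf) throws AuthorizationException {
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
    ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, realUgi),
        ipAddress, conf);
  }
}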
Modified: hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java (original)
+++ hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java Thu Nov 27 01:07:32 2014
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.exec.Fu
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.CompositeService;
import org.apache.hive.service.ServiceException;
@@ -83,10 +84,10 @@ public class CLIService extends Composit
sessionManager = new SessionManager(hiveServer2);
addService(sessionManager);
// If the hadoop cluster is secure, do a kerberos login for the service from the keytab
- if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
+ if (UserGroupInformation.isSecurityEnabled()) {
try {
HiveAuthFactory.loginFromKeytab(hiveConf);
- this.serviceUGI = ShimLoader.getHadoopShims().getUGIForConf(hiveConf);
+ this.serviceUGI = Utils.getUGIForConf(hiveConf);
} catch (IOException e) {
throw new ServiceException("Unable to login to kerberos with given principal/keytab", e);
} catch (LoginException e) {
Modified: hive/trunk/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java (original)
+++ hive/trunk/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java Thu Nov 27 01:07:32 2014
@@ -51,6 +51,7 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.cli.FetchOrientation;
@@ -205,7 +206,7 @@ public class SQLOperation extends Execut
};
try {
- ShimLoader.getHadoopShims().doAs(currentUGI, doAsAction);
+ currentUGI.doAs(doAsAction);
} catch (Exception e) {
setOperationException(new HiveSQLException(e));
LOG.error("Error running hive query as user : " + currentUGI.getShortUserName(), e);
@@ -245,7 +246,7 @@ public class SQLOperation extends Execut
*/
private UserGroupInformation getCurrentUGI(HiveConf opConfig) throws HiveSQLException {
try {
- return ShimLoader.getHadoopShims().getUGIForConf(opConfig);
+ return Utils.getUGIForConf(opConfig);
} catch (Exception e) {
throw new HiveSQLException("Unable to get current user", e);
}
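
The doAs shim was a one-liner, so call sites simply inline it. A minimal sketch of the idiom, with a hypothetical action:

    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.security.UserGroupInformation;

    public class DoAsSketch {
      static String whoAmI(UserGroupInformation ugi) throws Exception {
        // Runs the action with ugi's credentials on the current thread.
        return ugi.doAs(new PrivilegedExceptionAction<String>() {
          @Override
          public String run() throws Exception {
            return UserGroupInformation.getCurrentUser().getShortUserName();
          }
        });
      }
    }
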
Modified: hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java (original)
+++ hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImplwithUGI.java Thu Nov 27 01:07:32 2014
@@ -20,10 +20,14 @@ package org.apache.hive.service.cli.sess
import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.HiveSQLException;
@@ -41,6 +45,7 @@ public class HiveSessionImplwithUGI exte
private String delegationTokenStr = null;
private Hive sessionHive = null;
private HiveSession proxySession = null;
+ static final Log LOG = LogFactory.getLog(HiveSessionImplwithUGI.class);
public HiveSessionImplwithUGI(TProtocolVersion protocol, String username, String password,
HiveConf hiveConf, String ipAddress, String delegationToken) throws HiveSQLException {
@@ -62,14 +67,15 @@ public class HiveSessionImplwithUGI exte
if (owner == null) {
throw new HiveSQLException("No username provided for impersonation");
}
- if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
+ if (UserGroupInformation.isSecurityEnabled()) {
try {
- sessionUgi = ShimLoader.getHadoopShims().createProxyUser(owner);
+ sessionUgi = UserGroupInformation.createProxyUser(
+ owner, UserGroupInformation.getLoginUser());
} catch (IOException e) {
throw new HiveSQLException("Couldn't setup proxy user", e);
}
} else {
- sessionUgi = ShimLoader.getHadoopShims().createRemoteUser(owner, null);
+ sessionUgi = UserGroupInformation.createRemoteUser(owner);
}
}
@@ -98,8 +104,10 @@ public class HiveSessionImplwithUGI exte
public void close() throws HiveSQLException {
try {
acquire(true);
- ShimLoader.getHadoopShims().closeAllForUGI(sessionUgi);
+ FileSystem.closeAllForUGI(sessionUgi);
cancelDelegationToken();
+ } catch (IOException ioe) {
+ LOG.error("Could not clean up file-system handles for UGI: " + sessionUgi, ioe);
} finally {
release(true);
super.close();
@@ -118,7 +126,7 @@ public class HiveSessionImplwithUGI exte
if (delegationTokenStr != null) {
getHiveConf().set("hive.metastore.token.signature", HS2TOKEN);
try {
- ShimLoader.getHadoopShims().setTokenStr(sessionUgi, delegationTokenStr, HS2TOKEN);
+ Utils.setTokenStr(sessionUgi, delegationTokenStr, HS2TOKEN);
} catch (IOException e) {
throw new HiveSQLException("Couldn't setup delegation token in the ugi", e);
}
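
The new session code inlines the former shim bodies: a proxy user stacked on the service's Kerberos login on secure clusters, a plain remote user otherwise. A minimal sketch of that branch (the helper name is illustrative):

    import java.io.IOException;

    import org.apache.hadoop.security.UserGroupInformation;

    public class SessionUgiSketch {
      static UserGroupInformation ugiFor(String owner) throws IOException {
        if (UserGroupInformation.isSecurityEnabled()) {
          // Impersonate owner on top of the service's keytab login.
          return UserGroupInformation.createProxyUser(
              owner, UserGroupInformation.getLoginUser());
        }
        // Simple-auth clusters: a plain remote user is enough.
        return UserGroupInformation.createRemoteUser(owner);
      }
    }
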
Modified: hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionProxy.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionProxy.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionProxy.java (original)
+++ hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionProxy.java Thu Nov 27 01:07:32 2014
@@ -30,7 +30,6 @@ import java.lang.reflect.UndeclaredThrow
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
-import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.cli.HiveSQLException;
@@ -57,7 +56,7 @@ public class HiveSessionProxy implements
if (method.getDeclaringClass() == HiveSessionBase.class) {
return invoke(method, args);
}
- return ShimLoader.getHadoopShims().doAs(ugi,
+ return ugi.doAs(
new PrivilegedExceptionAction<Object> () {
@Override
public Object run() throws HiveSQLException {
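
The dynamic proxy keeps its shape; only the doAs indirection changes. For readers unfamiliar with the pattern, a condensed, self-contained sketch (names are illustrative, not the committed class):

    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.Method;
    import java.lang.reflect.Proxy;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.security.UserGroupInformation;

    public class UgiProxySketch implements InvocationHandler {
      private final Object target;
      private final UserGroupInformation ugi;

      UgiProxySketch(Object target, UserGroupInformation ugi) {
        this.target = target;
        this.ugi = ugi;
      }

      @Override
      public Object invoke(final Object proxy, final Method method,
          final Object[] args) throws Throwable {
        // Every call on the proxy runs as the session user.
        return ugi.doAs(new PrivilegedExceptionAction<Object>() {
          @Override
          public Object run() throws Exception {
            return method.invoke(target, args);
          }
        });
      }

      @SuppressWarnings("unchecked")
      static <T> T wrap(Class<T> iface, T target, UserGroupInformation ugi) {
        return (T) Proxy.newProxyInstance(iface.getClassLoader(),
            new Class<?>[] { iface }, new UgiProxySketch(target, ugi));
      }
    }
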
Modified: hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java (original)
+++ hive/trunk/service/src/java/org/apache/hive/service/server/HiveServer2.java Thu Nov 27 01:07:32 2014
@@ -43,6 +43,8 @@ import org.apache.hadoop.hive.conf.HiveC
import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
import org.apache.hadoop.hive.ql.util.ZooKeeperHiveHelper;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.common.util.HiveStringUtils;
import org.apache.hive.common.util.HiveVersionInfo;
import org.apache.hive.service.CompositeService;
@@ -117,7 +119,7 @@ public class HiveServer2 extends Composi
@Override
public List<ACL> getDefaultAcl() {
- if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
+ if (UserGroupInformation.isSecurityEnabled()) {
// Read all to the world
nodeAcls.addAll(Ids.READ_ACL_UNSAFE);
// Create/Delete/Write/Admin to the authenticated user
@@ -197,7 +199,7 @@ public class HiveServer2 extends Composi
* @throws Exception
*/
private void setUpZooKeeperAuth(HiveConf hiveConf) throws Exception {
- if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
+ if (UserGroupInformation.isSecurityEnabled()) {
String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
if (principal.isEmpty()) {
throw new IOException("HiveServer2 Kerberos principal is empty");
@@ -207,7 +209,7 @@ public class HiveServer2 extends Composi
throw new IOException("HiveServer2 Kerberos keytab is empty");
}
// Install the JAAS Configuration for the runtime
- ShimLoader.getHadoopShims().setZookeeperClientKerberosJaasConfig(principal, keyTabFile);
+ Utils.setZookeeperClientKerberosJaasConfig(principal, keyTabFile);
}
}
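
The JAAS wiring now lives in the shims Utils class; mechanically it is a ZooKeeper system property plus a programmatic javax.security.auth.login.Configuration, as the code removed from HadoopShimsSecure further down shows. A minimal sketch of just the section-selection step:

    import org.apache.zookeeper.client.ZooKeeperSaslClient;

    public class ZkJaasSketch {
      static void selectJaasSection() {
        // Tell the ZooKeeper client which JAAS section to read; a matching
        // javax.security.auth.login.Configuration must be installed
        // separately (see the removed JaasConfiguration class below).
        System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY,
            "HiveZooKeeperClient");
      }
    }
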
Modified: hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Thu Nov 27 01:07:32 2014
@@ -84,7 +84,6 @@ public class Hadoop20SShims extends Hado
@Override
public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
- JobTrackerState state;
switch (clusterStatus.getJobTrackerState()) {
case INITIALIZING:
return JobTrackerState.INITIALIZING;
Modified: hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Thu Nov 27 01:07:32 2014
@@ -435,7 +435,7 @@ public class Hadoop23Shims extends Hadoo
Reporter.class);
construct.setAccessible(true);
newContext = (org.apache.hadoop.mapred.TaskAttemptContext) construct.newInstance(
- new JobConf(conf), taskId, (Reporter) progressable);
+ new JobConf(conf), taskId, progressable);
} catch (Exception e) {
throw new RuntimeException(e);
}
@@ -453,7 +453,7 @@ public class Hadoop23Shims extends Hadoo
public org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapred.JobConf conf,
org.apache.hadoop.mapreduce.JobID jobId, Progressable progressable) {
return new org.apache.hadoop.mapred.JobContextImpl(
- new JobConf(conf), jobId, (org.apache.hadoop.mapred.Reporter) progressable);
+ new JobConf(conf), jobId, progressable);
}
@Override
@@ -609,8 +609,8 @@ public class Hadoop23Shims extends Hadoo
}
public class Hadoop23FileStatus implements HdfsFileStatus {
- private FileStatus fileStatus;
- private AclStatus aclStatus;
+ private final FileStatus fileStatus;
+ private final AclStatus aclStatus;
public Hadoop23FileStatus(FileStatus fileStatus, AclStatus aclStatus) {
this.fileStatus = fileStatus;
this.aclStatus = aclStatus;
@@ -678,7 +678,7 @@ public class Hadoop23Shims extends Hadoo
public RemoteIterator<LocatedFileStatus> listLocatedStatus(final Path f)
throws FileNotFoundException, IOException {
return new RemoteIterator<LocatedFileStatus>() {
- private RemoteIterator<LocatedFileStatus> stats =
+ private final RemoteIterator<LocatedFileStatus> stats =
ProxyFileSystem23.super.listLocatedStatus(
ProxyFileSystem23.super.swizzleParamPath(f));
@@ -711,7 +711,6 @@ public class Hadoop23Shims extends Hadoo
accessMethod.invoke(fs, underlyingFsPath, action);
} else {
// If the FS has no access() method, we can try DefaultFileAccess ..
- UserGroupInformation ugi = getUGIForConf(getConf());
DefaultFileAccess.checkFileAccess(fs, underlyingFsStatus, action);
}
} catch (AccessControlException err) {
@@ -900,28 +899,33 @@ public class Hadoop23Shims extends Hadoo
*/
public class KerberosNameShim implements HadoopShimsSecure.KerberosNameShim {
- private KerberosName kerberosName;
+ private final KerberosName kerberosName;
public KerberosNameShim(String name) {
kerberosName = new KerberosName(name);
}
+ @Override
public String getDefaultRealm() {
return kerberosName.getDefaultRealm();
}
+ @Override
public String getServiceName() {
return kerberosName.getServiceName();
}
+ @Override
public String getHostName() {
return kerberosName.getHostName();
}
+ @Override
public String getRealm() {
return kerberosName.getRealm();
}
+ @Override
public String getShortName() throws IOException {
return kerberosName.getShortName();
}
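
The KerberosNameShim changes above are cosmetic (final field, @Override); the shim remains a thin pass-through to Hadoop's KerberosName. A minimal usage sketch:

    import org.apache.hadoop.security.authentication.util.KerberosName;

    public class KerberosNameSketch {
      public static void main(String[] args) {
        KerberosName name = new KerberosName("hive/host.example.com@EXAMPLE.COM");
        System.out.println(name.getServiceName()); // hive
        System.out.println(name.getHostName());    // host.example.com
        System.out.println(name.getRealm());       // EXAMPLE.COM
        // getShortName() additionally applies the cluster's
        // hadoop.security.auth_to_local rules, which must be loaded first.
      }
    }
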
Modified: hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java (original)
+++ hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge23.java Thu Nov 27 01:07:32 2014
@@ -31,7 +31,7 @@ import org.apache.hadoop.security.SaslRp
*
* This is a 0.23/2.x specific implementation
*/
-public class HadoopThriftAuthBridge23 extends HadoopThriftAuthBridge20S {
+public class HadoopThriftAuthBridge23 extends HadoopThriftAuthBridge {
private static Field SASL_PROPS_FIELD;
private static Class<?> SASL_PROPERTIES_RESOLVER_CLASS;
Modified: hive/trunk/shims/aggregator/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/shims/aggregator/pom.xml?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/shims/aggregator/pom.xml (original)
+++ hive/trunk/shims/aggregator/pom.xml Thu Nov 27 01:07:32 2014
@@ -41,12 +41,6 @@
</dependency>
<dependency>
<groupId>org.apache.hive.shims</groupId>
- <artifactId>hive-shims-0.20</artifactId>
- <version>${project.version}</version>
- <scope>runtime</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hive.shims</groupId>
<artifactId>hive-shims-common-secure</artifactId>
<version>${project.version}</version>
<scope>compile</scope>
Modified: hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1641980&r1=1641979&r2=1641980&view=diff
==============================================================================
--- hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Thu Nov 27 01:07:32 2014
@@ -19,25 +19,17 @@ package org.apache.hadoop.hive.shims;
import java.io.DataInput;
import java.io.DataOutput;
-import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.URI;
-import java.net.URISyntaxException;
import java.security.AccessControlException;
-import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
-import java.util.Map;
import java.util.Set;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
-
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -50,34 +42,16 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
-import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
-import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
-import org.apache.hadoop.http.HtmlQuoting;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobContext;
-import org.apache.hadoop.mapred.OutputCommitter;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.TaskAttemptContext;
import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
import org.apache.hadoop.mapred.lib.CombineFileSplit;
import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.util.KerberosUtil;
-import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.security.token.TokenSelector;
-import org.apache.hadoop.tools.HadoopArchives;
import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.zookeeper.client.ZooKeeperSaslClient;
import com.google.common.primitives.Longs;
@@ -89,11 +63,6 @@ public abstract class HadoopShimsSecure
static final Log LOG = LogFactory.getLog(HadoopShimsSecure.class);
@Override
- public String unquoteHtmlChars(String item) {
- return HtmlQuoting.unquoteHtmlChars(item);
- }
-
- @Override
public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
return new CombineFileInputFormatShim() {
@Override
@@ -104,7 +73,7 @@ public abstract class HadoopShimsSecure
};
}
- public static class InputSplitShim extends CombineFileSplit implements HadoopShims.InputSplitShim {
+ public static class InputSplitShim extends CombineFileSplit {
long shrinkedLength;
boolean _isShrinked;
public InputSplitShim() {
@@ -118,7 +87,6 @@ public abstract class HadoopShimsSecure
_isShrinked = false;
}
- @Override
public void shrinkSplit(long length) {
_isShrinked = true;
shrinkedLength = length;
@@ -336,7 +304,7 @@ public abstract class HadoopShimsSecure
}
@Override
- public InputSplitShim[] getSplits(JobConf job, int numSplits) throws IOException {
+ public CombineFileSplit[] getSplits(JobConf job, int numSplits) throws IOException {
long minSize = job.getLong(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDMINSPLITSIZE"), 0);
// For backward compatibility, let the above parameter be used
@@ -378,261 +346,17 @@ public abstract class HadoopShimsSecure
}
@Override
- public RecordReader getRecordReader(JobConf job, HadoopShims.InputSplitShim split,
+ public RecordReader getRecordReader(JobConf job, CombineFileSplit split,
Reporter reporter,
Class<RecordReader<K, V>> rrClass)
throws IOException {
- CombineFileSplit cfSplit = (CombineFileSplit) split;
+ CombineFileSplit cfSplit = split;
return new CombineFileRecordReader(job, cfSplit, reporter, rrClass);
}
}
@Override
- public String getInputFormatClassName() {
- return "org.apache.hadoop.hive.ql.io.CombineHiveInputFormat";
- }
-
- String[] ret = new String[2];
-
- @Override
- public int createHadoopArchive(Configuration conf, Path sourceDir, Path destDir,
- String archiveName) throws Exception {
-
- HadoopArchives har = new HadoopArchives(conf);
- List<String> args = new ArrayList<String>();
-
- args.add("-archiveName");
- args.add(archiveName);
- args.add("-p");
- args.add(sourceDir.toString());
- args.add(destDir.toString());
-
- return ToolRunner.run(har, args.toArray(new String[0]));
- }
-
- /*
- * This particular instance is for Hadoop 1.0 which creates an archive
- * with only the relative path of the archived directory stored within
- * the archive as compared to the full path in case of earlier versions.
- * See this api in Hadoop20Shims for comparison.
- */
- @Override
- public URI getHarUri(URI original, URI base, URI originalBase)
- throws URISyntaxException {
- URI relative = originalBase.relativize(original);
- if (relative.isAbsolute()) {
- throw new URISyntaxException("Couldn't create URI for location.",
- "Relative: " + relative + " Base: "
- + base + " OriginalBase: " + originalBase);
- }
-
- return base.resolve(relative);
- }
-
- public static class NullOutputCommitter extends OutputCommitter {
- @Override
- public void setupJob(JobContext jobContext) { }
- @Override
- public void cleanupJob(JobContext jobContext) { }
-
- @Override
- public void setupTask(TaskAttemptContext taskContext) { }
- @Override
- public boolean needsTaskCommit(TaskAttemptContext taskContext) {
- return false;
- }
- @Override
- public void commitTask(TaskAttemptContext taskContext) { }
- @Override
- public void abortTask(TaskAttemptContext taskContext) { }
- }
-
- @Override
- public void prepareJobOutput(JobConf conf) {
- conf.setOutputCommitter(NullOutputCommitter.class);
-
- // option to bypass job setup and cleanup was introduced in hadoop-21 (MAPREDUCE-463)
- // but can be backported. So we disable setup/cleanup in all versions >= 0.19
- conf.setBoolean(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDSETUPCLEANUPNEEDED"), false);
-
- // option to bypass task cleanup task was introduced in hadoop-23 (MAPREDUCE-2206)
- // but can be backported. So we disable setup/cleanup in all versions >= 0.19
- conf.setBoolean(ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDTASKCLEANUPNEEDED"), false);
- }
-
- @Override
- public UserGroupInformation getUGIForConf(Configuration conf) throws IOException {
- String doAs = System.getenv("HADOOP_USER_NAME");
- if(doAs != null && doAs.length() > 0) {
- /*
- * this allows doAs (proxy user) to be passed along across process boundary where
- * delegation tokens are not supported. For example, a DDL stmt via WebHCat with
- * a doAs parameter, forks to 'hcat' which needs to start a Session that
- * proxies the end user
- */
- return UserGroupInformation.createProxyUser(doAs, UserGroupInformation.getLoginUser());
- }
- return UserGroupInformation.getCurrentUser();
- }
-
- @Override
- public boolean isSecureShimImpl() {
- return true;
- }
-
- @Override
- public String getShortUserName(UserGroupInformation ugi) {
- return ugi.getShortUserName();
- }
-
- @Override
- public String getTokenStrForm(String tokenSignature) throws IOException {
- UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
- TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector();
-
- Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
- tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
- return token != null ? token.encodeToUrlString() : null;
- }
-
- /**
- * Create a delegation token object for the given token string and service.
- * Add the token to given UGI
- */
- @Override
- public void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService) throws IOException {
- Token<DelegationTokenIdentifier> delegationToken = createToken(tokenStr, tokenService);
- ugi.addToken(delegationToken);
- }
-
- /**
- * Add a given service to delegation token string.
- */
- @Override
- public String addServiceToToken(String tokenStr, String tokenService)
- throws IOException {
- Token<DelegationTokenIdentifier> delegationToken = createToken(tokenStr, tokenService);
- return delegationToken.encodeToUrlString();
- }
-
- /**
- * Create a new token using the given string and service
- * @param tokenStr
- * @param tokenService
- * @return
- * @throws IOException
- */
- private Token<DelegationTokenIdentifier> createToken(String tokenStr, String tokenService)
- throws IOException {
- Token<DelegationTokenIdentifier> delegationToken = new Token<DelegationTokenIdentifier>();
- delegationToken.decodeFromUrlString(tokenStr);
- delegationToken.setService(new Text(tokenService));
- return delegationToken;
- }
-
- @Override
- public <T> T doAs(UserGroupInformation ugi, PrivilegedExceptionAction<T> pvea) throws IOException, InterruptedException {
- return ugi.doAs(pvea);
- }
-
- @Override
- public Path createDelegationTokenFile(Configuration conf) throws IOException {
-
- //get delegation token for user
- String uname = UserGroupInformation.getLoginUser().getShortUserName();
- FileSystem fs = FileSystem.get(conf);
- Token<?> fsToken = fs.getDelegationToken(uname);
-
- File t = File.createTempFile("hive_hadoop_delegation_token", null);
- Path tokenPath = new Path(t.toURI());
-
- //write credential with token to file
- Credentials cred = new Credentials();
- cred.addToken(fsToken.getService(), fsToken);
- cred.writeTokenStorageFile(tokenPath, conf);
-
- return tokenPath;
- }
-
- @Override
- public UserGroupInformation createProxyUser(String userName) throws IOException {
- return UserGroupInformation.createProxyUser(
- userName, UserGroupInformation.getLoginUser());
- }
-
- @Override
- public void authorizeProxyAccess(String proxyUser, UserGroupInformation realUserUgi,
- String ipAddress, Configuration conf) throws IOException {
- ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
- ProxyUsers.authorize(UserGroupInformation.createProxyUser(proxyUser, realUserUgi),
- ipAddress, conf);
- }
-
- @Override
- public boolean isSecurityEnabled() {
- return UserGroupInformation.isSecurityEnabled();
- }
-
- @Override
- public UserGroupInformation createRemoteUser(String userName, List<String> groupNames) {
- return UserGroupInformation.createRemoteUser(userName);
- }
-
- @Override
- public void closeAllForUGI(UserGroupInformation ugi) {
- try {
- FileSystem.closeAllForUGI(ugi);
- } catch (IOException e) {
- LOG.error("Could not clean up file-system handles for UGI: " + ugi, e);
- }
- }
-
- @Override
- public void loginUserFromKeytab(String principal, String keytabFile) throws IOException {
- String hostPrincipal = SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
- UserGroupInformation.loginUserFromKeytab(hostPrincipal, keytabFile);
- }
-
- @Override
- public UserGroupInformation loginUserFromKeytabAndReturnUGI(
- String principal, String keytabFile) throws IOException {
- String hostPrincipal = SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
- return UserGroupInformation.loginUserFromKeytabAndReturnUGI(hostPrincipal, keytabFile);
- }
-
- /**
- * Convert Kerberos principal name pattern to valid Kerberos principal names.
- * @param principal (principal name pattern)
- * @return
- * @throws IOException
- */
- @Override
- public String getResolvedPrincipal(String principal) throws IOException {
- return SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
- }
-
- @Override
- public String getTokenFileLocEnvName() {
- return UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION;
- }
-
- @Override
- public void reLoginUserFromKeytab() throws IOException{
- UserGroupInformation ugi = UserGroupInformation.getLoginUser();
- //checkTGT calls ugi.relogin only after checking if it is close to tgt expiry
- //hadoop relogin is actually done only every x minutes (x=10 in hadoop 1.x)
- if(ugi.isFromKeytab()){
- ugi.checkTGTAndReloginFromKeytab();
- }
- }
-
- @Override
- public boolean isLoginKeytabBased() throws IOException {
- return UserGroupInformation.isLoginKeytabBased();
- }
-
- @Override
abstract public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception;
@Override
@@ -714,58 +438,4 @@ public abstract class HadoopShimsSecure
throws IOException, AccessControlException, Exception {
DefaultFileAccess.checkFileAccess(fs, stat, action);
}
-
- @Override
- public void setZookeeperClientKerberosJaasConfig(String principal, String keyTabFile) throws IOException {
- // ZooKeeper property name to pick the correct JAAS conf section
- final String SASL_LOGIN_CONTEXT_NAME = "HiveZooKeeperClient";
- System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY, SASL_LOGIN_CONTEXT_NAME);
-
- principal = getResolvedPrincipal(principal);
- JaasConfiguration jaasConf = new JaasConfiguration(SASL_LOGIN_CONTEXT_NAME, principal, keyTabFile);
-
- // Install the Configuration in the runtime.
- javax.security.auth.login.Configuration.setConfiguration(jaasConf);
- }
-
- /**
- * A JAAS configuration for ZooKeeper clients intended to use for SASL
- * Kerberos.
- */
- private static class JaasConfiguration extends javax.security.auth.login.Configuration {
- // Current installed Configuration
- private final javax.security.auth.login.Configuration baseConfig = javax.security.auth.login.Configuration
- .getConfiguration();
- private final String loginContextName;
- private final String principal;
- private final String keyTabFile;
-
- public JaasConfiguration(String hiveLoginContextName, String principal, String keyTabFile) {
- this.loginContextName = hiveLoginContextName;
- this.principal = principal;
- this.keyTabFile = keyTabFile;
- }
-
- @Override
- public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
- if (loginContextName.equals(appName)) {
- Map<String, String> krbOptions = new HashMap<String, String>();
- krbOptions.put("doNotPrompt", "true");
- krbOptions.put("storeKey", "true");
- krbOptions.put("useKeyTab", "true");
- krbOptions.put("principal", principal);
- krbOptions.put("keyTab", keyTabFile);
- krbOptions.put("refreshKrb5Config", "true");
- AppConfigurationEntry hiveZooKeeperClientEntry = new AppConfigurationEntry(
- KerberosUtil.getKrb5LoginModuleName(), LoginModuleControlFlag.REQUIRED, krbOptions);
- return new AppConfigurationEntry[] { hiveZooKeeperClientEntry };
- }
- // Try the base config
- if (baseConfig != null) {
- return baseConfig.getAppConfigurationEntry(appName);
- }
- return null;
- }
- }
-
}
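
Most of the methods deleted above were one-line wrappers over stable Hadoop 2.x APIs; the delegation-token helpers move, nearly verbatim, into the new shims Utils class. For reference, the round-trip that setTokenStr performed is captured by a minimal sketch like this (the class name is illustrative):

    import java.io.IOException;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenIdentifier;

    public class TokenSketch {
      static void addTokenStr(UserGroupInformation ugi, String tokenStr,
          String tokenService) throws IOException {
        // Decode the URL-safe token string, retarget it at the given
        // service, and attach it to the UGI's credentials.
        Token<TokenIdentifier> token = new Token<TokenIdentifier>();
        token.decodeFromUrlString(tokenStr);
        token.setService(new Text(tokenService));
        ugi.addToken(token);
      }
    }
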