You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by vi...@apache.org on 2011/03/29 12:30:01 UTC
svn commit: r1086541 - in
/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src:
main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
Author: vinodkv
Date: Tue Mar 29 10:30:00 2011
New Revision: 1086541
URL: http://svn.apache.org/viewvc?rev=1086541&view=rev
Log:
Adding back the accidentally removed client-token code so that the Client can talk to the AM securely.
Modified:
hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java?rev=1086541&r1=1086540&r2=1086541&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java Tue Mar 29 10:30:00 2011
@@ -27,6 +27,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.TaskAttemptID;
@@ -39,6 +40,7 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityInfo;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.ApplicationID;
import org.apache.hadoop.yarn.ApplicationMaster;
import org.apache.hadoop.yarn.ApplicationState;
@@ -46,6 +48,7 @@ import org.apache.hadoop.yarn.YarnExcept
import org.apache.hadoop.yarn.YarnRemoteException;
import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.ipc.YarnRPC;
+import org.apache.hadoop.yarn.security.ApplicationTokenIdentifier;
import org.apache.hadoop.yarn.security.SchedulerSecurityInfo;
public class ClientServiceDelegate {
@@ -92,6 +95,19 @@ public class ClientServiceDelegate {
} else if (ApplicationState.RUNNING.equals(appMaster.state)){
serviceAddr = appMaster.host + ":" + appMaster.rpcPort;
serviceHttpAddr = appMaster.host + ":" + appMaster.httpPort;
+ if (UserGroupInformation.isSecurityEnabled()) {
+ String clientTokenEncoded = appMaster.clientToken.toString();
+ Token<ApplicationTokenIdentifier> clientToken =
+ new Token<ApplicationTokenIdentifier>();
+ try {
+ clientToken.decodeFromUrlString(clientTokenEncoded);
+ clientToken.setService(new Text(appMaster.host.toString() + ":"
+ + appMaster.rpcPort));
+ UserGroupInformation.getCurrentUser().addToken(clientToken);
+ } catch (IOException e) {
+ throw new YarnException(e);
+ }
+ }
} else {
LOG.warn("Cannot connect to Application with state " + appMaster.state);
throw new YarnException(
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java?rev=1086541&r1=1086540&r2=1086541&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java Tue Mar 29 10:30:00 2011
@@ -32,7 +32,10 @@ import org.apache.hadoop.SleepJob;
import org.apache.hadoop.RandomTextWriterJob.RandomInputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobStatus;
@@ -41,6 +44,7 @@ import org.apache.hadoop.mapreduce.TaskA
import org.apache.hadoop.mapreduce.TaskCompletionEvent;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.security.UserGroupInformation;
@@ -109,13 +113,27 @@ public class TestMRJobs {
mrCluster.getConfig().set(RandomTextWriterJob.TOTAL_BYTES, "3072");
mrCluster.getConfig().set(RandomTextWriterJob.BYTES_PER_MAP, "1024");
Job job = randomWriterJob.createJob(mrCluster.getConfig());
- FileOutputFormat.setOutputPath(job, new Path(mrCluster.getTestWorkDir().getAbsolutePath(),
- "random-output"));
+ Path outputDir = new Path(mrCluster.getTestWorkDir().getAbsolutePath(),
+ "random-output");
+ FileOutputFormat.setOutputPath(job, outputDir);
// TODO: We should not be setting MRAppJar as job.jar. It should be
// uploaded separately by YarnRunner.
job.setJar(new File(MiniMRYarnCluster.APPJAR).getAbsolutePath());
job.waitForCompletion(true);
Assert.assertEquals(JobStatus.State.SUCCEEDED, job.getJobState());
+ // Make sure there are three files in the output-dir
+ RemoteIterator<FileStatus> iterator =
+ FileContext.getFileContext(mrCluster.getConfig()).listStatus(
+ outputDir);
+ int count = 0;
+ while (iterator.hasNext()) {
+ FileStatus file = iterator.next();
+ if (!file.getPath().getName()
+ .equals(FileOutputCommitter.SUCCEEDED_FILE_NAME)) {
+ count++;
+ }
+ }
+ Assert.assertEquals("Number of part files is wrong!", 3, count);
}
@Test