Posted to mapreduce-commits@hadoop.apache.org by ma...@apache.org on 2011/11/01 02:54:20 UTC

svn commit: r1195763 - in /hadoop/common/branches/branch-0.23/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/...

Author: mahadev
Date: Tue Nov  1 01:54:20 2011
New Revision: 1195763

URL: http://svn.apache.org/viewvc?rev=1195763&view=rev
Log:
MAPREDUCE-3103. Implement Job ACLs for MRAppMaster. (mahadev) - Merging r1195761 from trunk.
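
In outline, the view-side check this change adds to the AM web UI resolves the remote web user to a UserGroupInformation and asks the Job whether that user holds the VIEW_JOB ACL; a failed check is answered with HTTP 403. A condensed sketch of that pattern, based on the AppController hunk further down (the standalone helper and its remoteUser parameter are illustrative stand-ins for AppController.checkAccess() and request().getRemoteUser(); they are not part of this commit):

    import org.apache.hadoop.mapreduce.JobACL;
    import org.apache.hadoop.mapreduce.v2.app.job.Job;
    import org.apache.hadoop.security.UserGroupInformation;

    // Illustrative helper: 'remoteUser' stands in for request().getRemoteUser()
    // in the real AppController; the ACL check itself mirrors the diff below.
    static void checkViewAccess(Job job, String remoteUser) {
      UserGroupInformation callerUgi =
          UserGroupInformation.createRemoteUser(remoteUser);
      if (!job.checkAccess(callerUgi, JobACL.VIEW_JOB)) {
        // In AppController this path goes through accessDenied(), which sets
        // HttpServletResponse.SC_FORBIDDEN before throwing to abort rendering.
        throw new RuntimeException("Access denied: user " + remoteUser
            + " cannot view " + job.getID());
      }
    }

The history-server side of the change is a plain refactor visible in the CompletedJob and JobHistory hunks: JobHistory constructs a single JobACLsManager and passes it into each CompletedJob, instead of CompletedJob building a new manager on every checkAccess() call.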

Modified:
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt?rev=1195763&r1=1195762&r2=1195763&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt Tue Nov  1 01:54:20 2011
@@ -1857,6 +1857,9 @@ Release 0.23.0 - Unreleased
 
     MAPREDUCE-3220. Fixed TestCombineOutputCollector. (Devaraj K via acmurthy) 
 
+    MAPREDUCE-3103. Implement Job ACLs for MRAppMaster. 
+    (mahadev)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java?rev=1195763&r1=1195762&r2=1195763&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java Tue Nov  1 01:54:20 2011
@@ -18,11 +18,9 @@
 
 package org.apache.hadoop.mapreduce.v2.app.client;
 
-import java.io.IOException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
-import java.security.AccessControlException;
 import java.util.Arrays;
 import java.util.Collection;
 
@@ -32,7 +30,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ipc.Server;
-import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
@@ -196,13 +193,6 @@ public class MRClientService extends Abs
       if (job == null) {
         throw RPCUtil.getRemoteException("Unknown job " + jobID);
       }
-      //TODO fix job acls.
-      //JobACL operation = JobACL.VIEW_JOB;
-      //if (modifyAccess) {
-      //  operation = JobACL.MODIFY_JOB;
-      //}
-      //TO disable check access ofr now.
-      //checkAccess(job, operation);
       return job;
     }
  
@@ -226,24 +216,6 @@ public class MRClientService extends Abs
       return attempt;
     }
 
-    private void checkAccess(Job job, JobACL jobOperation) 
-      throws YarnRemoteException {
-      if (!UserGroupInformation.isSecurityEnabled()) {
-        return;
-      }
-      UserGroupInformation callerUGI;
-      try {
-        callerUGI = UserGroupInformation.getCurrentUser();
-      } catch (IOException e) {
-        throw RPCUtil.getRemoteException(e);
-      }
-      if(!job.checkAccess(callerUGI, jobOperation)) {
-        throw RPCUtil.getRemoteException(new AccessControlException("User "
-            + callerUGI.getShortUserName() + " cannot perform operation "
-            + jobOperation.name() + " on " + job.getID()));
-      }
-    }
-
     @Override
     public GetCountersResponse getCounters(GetCountersRequest request) 
       throws YarnRemoteException {
@@ -304,6 +276,7 @@ public class MRClientService extends Abs
       return response;
     }
     
+    @SuppressWarnings("unchecked")
     @Override
     public KillJobResponse killJob(KillJobRequest request) 
       throws YarnRemoteException {
@@ -320,6 +293,7 @@ public class MRClientService extends Abs
       return response;
     }
 
+    @SuppressWarnings("unchecked")
     @Override
     public KillTaskResponse killTask(KillTaskRequest request) 
       throws YarnRemoteException {
@@ -334,6 +308,7 @@ public class MRClientService extends Abs
       return response;
     }
     
+    @SuppressWarnings("unchecked")
     @Override
     public KillTaskAttemptResponse killTaskAttempt(
         KillTaskAttemptRequest request) throws YarnRemoteException {
@@ -363,6 +338,7 @@ public class MRClientService extends Abs
       return response;
     }
 
+    @SuppressWarnings("unchecked")
     @Override
     public FailTaskAttemptResponse failTaskAttempt(
         FailTaskAttemptRequest request) throws YarnRemoteException {

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java?rev=1195763&r1=1195762&r2=1195763&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java Tue Nov  1 01:54:20 2011
@@ -28,9 +28,12 @@ import javax.servlet.http.HttpServletRes
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.StringHelper;
 import org.apache.hadoop.yarn.util.Times;
@@ -267,6 +270,29 @@ public class AppController extends Contr
     setStatus(HttpServletResponse.SC_NOT_FOUND);
     setTitle(join("Not found: ", s));
   }
+  
+  /**
+   * Render a ACCESS_DENIED error.
+   * @param s the error message to include.
+   */
+  void accessDenied(String s) {
+    setStatus(HttpServletResponse.SC_FORBIDDEN);
+    setTitle(join("Access denied: ", s));
+    throw new RuntimeException("Access denied: " + s);
+  }
+
+  /**
+   * check for job access.
+   * @param job the job that is being accessed
+   */
+  void checkAccess(Job job) {
+    UserGroupInformation callerUgi = UserGroupInformation.createRemoteUser(
+        request().getRemoteUser());
+    if (!job.checkAccess(callerUgi, JobACL.VIEW_JOB)) {
+      accessDenied("User " + request().getRemoteUser() + " does not have " +
+          " permissions.");
+    }
+  }
 
   /**
    * Ensure that a JOB_ID was passed into the page.
@@ -281,6 +307,9 @@ public class AppController extends Contr
       if (app.getJob() == null) {
         notFound($(JOB_ID));
       }
+      /* check for acl access */
+      Job job = app.context.getJob(jobID);
+      checkAccess(job);
     } catch (Exception e) {
       badRequest(e.getMessage() == null ? 
           e.getClass().getName() : e.getMessage());
@@ -296,7 +325,8 @@ public class AppController extends Contr
         throw new RuntimeException("missing task ID");
       }
       TaskId taskID = MRApps.toTaskID($(TASK_ID));
-      app.setJob(app.context.getJob(taskID.getJobId()));
+      Job job = app.context.getJob(taskID.getJobId());
+      app.setJob(job);
       if (app.getJob() == null) {
         notFound(MRApps.toString(taskID.getJobId()));
       } else {
@@ -305,6 +335,7 @@ public class AppController extends Contr
           notFound($(TASK_ID));
         }
       }
+      checkAccess(job);
     } catch (Exception e) {
       badRequest(e.getMessage());
     }

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java?rev=1195763&r1=1195762&r2=1195763&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java Tue Nov  1 01:54:20 2011
@@ -74,19 +74,20 @@ public class CompletedJob implements org
   private final Map<TaskId, Task> reduceTasks = new HashMap<TaskId, Task>();
   private final String user;
   private final Path confFile;
-  
+  private JobACLsManager aclsMgr;
   private List<TaskAttemptCompletionEvent> completionEvents = null;
   private JobInfo jobInfo;
 
   public CompletedJob(Configuration conf, JobId jobId, Path historyFile, 
-      boolean loadTasks, String userName, Path confFile) throws IOException {
+      boolean loadTasks, String userName, Path confFile, JobACLsManager aclsMgr) 
+          throws IOException {
     LOG.info("Loading job: " + jobId + " from file: " + historyFile);
     this.conf = conf;
     this.jobId = jobId;
     this.confFile = confFile;
+    this.aclsMgr = aclsMgr;
     
     loadFullHistoryData(loadTasks, historyFile);
-
     user = userName;
     counters = TypeConverter.toYarn(jobInfo.getTotalCounters());
     diagnostics.add(jobInfo.getErrorInfo());
@@ -314,7 +315,6 @@ public class CompletedJob implements org
     }
     Map<JobACL, AccessControlList> jobACLs = jobInfo.getJobACLs();
     AccessControlList jobACL = jobACLs.get(jobOperation);
-    JobACLsManager aclsMgr = new JobACLsManager(conf);
     return aclsMgr.checkAccess(callerUGI, jobOperation, 
         jobInfo.getUsername(), jobACL);
   }

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java?rev=1195763&r1=1195762&r2=1195763&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java Tue Nov  1 01:54:20 2011
@@ -48,6 +48,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.mapred.JobACLsManager;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobSummary;
@@ -125,6 +126,8 @@ public class JobHistory extends Abstract
   //The number of jobs to maintain in the job list cache.
   private int jobListCacheSize;
   
+  private JobACLsManager aclsMgr;
+  
   //The number of loaded jobs.
   private int loadedJobCacheSize;
   
@@ -203,7 +206,7 @@ public class JobHistory extends Abstract
           + intermediateDoneDirPath + "]", e);
     }
     
-    
+    this.aclsMgr = new JobACLsManager(conf);
     
     jobListCacheSize = conf.getInt(JHAdminConfig.MR_HISTORY_JOBLIST_CACHE_SIZE,
         DEFAULT_JOBLIST_CACHE_SIZE);
@@ -648,7 +651,7 @@ public class JobHistory extends Abstract
       try {
         Job job = new CompletedJob(conf, metaInfo.getJobIndexInfo().getJobId(), 
             metaInfo.getHistoryFile(), true, metaInfo.getJobIndexInfo().getUser(),
-            metaInfo.getConfFile());
+            metaInfo.getConfFile(), this.aclsMgr);
         addToLoadedJobCache(job);
         return job;
       } catch (IOException e) {