Posted to common-commits@hadoop.apache.org by oz...@apache.org on 2015/05/08 09:01:06 UTC

hadoop git commit: MAPREDUCE-6284. Add Task Attempt State API to MapReduce Application Master REST API. Contributed by Ryu Kobayashi.

Repository: hadoop
Updated Branches:
  refs/heads/trunk c5afe444d -> d18f10ad1


MAPREDUCE-6284. Add Task Attempt State API to MapReduce Application Master REST API. Contributed by Ryu Kobayashi.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d18f10ad
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d18f10ad
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d18f10ad

Branch: refs/heads/trunk
Commit: d18f10ad1b3e497fa1aaaeb85ba055f87d9849f7
Parents: c5afe44
Author: Tsuyoshi Ozawa <oz...@apache.org>
Authored: Fri May 8 15:54:14 2015 +0900
Committer: Tsuyoshi Ozawa <oz...@apache.org>
Committed: Fri May 8 15:59:46 2015 +0900

----------------------------------------------------------------------
 hadoop-mapreduce-project/CHANGES.txt            |   3 +
 .../v2/app/client/MRClientService.java          |   6 +
 .../mapreduce/v2/app/webapp/AMWebServices.java  | 112 ++++++++
 .../v2/app/webapp/JAXBContextResolver.java      |  33 ++-
 .../v2/app/webapp/dao/JobTaskAttemptState.java  |  48 ++++
 .../hadoop/mapreduce/v2/app/MockAppContext.java |   2 +-
 .../mapreduce/v2/app/MockEventHandler.java      |  28 ++
 .../v2/app/webapp/TestAMWebServicesAttempt.java | 273 +++++++++++++++++++
 .../src/site/markdown/MapredAppMasterRest.md    | 167 ++++++++++++
 .../server/webproxy/WebAppProxyServlet.java     |  84 +++++-
 10 files changed, 733 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d18f10ad/hadoop-mapreduce-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 8f3c960..2120194 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -261,6 +261,9 @@ Release 2.8.0 - UNRELEASED
 
   NEW FEATURES
 
+   MAPREDUCE-6284. Add Task Attempt State API to MapReduce Application
+   Master REST API. (Ryu Kobayashi via ozawa)
+
   IMPROVEMENTS
 
     MAPREDUCE-6291. Correct mapred queue usage command.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d18f10ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
index b52afd8..ceb1dbf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
@@ -84,6 +84,7 @@ import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
@@ -424,6 +425,11 @@ public class MRClientService extends AbstractService implements ClientService {
     }
   }
 
+  public KillTaskAttemptResponse forceKillTaskAttempt(
+      KillTaskAttemptRequest request) throws YarnException, IOException {
+    return protocolHandler.killTaskAttempt(request);
+  }
+
   public WebApp getWebApp() {
     return webApp;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d18f10ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
index d02eb18..5d50db7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
@@ -19,10 +19,15 @@
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
 import java.io.IOException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.security.AccessControlException;
+import java.security.PrivilegedExceptionAction;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.Consumes;
 import javax.ws.rs.GET;
+import javax.ws.rs.PUT;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
@@ -30,15 +35,21 @@ import javax.ws.rs.QueryParam;
 import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
 import javax.ws.rs.core.Response.Status;
 
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskAttemptRequestPBImpl;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.client.MRClientService;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -50,6 +61,7 @@ import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskCounterInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobsInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ReduceTaskAttemptInfo;
@@ -59,16 +71,19 @@ import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
 import org.apache.hadoop.yarn.webapp.NotFoundException;
 
+import com.google.common.base.Preconditions;
 import com.google.inject.Inject;
 
 @Path("/ws/v1/mapreduce")
 public class AMWebServices {
   private final AppContext appCtx;
   private final App app;
+  private final MRClientService service;
 
   private @Context HttpServletResponse response;
   
@@ -76,6 +91,7 @@ public class AMWebServices {
   public AMWebServices(final App app, final AppContext context) {
     this.appCtx = context;
     this.app = app;
+    this.service = new MRClientService(context);
   }
 
   Boolean hasAccess(Job job, HttpServletRequest request) {
@@ -396,6 +412,59 @@ public class AMWebServices {
   }
 
   @GET
+  @Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/state")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobTaskAttemptState getJobTaskAttemptState(
+      @Context HttpServletRequest hsr,
+      @PathParam("jobid") String jid, @PathParam("taskid") String tid,
+      @PathParam("attemptid") String attId)
+          throws IOException, InterruptedException {
+    init();
+    Job job = getJobFromJobIdString(jid, appCtx);
+    checkAccess(job, hsr);
+    Task task = getTaskFromTaskIdString(tid, job);
+    TaskAttempt ta = getTaskAttemptFromTaskAttemptString(attId, task);
+    return new JobTaskAttemptState(ta.getState().toString());
+  }
+
+  @PUT
+  @Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/state")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  @Consumes({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public Response updateJobTaskAttemptState(JobTaskAttemptState targetState,
+      @Context HttpServletRequest hsr, @PathParam("jobid") String jid,
+      @PathParam("taskid") String tid, @PathParam("attemptid") String attId)
+          throws IOException, InterruptedException {
+    init();
+    Job job = getJobFromJobIdString(jid, appCtx);
+    checkAccess(job, hsr);
+
+    String remoteUser = hsr.getRemoteUser();
+    UserGroupInformation callerUGI = null;
+    if (remoteUser != null) {
+      callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
+    }
+
+    Task task = getTaskFromTaskIdString(tid, job);
+    TaskAttempt ta = getTaskAttemptFromTaskAttemptString(attId, task);
+    if (!ta.getState().toString().equals(targetState.getState())) {
+      // user is attempting to change state. right now we only
+      // allow users to kill the job task attempt
+      if (targetState.getState().equals(TaskAttemptState.KILLED.toString())) {
+        return killJobTaskAttempt(ta, callerUGI, hsr);
+      }
+      throw new BadRequestException("Only '"
+          + TaskAttemptState.KILLED.toString()
+          + "' is allowed as a target state.");
+    }
+
+    JobTaskAttemptState ret = new JobTaskAttemptState();
+    ret.setState(ta.getState().toString());
+
+    return Response.status(Status.OK).entity(ret).build();
+  }
+
+  @GET
   @Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/counters")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public JobTaskAttemptCounterInfo getJobTaskAttemptIdCounters(
@@ -409,4 +478,47 @@ public class AMWebServices {
     TaskAttempt ta = getTaskAttemptFromTaskAttemptString(attId, task);
     return new JobTaskAttemptCounterInfo(ta);
   }
+
+  protected Response killJobTaskAttempt(TaskAttempt ta,
+      UserGroupInformation callerUGI, HttpServletRequest hsr)
+          throws IOException, InterruptedException {
+    Preconditions.checkNotNull(ta, "ta cannot be null");
+
+    String userName = callerUGI.getUserName();
+    final TaskAttemptId attemptId = ta.getID();
+    try {
+      callerUGI
+          .doAs(new PrivilegedExceptionAction<KillTaskAttemptResponse>() {
+            @Override
+            public KillTaskAttemptResponse run()
+                throws IOException, YarnException {
+              KillTaskAttemptRequest req =  new KillTaskAttemptRequestPBImpl();
+              req.setTaskAttemptId(attemptId);
+              return service.forceKillTaskAttempt(req);
+            }
+          });
+    } catch (UndeclaredThrowableException ue) {
+      // if the root cause is a permissions issue
+      // bubble that up to the user
+      if (ue.getCause() instanceof YarnException) {
+        YarnException ye = (YarnException) ue.getCause();
+        if (ye.getCause() instanceof AccessControlException) {
+          String taId = attemptId.toString();
+          String msg =
+              "Unauthorized attempt to kill task attempt " + taId
+                  + " by remote user " + userName;
+          return Response.status(Status.FORBIDDEN).entity(msg).build();
+        } else {
+          throw ue;
+        }
+      } else {
+        throw ue;
+      }
+    }
+
+    JobTaskAttemptState ret = new JobTaskAttemptState();
+    ret.setState(TaskAttemptState.KILLED.toString());
+
+    return Response.status(Status.OK).entity(ret).build();
+  }
 }

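For context, a minimal client-side sketch of calling the new GET state endpoint added above. This is not part of the commit: the proxy address and the application/job/task/attempt IDs are placeholders, and simple authentication via the user.name query parameter is assumed, as in the tests further below. A corresponding PUT example appears after the documentation diff.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class TaskAttemptStateGet {
      public static void main(String[] args) throws Exception {
        // Placeholder proxy address and IDs; substitute values from a real cluster.
        String url = "http://proxyhost:8088/proxy/application_1429692837321_0001"
            + "/ws/v1/mapreduce/jobs/job_1429692837321_0001"
            + "/tasks/task_1429692837321_0001_m_000000"
            + "/attempts/attempt_1429692837321_0001_m_000000_0/state"
            + "?user.name=testuser";

        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("Accept", "application/json");

        System.out.println("HTTP " + conn.getResponseCode());
        try (BufferedReader in = new BufferedReader(new InputStreamReader(
            conn.getInputStream(), StandardCharsets.UTF_8))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);   // e.g. {"state":"RUNNING"}
          }
        }
        conn.disconnect();
      }
    }
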
http://git-wip-us.apache.org/repos/asf/hadoop/blob/d18f10ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java
index b0da4c2..88c7d86 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java
@@ -18,9 +18,8 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import java.util.Set;
-import java.util.HashSet;
-import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
 
 import com.sun.jersey.api.json.JSONConfiguration;
 import com.sun.jersey.api.json.JSONJAXBContext;
@@ -39,6 +38,7 @@ import org.apache.hadoop.mapreduce.v2.app.webapp.dao.CounterGroupInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.CounterInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobsInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptCounterInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskCounterInfo;
@@ -55,8 +55,7 @@ import org.apache.hadoop.yarn.webapp.RemoteExceptionData;
 @Provider
 public class JAXBContextResolver implements ContextResolver<JAXBContext> {
 
-  private JAXBContext context;
-  private final Set<Class> types;
+  private final Map<Class, JAXBContext> typesContextMap;
 
   // you have to specify all the dao classes here
   private final Class[] cTypes = {AMAttemptInfo.class, AMAttemptsInfo.class,
@@ -67,14 +66,30 @@ public class JAXBContextResolver implements ContextResolver<JAXBContext> {
     TaskAttemptInfo.class, TaskInfo.class, TasksInfo.class,
     TaskAttemptsInfo.class, ConfEntryInfo.class, RemoteExceptionData.class};
 
+  // these dao classes need root unwrapping
+  private final Class[] rootUnwrappedTypes = {JobTaskAttemptState.class};
+
   public JAXBContextResolver() throws Exception {
-    this.types = new HashSet<Class>(Arrays.asList(cTypes));
-    this.context = new JSONJAXBContext(JSONConfiguration.natural().
-        rootUnwrapping(false).build(), cTypes);
+    JAXBContext context;
+    JAXBContext unWrappedRootContext;
+
+    this.typesContextMap = new HashMap<Class, JAXBContext>();
+    context =
+        new JSONJAXBContext(JSONConfiguration.natural().rootUnwrapping(false)
+            .build(), cTypes);
+    unWrappedRootContext =
+        new JSONJAXBContext(JSONConfiguration.natural().rootUnwrapping(true)
+            .build(), rootUnwrappedTypes);
+    for (Class type : cTypes) {
+      typesContextMap.put(type, context);
+    }
+    for (Class type : rootUnwrappedTypes) {
+      typesContextMap.put(type, unWrappedRootContext);
+    }
   }
 
   @Override
   public JAXBContext getContext(Class<?> objectType) {
-    return (types.contains(objectType)) ? context : null;
+    return typesContextMap.get(objectType);
   }
 }

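The point of the separate root-unwrapped context is the shape of the JSON produced for JobTaskAttemptState: with rootUnwrapping(false) the payload is wrapped in the @XmlRootElement name, while rootUnwrapping(true) yields the bare object that the new state API documents. A small sketch of the difference, assuming the Jersey 1.x JSON classes used above plus its JSONMarshaller (the expected outputs in the comments are approximate):

    import com.sun.jersey.api.json.JSONConfiguration;
    import com.sun.jersey.api.json.JSONJAXBContext;
    import com.sun.jersey.api.json.JSONMarshaller;

    import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptState;

    public class RootUnwrappingSketch {
      public static void main(String[] args) throws Exception {
        JobTaskAttemptState state = new JobTaskAttemptState("KILLED");

        // Wrapped configuration, as used for the other dao classes.
        JSONJAXBContext wrapped = new JSONJAXBContext(
            JSONConfiguration.natural().rootUnwrapping(false).build(),
            JobTaskAttemptState.class);
        JSONMarshaller m = wrapped.createJSONMarshaller();
        m.marshallToJSON(state, System.out);
        // approx: {"jobTaskAttemptState":{"state":"KILLED"}}
        System.out.println();

        // Root-unwrapped configuration, as registered for JobTaskAttemptState above.
        JSONJAXBContext unwrapped = new JSONJAXBContext(
            JSONConfiguration.natural().rootUnwrapping(true).build(),
            JobTaskAttemptState.class);
        JSONMarshaller m2 = unwrapped.createJSONMarshaller();
        m2.marshallToJSON(state, System.out);
        // approx: {"state":"KILLED"}
      }
    }
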
http://git-wip-us.apache.org/repos/asf/hadoop/blob/d18f10ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptState.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptState.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptState.java
new file mode 100644
index 0000000..ef5463a
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptState.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * Job task attempt state.
+ */
+@XmlRootElement(name = "jobTaskAttemptState")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class JobTaskAttemptState {
+
+  private String state;
+
+  public JobTaskAttemptState() {
+  }
+
+  public JobTaskAttemptState(String state) {
+    this.state = state;
+  }
+
+  public void setState(String state) {
+    this.state = state;
+  }
+
+  public String getState() {
+    return this.state;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d18f10ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockAppContext.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockAppContext.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockAppContext.java
index dae0aa7..a900241 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockAppContext.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockAppContext.java
@@ -95,7 +95,7 @@ public class MockAppContext implements AppContext {
   @SuppressWarnings("rawtypes")
   @Override
   public EventHandler getEventHandler() {
-    return null;
+    return new MockEventHandler();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d18f10ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockEventHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockEventHandler.java
new file mode 100644
index 0000000..80b50e7
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockEventHandler.java
@@ -0,0 +1,28 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.app;
+
+import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
+import org.apache.hadoop.yarn.event.EventHandler;
+
+public class MockEventHandler implements EventHandler<TaskAttemptEvent> {
+  @Override
+  public void handle(TaskAttemptEvent event) {
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d18f10ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempt.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempt.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempt.java
new file mode 100644
index 0000000..884e66b
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempt.java
@@ -0,0 +1,273 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.StringReader;
+import java.util.Enumeration;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Singleton;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the app master web service REST API for getting and updating the
+ * state of a task attempt:
+ *
+ * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/state
+ */
+public class TestAMWebServicesAttempt extends JerseyTest {
+
+  private static Configuration conf = new Configuration();
+  private static AppContext appContext;
+  private String webserviceUserName = "testuser";
+
+  private Injector injector = Guice.createInjector(new ServletModule() {
+    @Override
+    protected void configureServlets() {
+      appContext = new MockAppContext(0, 1, 2, 1);
+      bind(JAXBContextResolver.class);
+      bind(AMWebServices.class);
+      bind(GenericExceptionHandler.class);
+      bind(AppContext.class).toInstance(appContext);
+      bind(Configuration.class).toInstance(conf);
+
+      serve("/*").with(GuiceContainer.class);
+      filter("/*").through(TestRMCustomAuthFilter.class);
+    }
+  });
+
+  @Singleton
+  public static class TestRMCustomAuthFilter extends AuthenticationFilter {
+    @Override
+    protected Properties getConfiguration(String configPrefix,
+        FilterConfig filterConfig) throws ServletException {
+      Properties props = new Properties();
+      Enumeration<?> names = filterConfig.getInitParameterNames();
+      while (names.hasMoreElements()) {
+        String name = (String) names.nextElement();
+        if (name.startsWith(configPrefix)) {
+          String value = filterConfig.getInitParameter(name);
+          props.put(name.substring(configPrefix.length()), value);
+        }
+      }
+      props.put(AuthenticationFilter.AUTH_TYPE, "simple");
+      props.put(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
+      return props;
+    }
+  }
+
+
+  public class GuiceServletConfig extends GuiceServletContextListener {
+    @Override
+    protected Injector getInjector() {
+      return injector;
+    }
+  }
+
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+  }
+
+  public TestAMWebServicesAttempt() {
+    super(new WebAppDescriptor.Builder(
+        "org.apache.hadoop.mapreduce.v2.app.webapp")
+        .contextListenerClass(GuiceServletConfig.class)
+        .filterClass(com.google.inject.servlet.GuiceFilter.class)
+        .contextPath("jersey-guice-filter").servletPath("/").build());
+  }
+
+  @Test
+  public void testGetTaskAttemptIdState() throws Exception {
+    WebResource r = resource();
+    Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+    for (JobId id : jobsMap.keySet()) {
+      String jobId = MRApps.toString(id);
+
+      for (Task task : jobsMap.get(id).getTasks().values()) {
+        String tid = MRApps.toString(task.getID());
+
+        for (TaskAttempt att : task.getAttempts().values()) {
+          TaskAttemptId attemptid = att.getID();
+          String attid = MRApps.toString(attemptid);
+
+          ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+              .path("jobs").path(jobId).path("tasks").path(tid)
+              .path("attempts").path(attid).path("state")
+              .queryParam("user.name", webserviceUserName)
+              .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+          assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+          JSONObject json = response.getEntity(JSONObject.class);
+          assertEquals("incorrect number of elements", 1, json.length());
+          assertEquals(att.getState().toString(), json.get("state"));
+        }
+      }
+    }
+  }
+
+  @Test
+  public void testGetTaskAttemptIdXMLState() throws Exception {
+    WebResource r = resource();
+    Map<JobId, Job> jobsMap = appContext.getAllJobs();
+    for (JobId id : jobsMap.keySet()) {
+      String jobId = MRApps.toString(id);
+      for (Task task : jobsMap.get(id).getTasks().values()) {
+
+        String tid = MRApps.toString(task.getID());
+        for (TaskAttempt att : task.getAttempts().values()) {
+          TaskAttemptId attemptid = att.getID();
+          String attid = MRApps.toString(attemptid);
+
+          ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+              .path("jobs").path(jobId).path("tasks").path(tid)
+              .path("attempts").path(attid).path("state")
+              .queryParam("user.name", webserviceUserName)
+              .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+
+          assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+          String xml = response.getEntity(String.class);
+          DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+          DocumentBuilder db = dbf.newDocumentBuilder();
+          InputSource is = new InputSource();
+          is.setCharacterStream(new StringReader(xml));
+          Document dom = db.parse(is);
+          NodeList nodes = dom.getElementsByTagName("jobTaskAttemptState");
+          assertEquals(1, nodes.getLength());
+          String state = WebServicesTestUtils.getXmlString(
+              (Element) nodes.item(0), "state");
+          assertEquals(att.getState().toString(), state);
+        }
+      }
+    }
+  }
+
+  @Test
+  public void testPutTaskAttemptIdState() throws Exception {
+    WebResource r = resource();
+    Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+    for (JobId id : jobsMap.keySet()) {
+      String jobId = MRApps.toString(id);
+
+      for (Task task : jobsMap.get(id).getTasks().values()) {
+        String tid = MRApps.toString(task.getID());
+
+        for (TaskAttempt att : task.getAttempts().values()) {
+          TaskAttemptId attemptid = att.getID();
+          String attid = MRApps.toString(attemptid);
+
+          ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+              .path("jobs").path(jobId).path("tasks").path(tid)
+              .path("attempts").path(attid).path("state")
+              .queryParam("user.name", webserviceUserName)
+              .accept(MediaType.APPLICATION_JSON)
+              .type(MediaType.APPLICATION_JSON)
+              .put(ClientResponse.class, "{\"state\":\"KILLED\"}");
+          assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+          JSONObject json = response.getEntity(JSONObject.class);
+          assertEquals("incorrect number of elements", 1, json.length());
+          assertEquals(TaskAttemptState.KILLED.toString(), json.get("state"));
+        }
+      }
+    }
+  }
+
+  @Test
+  public void testPutTaskAttemptIdXMLState() throws Exception {
+    WebResource r = resource();
+    Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+    for (JobId id : jobsMap.keySet()) {
+      String jobId = MRApps.toString(id);
+
+      for (Task task : jobsMap.get(id).getTasks().values()) {
+        String tid = MRApps.toString(task.getID());
+
+        for (TaskAttempt att : task.getAttempts().values()) {
+          TaskAttemptId attemptid = att.getID();
+          String attid = MRApps.toString(attemptid);
+
+          ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+              .path("jobs").path(jobId).path("tasks").path(tid)
+              .path("attempts").path(attid).path("state")
+              .queryParam("user.name", webserviceUserName)
+              .accept(MediaType.APPLICATION_XML_TYPE)
+              .type(MediaType.APPLICATION_XML_TYPE)
+              .put(ClientResponse.class,
+                  "<jobTaskAttemptState><state>KILLED" +
+                      "</state></jobTaskAttemptState>");
+          assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+          String xml = response.getEntity(String.class);
+          DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+          DocumentBuilder db = dbf.newDocumentBuilder();
+          InputSource is = new InputSource();
+          is.setCharacterStream(new StringReader(xml));
+          Document dom = db.parse(is);
+          NodeList nodes = dom.getElementsByTagName("jobTaskAttemptState");
+          assertEquals(1, nodes.getLength());
+          String state = WebServicesTestUtils.getXmlString(
+              (Element) nodes.item(0), "state");
+          assertEquals(TaskAttemptState.KILLED.toString(), state);
+        }
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d18f10ad/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/markdown/MapredAppMasterRest.md
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/markdown/MapredAppMasterRest.md b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/markdown/MapredAppMasterRest.md
index b0962c0..e128eb1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/markdown/MapredAppMasterRest.md
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/site/markdown/MapredAppMasterRest.md
@@ -28,6 +28,7 @@ MapReduce Application Master REST API's.
     * [Task Counters API](#Task_Counters_API)
     * [Task Attempts API](#Task_Attempts_API)
     * [Task Attempt API](#Task_Attempt_API)
+    * [Task Attempt State API](#Task_Attempt_State_API)
     * [Task Attempt Counters API](#Task_Attempt_Counters_API)
 
 Overview
@@ -2024,6 +2025,172 @@ Response Body:
       <elapsedReduceTime>0</elapsedReduceTime>
     </taskAttempt>
 
+Task Attempt State API
+-------------------------
+With the task attempt state API, you can query the state of a submitted task attempt as well as kill a running task attempt by modifying its state using a PUT request with the state set to "KILLED". To perform the PUT operation, authentication has to be set up for the AM web services. In addition, you must be authorized to kill the task attempt. Currently you can only change the state to "KILLED"; an attempt to change the state to any other value results in a 400 error response. Examples of the unauthorized and bad request errors are below. When you carry out a successful PUT, the initial response may be a 202. You can confirm that the task attempt is killed by repeating the PUT request until you get a 200, by querying the state using the GET method, or by querying for task attempt information and checking the state. In the examples below, we repeat the PUT request and get a 200 response.
+
+Please note that in order to kill a task attempt, you must have an authentication filter set up for the HTTP interface. The functionality requires that a username is set in the HttpServletRequest. If no filter is set up, the response will be an "UNAUTHORIZED" response.
+
+This feature is currently in the alpha stage and may change in the future.
+
+### URI
+
+      * http://<proxy http address:port>/proxy/{appid}/ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/state
+
+### HTTP Operations Supported
+
+      * GET
+      * PUT
+
+### Query Parameters Supported
+
+      None
+
+### Elements of *jobTaskAttemptState* object
+
+When you make a request for the state of a task attempt, the information returned has the following fields
+
+| Item | Data Type | Description |
+|:---- |:---- |:---- |
+| state | string | The task attempt state - can be one of "NEW", "STARTING", "RUNNING", "COMMIT_PENDING", "SUCCEEDED", "FAILED", "KILLED" |
+
+### Response Examples
+
+**JSON responses**
+
+HTTP Request
+
+      GET http://<proxy http address:port>/proxy/application_1429692837321_0001/ws/v1/mapreduce/jobs/job_1429692837321_0001/tasks/task_1429692837321_0001_m_000000/attempts/attempt_1429692837321_0001_m_000000_0/state
+
+Response Header:
+
+    HTTP/1.1 200 OK
+    Content-Type: application/json
+    Server: Jetty(6.1.26)
+    Content-Length: 20
+
+Response Body:
+
+    {
+      "state":"STARTING"
+    }
+
+HTTP Request
+
+      PUT http://<proxy http address:port>/proxy/application_1429692837321_0001/ws/v1/mapreduce/jobs/job_1429692837321_0001/tasks/task_1429692837321_0001_m_000000/attempts/attempt_1429692837321_0001_m_000000_0/state
+
+Request Body:
+
+    {
+      "state":"KILLED"
+    }
+
+Response Header:
+
+    HTTP/1.1 200 OK
+    Content-Type: application/json
+    Server: Jetty(6.1.26)
+    Content-Length: 18
+
+Response Body:
+
+    {
+      "state":"KILLED"
+    }
+
+**XML responses**
+
+HTTP Request
+
+      GET http://<proxy http address:port>/proxy/application_1429692837321_0001/ws/v1/mapreduce/jobs/job_1429692837321_0001/tasks/task_1429692837321_0001_m_000000/attempts/attempt_1429692837321_0001_m_000000_0/state
+
+Response Header:
+
+    HTTP/1.1 200 OK
+    Content-Type: application/xml
+    Server: Jetty(6.1.26)
+    Content-Length: 121
+
+Response Body:
+
+    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+    <jobTaskAttemptState>
+      <state>STARTING</state>
+    </jobTaskAttemptState>
+
+HTTP Request
+
+      PUT http://<proxy http address:port>/proxy/application_1429692837321_0001/ws/v1/mapreduce/jobs/job_1429692837321_0001/tasks/task_1429692837321_0001_m_000000/attempts/attempt_1429692837321_0001_m_000000_0/state
+
+Request Body:
+
+    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+    <jobTaskAttemptState>
+      <state>KILLED</state>
+    </jobTaskAttemptState>
+
+Response Header:
+
+    HTTP/1.1 200 OK
+    Content-Type: application/xml
+    Server: Jetty(6.1.26)
+    Content-Length: 121
+
+Response Body:
+
+    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+    <jobTaskAttemptState>
+      <state>KILLED</state>
+    </jobTaskAttemptState>
+
+**Unauthorized Error Response**
+
+HTTP Request
+
+      PUT http://<proxy http address:port>/proxy/application_1429692837321_0001/ws/v1/mapreduce/jobs/job_1429692837321_0001/tasks/task_1429692837321_0001_m_000000/attempts/attempt_1429692837321_0001_m_000000_0/state
+
+Request Body:
+
+    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+    <jobTaskAttemptState>
+      <state>KILLED</state>
+    </jobTaskAttemptState>
+
+Response Header:
+
+    HTTP/1.1 403 Forbidden
+    Content-Type: application/json
+    Server: Jetty(6.1.26)
+
+**Bad Request Error Response**
+
+HTTP Request
+
+      PUT http://<proxy http address:port>/proxy/application_1429692837321_0001/ws/v1/mapreduce/jobs/job_1429692837321_0001/tasks/task_1429692837321_0001_m_000000/attempts/attempt_1429692837321_0001_m_000000_0/state
+
+Request Body:
+
+    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+    <jobTaskAttemptState>
+      <state>RUNNING</state>
+    </jobTaskAttemptState>
+
+Response Header:
+
+    HTTP/1.1 400
+    Content-Length: 295
+    Content-Type: application/xml
+    Server: Jetty(6.1.26)
+
+Response Body:
+
+    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+    <RemoteException>
+      <exception>BadRequestException</exception>
+      <message>java.lang.Exception: Only 'KILLED' is allowed as a target state.</message>
+      <javaClassName>org.apache.hadoop.yarn.webapp.BadRequestException</javaClassName>
+    </RemoteException>
+
 Task Attempt Counters API
 -------------------------
 

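To make the documented kill flow concrete, here is a hedged client sketch that issues the PUT described above and retries until it gets a 200, since the documentation notes the initial response may be a 202. It is not part of the commit; the proxy address, IDs, and user name are placeholders, and simple authentication via user.name is assumed.

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class TaskAttemptKill {
      public static void main(String[] args) throws Exception {
        String url = "http://proxyhost:8088/proxy/application_1429692837321_0001"
            + "/ws/v1/mapreduce/jobs/job_1429692837321_0001"
            + "/tasks/task_1429692837321_0001_m_000000"
            + "/attempts/attempt_1429692837321_0001_m_000000_0/state"
            + "?user.name=testuser";
        byte[] body = "{\"state\":\"KILLED\"}".getBytes(StandardCharsets.UTF_8);

        while (true) {
          HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
          conn.setRequestMethod("PUT");
          conn.setDoOutput(true);
          conn.setRequestProperty("Content-Type", "application/json");
          conn.setRequestProperty("Accept", "application/json");
          try (OutputStream out = conn.getOutputStream()) {
            out.write(body);
          }
          int status = conn.getResponseCode();
          conn.disconnect();
          if (status == 200) {
            break;                    // kill confirmed
          }
          if (status == 403 || status == 400) {
            // 403: not authorized to kill; 400: target state other than KILLED.
            throw new IllegalStateException("Kill rejected with HTTP " + status);
          }
          Thread.sleep(1000);         // e.g. a 202 Accepted: retry as documented
        }
      }
    }
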
http://git-wip-us.apache.org/repos/asf/hadoop/blob/d18f10ad/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
index d45beb6..b1e355d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
@@ -18,8 +18,10 @@
 
 package org.apache.hadoop.yarn.server.webproxy;
 
+import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.io.ObjectInputStream;
 import java.io.OutputStream;
 import java.io.PrintWriter;
@@ -34,6 +36,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
+import javax.servlet.ServletException;
 import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
@@ -56,10 +59,13 @@ import org.apache.http.Header;
 import org.apache.http.HttpResponse;
 import org.apache.http.NameValuePair;
 import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPut;
+import org.apache.http.client.methods.HttpRequestBase;
 import org.apache.http.client.params.ClientPNames;
 import org.apache.http.client.params.CookiePolicy;
 import org.apache.http.client.utils.URLEncodedUtils;
 import org.apache.http.conn.params.ConnRoutePNames;
+import org.apache.http.entity.StringEntity;
 import org.apache.http.impl.client.DefaultHttpClient;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -74,14 +80,23 @@ public class WebAppProxyServlet extends HttpServlet {
         "Accept",
         "Accept-Encoding",
         "Accept-Language",
-        "Accept-Charset"));
-  
+        "Accept-Charset",
+        "Content-Type"));
+
   public static final String PROXY_USER_COOKIE_NAME = "proxy-user";
 
   private transient List<TrackingUriPlugin> trackingUriPlugins;
   private final String rmAppPageUrlBase;
   private transient YarnConfiguration conf;
 
+  /**
+   * HTTP methods.
+   */
+  private enum HTTP { GET, POST, HEAD, PUT, DELETE };
+
+  /**
+   * Empty Hamlet class.
+   */
   private static class _ implements Hamlet._ {
     //Empty
   }
@@ -150,11 +165,13 @@ public class WebAppProxyServlet extends HttpServlet {
    * @param resp the http response
    * @param link the link to download
    * @param c the cookie to set if any
+   * @param proxyHost the proxy host
+   * @param method the http method
    * @throws IOException on any error.
    */
-  private static void proxyLink(HttpServletRequest req, 
-      HttpServletResponse resp, URI link, Cookie c, String proxyHost)
-      throws IOException {
+  private static void proxyLink(final HttpServletRequest req,
+      final HttpServletResponse resp, final URI link, final Cookie c,
+      final String proxyHost, final HTTP method) throws IOException {
     DefaultHttpClient client = new DefaultHttpClient();
     client
         .getParams()
@@ -170,7 +187,28 @@ public class WebAppProxyServlet extends HttpServlet {
     }
     client.getParams()
         .setParameter(ConnRoutePNames.LOCAL_ADDRESS, localAddress);
-    HttpGet httpGet = new HttpGet(link);
+
+    HttpRequestBase base = null;
+    if (method.equals(HTTP.GET)) {
+      base = new HttpGet(link);
+    } else if (method.equals(HTTP.PUT)) {
+      base = new HttpPut(link);
+
+      StringBuilder sb = new StringBuilder();
+      BufferedReader reader =
+          new BufferedReader(
+              new InputStreamReader(req.getInputStream(), "UTF-8"));
+      String line;
+      while ((line = reader.readLine()) != null) {
+        sb.append(line);
+      }
+
+      ((HttpPut) base).setEntity(new StringEntity(sb.toString()));
+    } else {
+      resp.setStatus(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
+      return;
+    }
+
     @SuppressWarnings("unchecked")
     Enumeration<String> names = req.getHeaderNames();
     while(names.hasMoreElements()) {
@@ -180,18 +218,18 @@ public class WebAppProxyServlet extends HttpServlet {
         if (LOG.isDebugEnabled()) {
           LOG.debug("REQ HEADER: {} : {}", name, value);
         }
-        httpGet.setHeader(name, value);
+        base.setHeader(name, value);
       }
     }
 
     String user = req.getRemoteUser();
     if (user != null && !user.isEmpty()) {
-      httpGet.setHeader("Cookie",
+      base.setHeader("Cookie",
           PROXY_USER_COOKIE_NAME + "=" + URLEncoder.encode(user, "ASCII"));
     }
     OutputStream out = resp.getOutputStream();
     try {
-      HttpResponse httpResp = client.execute(httpGet);
+      HttpResponse httpResp = client.execute(base);
       resp.setStatus(httpResp.getStatusLine().getStatusCode());
       for (Header header : httpResp.getAllHeaders()) {
         resp.setHeader(header.getName(), header.getValue());
@@ -204,7 +242,7 @@ public class WebAppProxyServlet extends HttpServlet {
         IOUtils.copyBytes(in, out, 4096, true);
       }
     } finally {
-      httpGet.releaseConnection();
+      base.releaseConnection();
     }
   }
   
@@ -237,8 +275,28 @@ public class WebAppProxyServlet extends HttpServlet {
   }
   
   @Override
-  protected void doGet(HttpServletRequest req, HttpServletResponse resp) 
-  throws IOException{
+  protected void doGet(HttpServletRequest req, HttpServletResponse resp)
+      throws ServletException, IOException {
+    methodAction(req, resp, HTTP.GET);
+  }
+
+  @Override
+  protected final void doPut(final HttpServletRequest req,
+      final HttpServletResponse resp) throws ServletException, IOException {
+    methodAction(req, resp, HTTP.PUT);
+  }
+
+  /**
+   * The action against the HTTP method.
+   * @param req the HttpServletRequest
+   * @param resp the HttpServletResponse
+   * @param method the HTTP method
+   * @throws ServletException
+   * @throws IOException
+   */
+  private void methodAction(final HttpServletRequest req,
+      final HttpServletResponse resp,
+      final HTTP method) throws ServletException, IOException {
     try {
       String userApprovedParamS = 
         req.getParameter(ProxyUriUtils.PROXY_APPROVAL_PARAM);
@@ -359,7 +417,7 @@ public class WebAppProxyServlet extends HttpServlet {
       if (userWasWarned && userApproved) {
         c = makeCheckCookie(id, true);
       }
-      proxyLink(req, resp, toFetch, c, getProxyHost());
+      proxyLink(req, resp, toFetch, c, getProxyHost(), method);
 
     } catch(URISyntaxException | YarnException e) {
       throw new IOException(e);