Posted to commits@ambari.apache.org by mp...@apache.org on 2014/01/13 22:44:45 UTC

[2/3] AMBARI-4267. Enable BatchRequest(s) to transform to API calls to the server. (mpapirkovskyy)

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
index 9813cee..a2d0996 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionScheduleManager.java
@@ -18,14 +18,20 @@
 
 package org.apache.ambari.server.scheduler;
 
+import com.google.gson.Gson;
+import com.google.gson.JsonSyntaxException;
 import com.google.inject.Inject;
-import com.google.inject.Injector;
 import com.google.inject.Singleton;
+import com.sun.jersey.api.client.*;
+import com.sun.jersey.api.client.filter.ClientFilter;
+import com.sun.jersey.api.client.filter.CsrfProtectionFilter;
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.orm.dao.RequestScheduleBatchRequestDAO;
-import org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntity;
-import org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntityPK;
+import org.apache.ambari.server.security.authorization.internal.InternalTokenClientFilter;
+import org.apache.ambari.server.security.authorization.internal.InternalTokenStorage;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.scheduler.Batch;
 import org.apache.ambari.server.state.scheduler.BatchRequest;
 import org.apache.ambari.server.state.scheduler.BatchRequestJob;
@@ -33,6 +39,7 @@ import org.apache.ambari.server.state.scheduler.BatchRequestResponse;
 import org.apache.ambari.server.state.scheduler.RequestExecution;
 import org.apache.ambari.server.state.scheduler.Schedule;
 import org.apache.ambari.server.utils.DateUtils;
+import org.apache.commons.lang.text.StrBuilder;
 import org.quartz.CronExpression;
 import org.quartz.JobDetail;
 import org.quartz.JobExecutionContext;
@@ -43,10 +50,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.text.ParseException;
-import java.util.Collections;
-import java.util.Date;
-import java.util.List;
-import java.util.ListIterator;
+import java.util.*;
 
 import static org.quartz.CronScheduleBuilder.cronSchedule;
 import static org.quartz.JobBuilder.newJob;
@@ -60,21 +64,59 @@ import static org.quartz.TriggerBuilder.newTrigger;
 public class ExecutionScheduleManager {
   private static final Logger LOG = LoggerFactory.getLogger
     (ExecutionScheduleManager.class);
-  @Inject
-  private ExecutionScheduler executionScheduler;
-  @Inject
-  private Configuration configuration;
-  @Inject
-  private RequestScheduleBatchRequestDAO batchRequestDAO;
+
+  private final InternalTokenStorage tokenStorage;
+  private final Gson gson;
+  private final Clusters clusters;
+  ExecutionScheduler executionScheduler;
+  Configuration configuration;
 
   private volatile boolean schedulerAvailable = false;
   protected static final String BATCH_REQUEST_JOB_PREFIX = "BatchRequestJob";
   protected static final String REQUEST_EXECUTION_TRIGGER_PREFIX =
     "RequestExecution";
+  protected static final String DEFAULT_API_PATH = "api/v1";
+
+  protected Client ambariClient;
+  protected WebResource ambariWebResource;
 
   @Inject
-  public ExecutionScheduleManager(Injector injector) {
-    injector.injectMembers(this);
+  public ExecutionScheduleManager(Configuration configuration,
+                                  ExecutionScheduler executionScheduler,
+                                  InternalTokenStorage tokenStorage,
+                                  Clusters clusters,
+                                  Gson gson) {
+    this.configuration = configuration;
+    this.executionScheduler = executionScheduler;
+    this.tokenStorage = tokenStorage;
+    this.clusters = clusters;
+    this.gson = gson;
+
+    buildApiClient();
+  }
+
+  protected void buildApiClient() {
+    if (configuration.getApiSSLAuthentication()) {
+      //TODO build SSL client
+
+    } else {
+      Client client = Client.create();
+
+      this.ambariClient = client;
+
+      String pattern = "http://localhost:%s/";
+      String url = String.format(pattern, configuration.getClientApiPort());
+
+      this.ambariWebResource = client.resource(url);
+
+    }
+
+    //Install auth filters
+    ClientFilter csrfFilter = new CsrfProtectionFilter("RequestSchedule");
+    ClientFilter tokenFilter = new InternalTokenClientFilter(tokenStorage);
+    ambariClient.addFilter(csrfFilter);
+    ambariClient.addFilter(tokenFilter);
+
   }
 
   /**
@@ -197,8 +239,11 @@ public class ExecutionScheduleManager {
           .endAt(endDate)
           .build();
 
+
+
       try {
         executionScheduler.scheduleJob(trigger);
+        LOG.debug("Scheduled trigger next fire time: " + trigger.getNextFireTime());
       } catch (SchedulerException e) {
         LOG.error("Unable to schedule request execution.", e);
         throw new AmbariException(e.getMessage());
@@ -216,6 +261,7 @@ public class ExecutionScheduleManager {
 
       try {
         executionScheduler.scheduleJob(trigger);
+        LOG.debug("Scheduled trigger next fire time: " + trigger.getNextFireTime());
       } catch (SchedulerException e) {
         LOG.error("Unable to schedule request execution.", e);
         throw new AmbariException(e.getMessage());
@@ -252,6 +298,8 @@ public class ExecutionScheduleManager {
               requestExecution.getId())
             .usingJobData(BatchRequestJob.BATCH_REQUEST_BATCH_ID_KEY,
               batchRequest.getOrderId())
+            .usingJobData(BatchRequestJob.BATCH_REQUEST_CLUSTER_NAME_KEY,
+              requestExecution.getClusterName())
             .storeDurably()
             .build();
 
@@ -369,30 +417,31 @@ public class ExecutionScheduleManager {
    * @return request id
    * @throws AmbariException
    */
-  public synchronized Long executeBatchRequest(Long executionId,
-                                               Long batchId) throws AmbariException {
+  public Long executeBatchRequest(Long executionId,
+                                  Long batchId,
+                                  String clusterName) throws AmbariException {
 
     String type = null;
     String uri = null;
     String body = null;
 
     try {
-      RequestScheduleBatchRequestEntityPK batchRequestEntityPK = new
-        RequestScheduleBatchRequestEntityPK();
-      batchRequestEntityPK.setScheduleId(executionId);
-      batchRequestEntityPK.setBatchId(batchId);
-      RequestScheduleBatchRequestEntity batchRequestEntity =
-        batchRequestDAO.findByPk(batchRequestEntityPK);
+      RequestExecution requestExecution = clusters.getCluster(clusterName).getAllRequestExecutions().get(executionId);
+      BatchRequest batchRequest = requestExecution.getBatchRequest(batchId);
+      type = batchRequest.getType();
+      uri = batchRequest.getUri();
 
-      type = batchRequestEntity.getRequestType();
-      uri = batchRequestEntity.getRequestUri();
-      body = batchRequestEntity.getRequestBodyAsString();
+      body = requestExecution.getRequestBody(batchId);
 
-    } catch (Exception e) {
+      BatchRequestResponse batchRequestResponse = performApiRequest(uri, body, type);
+
+      updateBatchRequest(executionId, batchId, clusterName, batchRequestResponse, false);
 
+      return batchRequestResponse.getRequestId();
+    } catch (Exception e) {
+      throw new AmbariException("Exception occurred while performing request", e);
     }
 
-    return -1L;
   }
 
   /**
@@ -400,10 +449,118 @@ public class ExecutionScheduleManager {
    * @return
    * @throws AmbariException
    */
-  public BatchRequestResponse getBatchRequestResponse(Long requestId)
+  public BatchRequestResponse getBatchRequestResponse(Long requestId, String clusterName)
     throws AmbariException {
 
+    StrBuilder sb = new StrBuilder();
+    sb.append(DEFAULT_API_PATH).append("/clusters/").append(clusterName).append("/requests/").append(requestId);
+
+    return performApiGetRequest(sb.toString(), true);
+
+  }
+
+  private BatchRequestResponse convertToBatchRequestResponse(ClientResponse clientResponse) {
     BatchRequestResponse batchRequestResponse = new BatchRequestResponse();
+    int retCode = clientResponse.getStatus();
+
+    batchRequestResponse.setReturnCode(retCode);
+
+    String responseString = clientResponse.getEntity(String.class);
+    LOG.debug("Processing API response: status={}, body={}", retCode, responseString);
+    Map httpResponseMap;
+    try {
+      httpResponseMap = gson.fromJson(responseString, Map.class);
+      LOG.debug("Processing responce as JSON");
+    } catch (JsonSyntaxException e) {
+      LOG.debug("Response is not valid JSON object. Recording as is");
+      httpResponseMap = new HashMap();
+      httpResponseMap.put("message", responseString);
+    }
+
+
+    if (retCode < 300) {
+      if (httpResponseMap == null) {
+        //Empty response on successful scenario
+        batchRequestResponse.setStatus(HostRoleStatus.COMPLETED.toString());
+        return batchRequestResponse;
+      }
+
+      Map requestMap = null;
+      Object requestMapObject = httpResponseMap.get("Requests");
+      if (requestMapObject instanceof Map) {
+        requestMap = (Map) requestMapObject;
+      }
+
+      if (requestMap != null) {
+        batchRequestResponse.setRequestId(((Double) requestMap.get("id")).longValue());
+        //TODO fix different names for field
+        String status = null;
+        if (requestMap.get("request_status") != null) {
+          status = requestMap.get("request_status").toString();
+        }
+        if (requestMap.get("status") != null) {
+          status = requestMap.get("status").toString();
+        }
+        batchRequestResponse.setStatus(status);
+      }
+
+    } else {
+      //unsuccessful response
+      batchRequestResponse.setReturnMessage((String) httpResponseMap.get("message"));
+      batchRequestResponse.setStatus(HostRoleStatus.FAILED.toString());
+    }
+
     return batchRequestResponse;
   }
+
+  public void updateBatchRequest(long executionId, long batchId, String clusterName,
+                                 BatchRequestResponse batchRequestResponse,
+                                 boolean statusOnly)
+      throws AmbariException{
+
+    Cluster cluster = clusters.getCluster(clusterName);
+    RequestExecution requestExecution = cluster.getAllRequestExecutions().get(executionId);
+
+    requestExecution.updateBatchRequest(batchId, batchRequestResponse, statusOnly);
+
+  }
+
+  protected BatchRequestResponse performUriRequest(String url, String body, String method) {
+    ClientResponse response;
+    try {
+      response = ambariClient.resource(url).entity(body).method(method, ClientResponse.class);
+    } catch (UniformInterfaceException e) {
+      response = e.getResponse();
+    }
+    // Don't read the response entity here for logging; it can be read only once from the HTTP stream.
+
+    return convertToBatchRequestResponse(response);
+  }
+
+  protected BatchRequestResponse performApiGetRequest(String relativeUri, boolean queryAllFields) {
+    WebResource webResource = ambariWebResource.path(relativeUri);
+    if (queryAllFields) {
+      webResource = webResource.queryParam("fields", "*");
+    }
+    ClientResponse response;
+    try {
+      response = webResource.get(ClientResponse.class);
+    } catch (UniformInterfaceException e) {
+      response = e.getResponse();
+    }
+    return convertToBatchRequestResponse(response);
+  }
+
+  protected BatchRequestResponse performApiRequest(String relativeUri, String body, String method) {
+    ClientResponse response;
+    try {
+      response = ambariWebResource.path(relativeUri).method(method, ClientResponse.class, body);
+    } catch (UniformInterfaceException e) {
+      response = e.getResponse();
+    }
+
+    return convertToBatchRequestResponse(response);
+  }
+
+
 }
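
For reference, convertToBatchRequestResponse() above expects the request resource to return a JSON body whose "Requests" object carries the request id and a status field (either "status" or "request_status"). A minimal standalone Gson sketch of that parsing, using a made-up payload (illustrative only, not part of this patch):

// Illustrative only: how a "Requests" payload maps onto BatchRequestResponse fields.
import com.google.gson.Gson;
import java.util.Map;

public class ResponseParsingSketch {
  public static void main(String[] args) {
    String responseString =
        "{\"Requests\": {\"id\": 42, \"status\": \"IN_PROGRESS\"}}";
    Map httpResponseMap = new Gson().fromJson(responseString, Map.class);
    Map requestMap = (Map) httpResponseMap.get("Requests");
    // Gson maps JSON numbers to Double when the target is a raw Map,
    // hence the ((Double) ...).longValue() conversion in the patch above.
    long requestId = ((Double) requestMap.get("id")).longValue();
    String status = (String) requestMap.get("status");
    System.out.println(requestId + " " + status); // prints: 42 IN_PROGRESS
  }
}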

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionSchedulerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionSchedulerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionSchedulerImpl.java
index 2b938bc..353aaf0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionSchedulerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/scheduler/ExecutionSchedulerImpl.java
@@ -22,6 +22,7 @@ import com.google.inject.Injector;
 import com.google.inject.Singleton;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.state.scheduler.GuiceJobFactory;
 import org.apache.ambari.server.state.scheduler.RequestExecution;
 import org.apache.ambari.server.state.scheduler.Schedule;
 import org.quartz.Job;
@@ -40,6 +41,9 @@ import java.util.Properties;
 public class ExecutionSchedulerImpl implements ExecutionScheduler {
   @Inject
   private Configuration configuration;
+  @Inject
+  GuiceJobFactory guiceJobFactory;
+
   private static final Logger LOG = LoggerFactory.getLogger(ExecutionSchedulerImpl.class);
   protected static final String DEFAULT_SCHEDULER_NAME = "ExecutionScheduler";
   protected Scheduler scheduler;
@@ -71,6 +75,7 @@ public class ExecutionSchedulerImpl implements ExecutionScheduler {
     }
     try {
       scheduler = sf.getScheduler();
+      scheduler.setJobFactory(guiceJobFactory);
       isInitialized = true;
     } catch (SchedulerException e) {
       LOG.warn("Failed to create Request Execution scheduler !");

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/AmbariInternalAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/AmbariInternalAuthenticationProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/AmbariInternalAuthenticationProvider.java
new file mode 100644
index 0000000..243c843
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/AmbariInternalAuthenticationProvider.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authorization.internal;
+
+import com.google.inject.Inject;
+import org.springframework.security.authentication.AuthenticationProvider;
+import org.springframework.security.authentication.BadCredentialsException;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.AuthenticationException;
+
+public class AmbariInternalAuthenticationProvider implements AuthenticationProvider {
+
+  private final InternalTokenStorage internalTokenStorage;
+
+  @Inject
+  public AmbariInternalAuthenticationProvider(InternalTokenStorage internalTokenStorage) {
+    this.internalTokenStorage = internalTokenStorage;
+  }
+
+  @Override
+  public Authentication authenticate(Authentication authentication) throws AuthenticationException {
+    InternalAuthenticationToken token = (InternalAuthenticationToken) authentication;
+    if (internalTokenStorage.isValidInternalToken(token.getCredentials())) {
+      token.setAuthenticated(true);
+    } else {
+      String message = "Bad credentials";
+      throw new BadCredentialsException(message);
+    }
+    return token;
+  }
+
+  @Override
+  public boolean supports(Class<?> authentication) {
+    return InternalAuthenticationToken.class.isAssignableFrom(authentication);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
new file mode 100644
index 0000000..5d865ea
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authorization.internal;
+
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
+import org.springframework.security.core.userdetails.User;
+
+import java.util.Collection;
+import java.util.Collections;
+
+public class InternalAuthenticationToken implements Authentication {
+
+  private static final String INTERNAL_NAME = "internal";
+  private static final Collection<? extends GrantedAuthority> AUTHORITIES =
+      Collections.singleton(new SimpleGrantedAuthority("ADMIN"));
+  private static final User INTERNAL_USER = new User(INTERNAL_NAME, "empty", AUTHORITIES);
+
+  private String token;
+  private boolean authenticated = false;
+
+
+  public InternalAuthenticationToken(String tokenString) {
+    this.token = tokenString;
+  }
+
+  @Override
+  public Collection<? extends GrantedAuthority> getAuthorities() {
+    return AUTHORITIES;
+  }
+
+  @Override
+  public String getCredentials() {
+    return token;
+  }
+
+  @Override
+  public Object getDetails() {
+    return null;
+  }
+
+  @Override
+  public Object getPrincipal() {
+    return INTERNAL_USER;
+  }
+
+  @Override
+  public boolean isAuthenticated() {
+    return authenticated;
+  }
+
+  @Override
+  public void setAuthenticated(boolean isAuthenticated) throws IllegalArgumentException {
+    this.authenticated = isAuthenticated;
+  }
+
+  @Override
+  public String getName() {
+    return INTERNAL_NAME;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenAuthenticationFilter.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenAuthenticationFilter.java
new file mode 100644
index 0000000..c05de28
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenAuthenticationFilter.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authorization.internal;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.security.core.context.SecurityContext;
+import org.springframework.security.core.context.SecurityContextHolder;
+
+import javax.servlet.*;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+
+public class InternalTokenAuthenticationFilter implements Filter {
+  public static final String INTERNAL_TOKEN_HEADER = "X-Internal-Token";
+
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+
+  }
+
+  @Override
+  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
+    HttpServletRequest httpRequest = (HttpServletRequest) request;
+    HttpServletResponse httpResponse = (HttpServletResponse) response;
+
+    SecurityContext context = SecurityContextHolder.getContext();
+
+    if (context.getAuthentication() == null || !context.getAuthentication().isAuthenticated()) {
+      String token = httpRequest.getHeader(INTERNAL_TOKEN_HEADER);
+      if (token != null) {
+        context.setAuthentication(new InternalAuthenticationToken(token));
+      }
+    }
+
+    chain.doFilter(request, response);
+  }
+
+  @Override
+  public void destroy() {
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenClientFilter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenClientFilter.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenClientFilter.java
new file mode 100644
index 0000000..c1ba9e1
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenClientFilter.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authorization.internal;
+
+import com.google.inject.Inject;
+import com.sun.jersey.api.client.ClientHandlerException;
+import com.sun.jersey.api.client.ClientRequest;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.filter.ClientFilter;
+
+public class InternalTokenClientFilter extends ClientFilter {
+  public static final String INTERNAL_TOKEN_HEADER = "X-Internal-Token";
+  private final InternalTokenStorage tokenStorage;
+
+  @Inject
+  public InternalTokenClientFilter(InternalTokenStorage tokenStorage) {
+    this.tokenStorage = tokenStorage;
+  }
+
+  @Override
+  public ClientResponse handle(ClientRequest cr) throws ClientHandlerException {
+    cr.getHeaders().add(INTERNAL_TOKEN_HEADER, tokenStorage.getInternalToken());
+    return getNext().handle(cr);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenStorage.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenStorage.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenStorage.java
new file mode 100644
index 0000000..92d7fa9
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalTokenStorage.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authorization.internal;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+
+import java.math.BigInteger;
+import java.security.SecureRandom;
+
+/**
+ * Generates a single token for internal authentication.
+ */
+@Singleton
+public class InternalTokenStorage {
+  private final SecureRandom random;
+  private final String token;
+
+  @Inject
+  public InternalTokenStorage(SecureRandom secureRandom) {
+    this.random = secureRandom;
+    token = createNewToken();
+  }
+
+  public String getInternalToken() {
+    return token;
+  }
+
+  public boolean isValidInternalToken(String token) {
+    return this.token.equals(token);
+  }
+
+  public String createNewToken() {
+    return new BigInteger(130, random).toString(32);
+  }
+}
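
Taken together, the internal-authorization classes above form a single-token loop: InternalTokenStorage generates one random token per server instance, InternalTokenClientFilter attaches it to outgoing scheduler requests as X-Internal-Token, InternalTokenAuthenticationFilter wraps the incoming header in an InternalAuthenticationToken, and AmbariInternalAuthenticationProvider accepts it only if it matches the stored value. A minimal sketch of that round trip (standalone, illustrative only, not part of this patch):

// Illustrative round trip: token issued, carried as a header value, then validated.
import java.security.SecureRandom;
import org.apache.ambari.server.security.authorization.internal.AmbariInternalAuthenticationProvider;
import org.apache.ambari.server.security.authorization.internal.InternalAuthenticationToken;
import org.apache.ambari.server.security.authorization.internal.InternalTokenStorage;

public class InternalTokenFlowSketch {
  public static void main(String[] args) {
    InternalTokenStorage storage = new InternalTokenStorage(new SecureRandom());

    // Client side: InternalTokenClientFilter would send this value in X-Internal-Token.
    String headerValue = storage.getInternalToken();

    // Server side: the servlet filter wraps the header, the provider validates it.
    InternalAuthenticationToken token = new InternalAuthenticationToken(headerValue);
    AmbariInternalAuthenticationProvider provider =
        new AmbariInternalAuthenticationProvider(storage);
    System.out.println(provider.authenticate(token).isAuthenticated()); // true
  }
}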

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/BatchRequestJob.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/BatchRequestJob.java b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/BatchRequestJob.java
index 7405706..9fbb571 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/BatchRequestJob.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/BatchRequestJob.java
@@ -17,29 +17,44 @@
  */
 package org.apache.ambari.server.state.scheduler;
 
+import com.google.inject.Inject;
+import com.google.inject.name.Named;
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.scheduler.AbstractLinearExecutionJob;
 import org.apache.ambari.server.scheduler.ExecutionScheduleManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Map;
 
 public class BatchRequestJob extends AbstractLinearExecutionJob {
+  private static final Logger LOG = LoggerFactory.getLogger(BatchRequestJob.class);
+
   public static final String BATCH_REQUEST_EXECUTION_ID_KEY =
     "BatchRequestJob.ExecutionId";
   public static final String BATCH_REQUEST_BATCH_ID_KEY =
     "BatchRequestJob.BatchId";
+  public static final String BATCH_REQUEST_CLUSTER_NAME_KEY =
+      "BatchRequestJob.ClusterName";
+
+  private final long statusCheckInterval;
 
-  public BatchRequestJob(ExecutionScheduleManager executionScheduleManager) {
+  @Inject
+  public BatchRequestJob(ExecutionScheduleManager executionScheduleManager,
+                         @Named("statusCheckInterval") long statusCheckInterval) {
     super(executionScheduleManager);
+    this.statusCheckInterval = statusCheckInterval;
   }
 
   @Override
   protected void doWork(Map<String, Object> properties) throws AmbariException {
 
-    String executionId = properties.get(BATCH_REQUEST_EXECUTION_ID_KEY) != null ?
-      (String) properties.get(BATCH_REQUEST_EXECUTION_ID_KEY) : null;
-    String batchId = properties.get(BATCH_REQUEST_BATCH_ID_KEY) != null ?
-      (String) properties.get(BATCH_REQUEST_BATCH_ID_KEY) : null;
+    Long executionId = properties.get(BATCH_REQUEST_EXECUTION_ID_KEY) != null ?
+      (Long) properties.get(BATCH_REQUEST_EXECUTION_ID_KEY) : null;
+    Long batchId = properties.get(BATCH_REQUEST_BATCH_ID_KEY) != null ?
+      (Long) properties.get(BATCH_REQUEST_BATCH_ID_KEY) : null;
+    String clusterName = (String) properties.get(BATCH_REQUEST_CLUSTER_NAME_KEY);
 
 
     if (executionId == null || batchId == null) {
@@ -49,13 +64,26 @@ public class BatchRequestJob extends AbstractLinearExecutionJob {
     }
 
     Long requestId = executionScheduleManager.executeBatchRequest
-      (Long.parseLong(executionId), Long.parseLong(batchId));
+      (executionId, batchId, clusterName);
 
     if (requestId != null) {
-      // Wait on request completion
+      HostRoleStatus status;
+      do {
+        BatchRequestResponse batchRequestResponse =
+            executionScheduleManager.getBatchRequestResponse(requestId, clusterName);
+
+        status = HostRoleStatus.valueOf(batchRequestResponse.getStatus());
+
+        executionScheduleManager.updateBatchRequest(executionId, batchId, clusterName, batchRequestResponse, true);
 
-      BatchRequestResponse batchRequestResponse =
-        executionScheduleManager.getBatchRequestResponse(requestId);
+        try {
+          Thread.sleep(statusCheckInterval);
+        } catch (InterruptedException e) {
+          String message = "Job Thread interrupted";
+          LOG.error(message, e);
+          throw new AmbariException(message, e);
+        }
+      } while (!status.isCompletedState());
     }
   }
 }
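
Because the ids are stored as Long values when the JobDetail is built (see the usingJobData() calls in ExecutionScheduleManager above), the job now casts them instead of parsing strings. A small standalone sketch of the JobDataMap contents the job expects (illustrative only, the values are made up):

// Illustrative: the merged JobDataMap entries BatchRequestJob reads in doWork().
import org.apache.ambari.server.state.scheduler.BatchRequestJob;
import org.quartz.JobDataMap;

public class BatchRequestJobDataSketch {
  public static void main(String[] args) {
    JobDataMap data = new JobDataMap();
    data.put(BatchRequestJob.BATCH_REQUEST_EXECUTION_ID_KEY, 10L); // stored as Long
    data.put(BatchRequestJob.BATCH_REQUEST_BATCH_ID_KEY, 1L);      // stored as Long
    data.put(BatchRequestJob.BATCH_REQUEST_CLUSTER_NAME_KEY, "c1");

    Long executionId = (Long) data.get(BatchRequestJob.BATCH_REQUEST_EXECUTION_ID_KEY);
    Long batchId = (Long) data.get(BatchRequestJob.BATCH_REQUEST_BATCH_ID_KEY);
    String clusterName = (String) data.get(BatchRequestJob.BATCH_REQUEST_CLUSTER_NAME_KEY);
    System.out.println(executionId + "/" + batchId + "@" + clusterName); // 10/1@c1
  }
}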

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/BatchRequestResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/BatchRequestResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/BatchRequestResponse.java
index 2710ffa..59a45fd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/BatchRequestResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/BatchRequestResponse.java
@@ -23,4 +23,42 @@ package org.apache.ambari.server.state.scheduler;
  * tolerance calculations
  */
 public class BatchRequestResponse {
+
+  private Long requestId;
+  private String status;
+  private int returnCode;
+  private String returnMessage;
+
+
+  public Long getRequestId() {
+    return requestId;
+  }
+
+  public void setRequestId(Long requestId) {
+    this.requestId = requestId;
+  }
+
+  public String getStatus() {
+    return status;
+  }
+
+  public void setStatus(String status) {
+    this.status = status;
+  }
+
+  public int getReturnCode() {
+    return returnCode;
+  }
+
+  public void setReturnCode(int returnCode) {
+    this.returnCode = returnCode;
+  }
+
+  public String getReturnMessage() {
+    return returnMessage;
+  }
+
+  public void setReturnMessage(String returnMessage) {
+    this.returnMessage = returnMessage;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/GuiceJobFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/GuiceJobFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/GuiceJobFactory.java
new file mode 100644
index 0000000..5756853
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/GuiceJobFactory.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.state.scheduler;
+
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import org.quartz.Job;
+import org.quartz.Scheduler;
+import org.quartz.SchedulerException;
+import org.quartz.spi.JobFactory;
+import org.quartz.spi.TriggerFiredBundle;
+
+public class GuiceJobFactory implements JobFactory {
+
+  private final Injector injector;
+
+  @Inject
+  public GuiceJobFactory(Injector injector) {
+    this.injector = injector;
+  }
+
+  @Override
+  public Job newJob(TriggerFiredBundle bundle, Scheduler scheduler) throws SchedulerException {
+    return injector.getInstance(bundle.getJobDetail().getJobClass());
+  }
+}
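
GuiceJobFactory lets Quartz delegate job instantiation to the Guice injector, which is what allows the @Inject constructor of BatchRequestJob to work. In this patch the wiring happens in ExecutionSchedulerImpl via scheduler.setJobFactory(guiceJobFactory); a minimal standalone sketch of the same idea (illustrative only, module bindings omitted):

// Illustrative wiring: Quartz asks GuiceJobFactory, which asks the Injector, for job instances.
import com.google.inject.Guice;
import com.google.inject.Injector;
import org.apache.ambari.server.state.scheduler.GuiceJobFactory;
import org.quartz.Scheduler;
import org.quartz.impl.StdSchedulerFactory;

public class GuiceJobFactorySketch {
  public static void main(String[] args) throws Exception {
    Injector injector = Guice.createInjector(/* modules binding job dependencies */);
    Scheduler scheduler = new StdSchedulerFactory().getScheduler();
    // Every JobDetail's job class is now created through Guice,
    // so injected members (e.g. in BatchRequestJob) are populated.
    scheduler.setJobFactory(injector.getInstance(GuiceJobFactory.class));
    scheduler.start();
  }
}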

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecution.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecution.java b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecution.java
index bbeece3..44e8ece 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecution.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecution.java
@@ -151,6 +151,19 @@ public interface RequestExecution {
   public String getRequestBody(Long batchId);
 
   /**
+   * Get batch request with specified order id
+   */
+  BatchRequest getBatchRequest(long batchId);
+
+  /**
+   * Updates batch request data
+   * @param batchId order id of batch request
+   * @param batchRequestResponse
+   * @param statusOnly true if only status should be updated
+   */
+  void updateBatchRequest(long batchId, BatchRequestResponse batchRequestResponse, boolean statusOnly);
+
+  /**
    * Status of the Request execution
    */
   public enum Status {

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecutionImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecutionImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecutionImpl.java
index 8c89392..a1e7d53 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecutionImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/scheduler/RequestExecutionImpl.java
@@ -30,6 +30,7 @@ import org.apache.ambari.server.orm.dao.RequestScheduleBatchRequestDAO;
 import org.apache.ambari.server.orm.dao.RequestScheduleDAO;
 import org.apache.ambari.server.orm.entities.ClusterEntity;
 import org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntity;
+import org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntityPK;
 import org.apache.ambari.server.orm.entities.RequestScheduleEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -412,4 +413,47 @@ public class RequestExecutionImpl implements RequestExecution {
     return body;
   }
 
+  @Override
+  public BatchRequest getBatchRequest(long batchId) {
+    for (BatchRequest batchRequest : batch.getBatchRequests()) {
+      if (batchId == batchRequest.getOrderId()) {
+        return batchRequest;
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public void updateBatchRequest(long batchId, BatchRequestResponse batchRequestResponse, boolean statusOnly) {
+    long executionId = requestScheduleEntity.getScheduleId();
+
+    RequestScheduleBatchRequestEntityPK batchRequestEntityPK = new
+        RequestScheduleBatchRequestEntityPK();
+    batchRequestEntityPK.setScheduleId(executionId);
+    batchRequestEntityPK.setBatchId(batchId);
+    RequestScheduleBatchRequestEntity batchRequestEntity =
+        batchRequestDAO.findByPk(batchRequestEntityPK);
+
+    batchRequestEntity.setRequestStatus(batchRequestResponse.getStatus());
+
+    if (!statusOnly) {
+      batchRequestEntity.setReturnCode(batchRequestResponse.getReturnCode());
+      batchRequestEntity.setRequestId(batchRequestResponse.getRequestId());
+      batchRequestEntity.setReturnMessage(batchRequestResponse.getReturnMessage());
+    }
+
+    batchRequestDAO.merge(batchRequestEntity);
+
+    BatchRequest batchRequest = getBatchRequest(batchId);
+
+    batchRequest.setStatus(batchRequestResponse.getStatus());
+
+    if (!statusOnly) {
+      batchRequest.setReturnCode(batchRequestResponse.getReturnCode());
+      batchRequest.setResponseMsg(batchRequestResponse.getReturnMessage());
+    }
+
+    setLastExecutionStatus(batchRequestResponse.getStatus());
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index aea886a..ba6f1d4 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -41,7 +41,8 @@ CREATE TABLE users (user_id INTEGER, create_time TIMESTAMP DEFAULT NOW(), ldap_u
 CREATE TABLE execution_command (task_id BIGINT NOT NULL, command LONGBLOB, PRIMARY KEY (task_id));
 CREATE TABLE host_role_command (task_id BIGINT NOT NULL, attempt_count SMALLINT NOT NULL, event LONGTEXT NOT NULL, exitcode INTEGER NOT NULL, host_name VARCHAR(255) NOT NULL, last_attempt_time BIGINT NOT NULL, request_id BIGINT NOT NULL, role VARCHAR(255), role_command VARCHAR(255), stage_id BIGINT NOT NULL, start_time BIGINT NOT NULL, end_time BIGINT, status VARCHAR(255), std_error LONGBLOB, std_out LONGBLOB, structured_out LONGBLOB, PRIMARY KEY (task_id));
 CREATE TABLE role_success_criteria (role VARCHAR(255) NOT NULL, request_id BIGINT NOT NULL, stage_id BIGINT NOT NULL, success_factor DOUBLE NOT NULL, PRIMARY KEY (role, request_id, stage_id));
-CREATE TABLE stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info LONGBLOB NOT NULL, PRIMARY KEY (stage_id, request_id));
+CREATE TABLE stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info LONGBLOB, PRIMARY KEY (stage_id, request_id));
+CREATE TABLE request (request_id BIGINT NOT NULL, cluster_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs LONGTEXT, request_context VARCHAR(255), request_type VARCHAR(255), start_time BIGINT NOT NULL, status VARCHAR(255), target_component VARCHAR(255), target_hosts LONGTEXT, target_service VARCHAR(255), PRIMARY KEY (request_id));
 CREATE TABLE key_value_store (`key` VARCHAR(255), `value` LONGTEXT, PRIMARY KEY (`key`));
 CREATE TABLE clusterconfigmapping (type_name VARCHAR(255) NOT NULL, create_timestamp BIGINT NOT NULL, cluster_id BIGINT NOT NULL, selected INTEGER NOT NULL DEFAULT 0, version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL DEFAULT '_db', PRIMARY KEY (type_name, create_timestamp, cluster_id));
 CREATE TABLE hostconfigmapping (create_timestamp BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, cluster_id BIGINT NOT NULL, type_name VARCHAR(255) NOT NULL, selected INTEGER NOT NULL DEFAULT 0, service_name VARCHAR(255), version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL DEFAULT '_db', PRIMARY KEY (create_timestamp, host_name, cluster_id, type_name));
@@ -53,7 +54,7 @@ CREATE TABLE confgroupclusterconfigmapping (config_group_id BIGINT NOT NULL, clu
 CREATE TABLE configgroup (group_id BIGINT, cluster_id BIGINT NOT NULL, group_name VARCHAR(255) NOT NULL, tag VARCHAR(1024) NOT NULL, description VARCHAR(1024), create_timestamp BIGINT NOT NULL, PRIMARY KEY(group_id));
 CREATE TABLE configgrouphostmapping (config_group_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY(config_group_id, host_name));
 CREATE TABLE requestschedule (schedule_id bigint, cluster_id BIGINT NOT NULL, description varchar(255), status varchar(255), batch_separation_seconds smallint, batch_toleration_limit smallint, create_user varchar(255), create_timestamp bigint, update_user varchar(255), update_timestamp bigint, minutes varchar(10), hours varchar(10), days_of_month varchar(10), month varchar(10), day_of_week varchar(10), yearToSchedule varchar(10), startTime varchar(50), endTime varchar(50), last_execution_status varchar(255), PRIMARY KEY(schedule_id));
-CREATE TABLE requestschedulebatchrequest (schedule_id bigint, batch_id bigint, request_id bigint, request_type varchar(255), request_uri varchar(1024), request_body LONGBLOB, request_status varchar(255), return_code smallint, return_message varchar(255), PRIMARY KEY(schedule_id, batch_id));
+CREATE TABLE requestschedulebatchrequest (schedule_id bigint, batch_id bigint, request_id bigint, request_type varchar(255), request_uri varchar(1024), request_body LONGBLOB, request_status varchar(255), return_code smallint, return_message varchar(2000), PRIMARY KEY(schedule_id, batch_id));
 CREATE TABLE action (action_name VARCHAR(255) NOT NULL, action_type VARCHAR(32) NOT NULL, inputs VARCHAR(1000), target_service VARCHAR(255), target_component VARCHAR(255), default_timeout SMALLINT NOT NULL, description VARCHAR(1000), target_type VARCHAR(32), PRIMARY KEY (action_name));
 
 ALTER TABLE users ADD CONSTRAINT UNQ_users_0 UNIQUE (user_name, ldap_user);
@@ -72,6 +73,8 @@ ALTER TABLE host_role_command ADD CONSTRAINT FK_host_role_command_stage_id FOREI
 ALTER TABLE host_role_command ADD CONSTRAINT FK_host_role_command_host_name FOREIGN KEY (host_name) REFERENCES hosts (host_name);
 ALTER TABLE role_success_criteria ADD CONSTRAINT FK_role_success_criteria_stage_id FOREIGN KEY (stage_id, request_id) REFERENCES stage (stage_id, request_id);
 ALTER TABLE stage ADD CONSTRAINT FK_stage_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
+ALTER TABLE stage ADD CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES request (request_id);
+ALTER TABLE request ADD CONSTRAINT FK_request_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterconfigmapping ADD CONSTRAINT FK_clusterconfigmapping_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE hostconfigmapping ADD CONSTRAINT FK_hostconfigmapping_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE hostconfigmapping ADD CONSTRAINT FK_hostconfigmapping_host_name FOREIGN KEY (host_name) REFERENCES hosts (host_name);

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 9246c4a..4dcd37f 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -32,6 +32,7 @@ CREATE TABLE execution_command (task_id NUMBER(19) NOT NULL, command BLOB NULL,
 CREATE TABLE host_role_command (task_id NUMBER(19) NOT NULL, attempt_count NUMBER(5) NOT NULL, event CLOB NULL, exitcode NUMBER(10) NOT NULL, host_name VARCHAR2(255) NOT NULL, last_attempt_time NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, role VARCHAR2(255) NULL, role_command VARCHAR2(255) NULL, stage_id NUMBER(19) NOT NULL, start_time NUMBER(19) NOT NULL, end_time NUMBER(19), status VARCHAR2(255) NULL, std_error BLOB NULL, std_out BLOB NULL, structured_out BLOB NULL, PRIMARY KEY (task_id));
 CREATE TABLE role_success_criteria (role VARCHAR2(255) NOT NULL, request_id NUMBER(19) NOT NULL, stage_id NUMBER(19) NOT NULL, success_factor NUMBER(19,4) NOT NULL, PRIMARY KEY (role, request_id, stage_id));
 CREATE TABLE stage (stage_id NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19) NULL, log_info VARCHAR2(255) NULL, request_context VARCHAR2(255) NULL, cluster_host_info BLOB NOT NULL, PRIMARY KEY (stage_id, request_id));
+CREATE TABLE request (request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19), command_name VARCHAR(255), create_time NUMBER(19) NOT NULL, end_time NUMBER(19) NOT NULL, inputs CLOB, request_context VARCHAR(255), request_type VARCHAR(255), start_time NUMBER(19) NOT NULL, status VARCHAR(255), target_component VARCHAR(255), target_hosts CLOB, target_service VARCHAR(255), PRIMARY KEY (request_id));
 CREATE TABLE key_value_store ("key" VARCHAR2(255) NOT NULL, "value" CLOB NULL, PRIMARY KEY ("key"));
 CREATE TABLE clusterconfigmapping (type_name VARCHAR2(255) NOT NULL, create_timestamp NUMBER(19) NOT NULL, cluster_id NUMBER(19) NOT NULL, selected NUMBER(10) NOT NULL, version_tag VARCHAR2(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', PRIMARY KEY (type_name, create_timestamp, cluster_id));
 CREATE TABLE hostconfigmapping (create_timestamp NUMBER(19) NOT NULL, host_name VARCHAR2(255) NOT NULL, cluster_id NUMBER(19) NOT NULL, type_name VARCHAR2(255) NOT NULL, selected NUMBER(10) NOT NULL, service_name VARCHAR2(255) NULL, version_tag VARCHAR2(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', PRIMARY KEY (create_timestamp, host_name, cluster_id, type_name));
@@ -44,7 +45,7 @@ CREATE TABLE confgroupclusterconfigmapping (config_group_id NUMBER(19) NOT NULL,
 CREATE TABLE configgrouphostmapping (config_group_id NUMBER(19) NOT NULL, host_name VARCHAR2(255) NOT NULL, PRIMARY KEY(config_group_id, host_name));
 CREATE TABLE action (action_name VARCHAR2(255) NOT NULL, action_type VARCHAR2(255) NOT NULL, inputs VARCHAR2(1024), target_service VARCHAR2(255), target_component VARCHAR2(255), default_timeout NUMBER(10) NOT NULL, description VARCHAR2(1024), target_type VARCHAR2(255), PRIMARY KEY (action_name));
 CREATE TABLE ambari.requestschedule (schedule_id NUMBER(19), cluster_id NUMBER(19) NOT NULL, description VARCHAR2(255), status VARCHAR2(255), batch_separation_seconds smallint, batch_toleration_limit smallint, create_user VARCHAR2(255), create_timestamp NUMBER(19), update_user VARCHAR2(255), update_timestamp NUMBER(19), minutes VARCHAR2(10), hours VARCHAR2(10), days_of_month VARCHAR2(10), month VARCHAR2(10), day_of_week VARCHAR2(10), yearToSchedule VARCHAR2(10), startTime VARCHAR2(50), endTime VARCHAR2(50), last_execution_status VARCHAR2(255), PRIMARY KEY(schedule_id));
-CREATE TABLE ambari.requestschedulebatchrequest (schedule_id NUMBER(19), batch_id NUMBER(19), request_id NUMBER(19), request_type VARCHAR2(255), request_uri VARCHAR2(1024), request_body BLOB, request_status VARCHAR2(255), return_code smallint, return_message VARCHAR2(255), PRIMARY KEY(schedule_id, batch_id));
+CREATE TABLE ambari.requestschedulebatchrequest (schedule_id NUMBER(19), batch_id NUMBER(19), request_id NUMBER(19), request_type VARCHAR2(255), request_uri VARCHAR2(1024), request_body BLOB, request_status VARCHAR2(255), return_code smallint, return_message VARCHAR2(2000), PRIMARY KEY(schedule_id, batch_id));
 
 ALTER TABLE users ADD CONSTRAINT UNQ_users_0 UNIQUE (user_name, ldap_user);
 ALTER TABLE clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
@@ -62,6 +63,8 @@ ALTER TABLE host_role_command ADD CONSTRAINT FK_host_role_command_stage_id FOREI
 ALTER TABLE host_role_command ADD CONSTRAINT FK_host_role_command_host_name FOREIGN KEY (host_name) REFERENCES hosts (host_name);
 ALTER TABLE role_success_criteria ADD CONSTRAINT role_success_criteria_stage_id FOREIGN KEY (stage_id, request_id) REFERENCES stage (stage_id, request_id);
 ALTER TABLE stage ADD CONSTRAINT FK_stage_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
+ALTER TABLE stage ADD CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES request (request_id);
+ALTER TABLE request ADD CONSTRAINT FK_request_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE clusterconfigmapping ADD CONSTRAINT clusterconfigmappingcluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE ClusterHostMapping ADD CONSTRAINT ClusterHostMapping_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id);
 ALTER TABLE ClusterHostMapping ADD CONSTRAINT ClusterHostMapping_host_name FOREIGN KEY (host_name) REFERENCES hosts (host_name);

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index fdb6eb5..2bdb174 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -79,6 +79,9 @@ GRANT ALL PRIVILEGES ON TABLE ambari.role_success_criteria TO :username;
 CREATE TABLE ambari.stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info BYTEA NOT NULL, PRIMARY KEY (stage_id, request_id));
 GRANT ALL PRIVILEGES ON TABLE ambari.stage TO :username;
 
+CREATE TABLE ambari.request (request_id BIGINT NOT NULL, cluster_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs VARCHAR(32000), request_context VARCHAR(255), request_type VARCHAR(255), start_time BIGINT NOT NULL, status VARCHAR(255), target_component VARCHAR(255), target_hosts TEXT, target_service VARCHAR(255), PRIMARY KEY (request_id));
+GRANT ALL PRIVILEGES ON TABLE ambari.request TO :username;
+
 CREATE TABLE ambari.ClusterHostMapping (cluster_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, host_name));
 GRANT ALL PRIVILEGES ON TABLE ambari.ClusterHostMapping TO :username;
 
@@ -113,7 +116,7 @@ GRANT ALL PRIVILEGES ON TABLE ambari.action TO :username;
 CREATE TABLE ambari.requestschedule (schedule_id bigint, cluster_id bigint NOT NULL, description varchar(255), status varchar(255), batch_separation_seconds smallint, batch_toleration_limit smallint, create_user varchar(255), create_timestamp bigint, update_user varchar(255), update_timestamp bigint, minutes varchar(10), hours varchar(10), days_of_month varchar(10), month varchar(10), day_of_week varchar(10), yearToSchedule varchar(10), startTime varchar(50), endTime varchar(50), last_execution_status varchar(255), PRIMARY KEY(schedule_id));
 GRANT ALL PRIVILEGES ON TABLE ambari.requestschedule TO :username;
 
-CREATE TABLE ambari.requestschedulebatchrequest (schedule_id bigint, batch_id bigint, request_id bigint, request_type varchar(255), request_uri varchar(1024), request_body BYTEA, request_status varchar(255), return_code smallint, return_message varchar(255), PRIMARY KEY(schedule_id, batch_id));
+CREATE TABLE ambari.requestschedulebatchrequest (schedule_id bigint, batch_id bigint, request_id bigint, request_type varchar(255), request_uri varchar(1024), request_body BYTEA, request_status varchar(255), return_code smallint, return_message varchar(20000), PRIMARY KEY(schedule_id, batch_id));
 GRANT ALL PRIVILEGES ON TABLE ambari.requestschedulebatchrequest TO :username;
 
 --------altering tables by creating foreign keys----------
@@ -133,6 +136,8 @@ ALTER TABLE ambari.host_role_command ADD CONSTRAINT FK_host_role_command_stage_i
 ALTER TABLE ambari.host_role_command ADD CONSTRAINT FK_host_role_command_host_name FOREIGN KEY (host_name) REFERENCES ambari.hosts (host_name);
 ALTER TABLE ambari.role_success_criteria ADD CONSTRAINT FK_role_success_criteria_stage_id FOREIGN KEY (stage_id, request_id) REFERENCES ambari.stage (stage_id, request_id);
 ALTER TABLE ambari.stage ADD CONSTRAINT FK_stage_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
+ALTER TABLE ambari.stage ADD CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES ambari.request (request_id);
+ALTER TABLE ambari.request ADD CONSTRAINT FK_request_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
 ALTER TABLE ambari.ClusterHostMapping ADD CONSTRAINT FK_ClusterHostMapping_host_name FOREIGN KEY (host_name) REFERENCES ambari.hosts (host_name);
 ALTER TABLE ambari.ClusterHostMapping ADD CONSTRAINT FK_ClusterHostMapping_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
 ALTER TABLE ambari.user_roles ADD CONSTRAINT FK_user_roles_user_id FOREIGN KEY (user_id) REFERENCES ambari.users (user_id);
@@ -176,7 +181,7 @@ COMMIT;
 
 -- Quartz tables
 
-CREATE TABLE qrtz_job_details
+CREATE TABLE ambari.qrtz_job_details
   (
     SCHED_NAME VARCHAR(120) NOT NULL,
     JOB_NAME  VARCHAR(200) NOT NULL,
@@ -190,8 +195,9 @@ CREATE TABLE qrtz_job_details
     JOB_DATA BYTEA NULL,
     PRIMARY KEY (SCHED_NAME,JOB_NAME,JOB_GROUP)
 );
+GRANT ALL PRIVILEGES ON TABLE ambari.qrtz_job_details TO :username;
 
-CREATE TABLE qrtz_triggers
+CREATE TABLE ambari.qrtz_triggers
   (
     SCHED_NAME VARCHAR(120) NOT NULL,
     TRIGGER_NAME VARCHAR(200) NOT NULL,
@@ -211,10 +217,11 @@ CREATE TABLE qrtz_triggers
     JOB_DATA BYTEA NULL,
     PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP),
     FOREIGN KEY (SCHED_NAME,JOB_NAME,JOB_GROUP)
-	REFERENCES QRTZ_JOB_DETAILS(SCHED_NAME,JOB_NAME,JOB_GROUP)
+	REFERENCES ambari.QRTZ_JOB_DETAILS(SCHED_NAME,JOB_NAME,JOB_GROUP)
 );
+GRANT ALL PRIVILEGES ON TABLE ambari.qrtz_triggers TO :username;
 
-CREATE TABLE qrtz_simple_triggers
+CREATE TABLE ambari.qrtz_simple_triggers
   (
     SCHED_NAME VARCHAR(120) NOT NULL,
     TRIGGER_NAME VARCHAR(200) NOT NULL,
@@ -224,10 +231,11 @@ CREATE TABLE qrtz_simple_triggers
     TIMES_TRIGGERED BIGINT NOT NULL,
     PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP),
     FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
-	REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
+	REFERENCES ambari.QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
 );
+GRANT ALL PRIVILEGES ON TABLE ambari.qrtz_simple_triggers TO :username;
 
-CREATE TABLE qrtz_cron_triggers
+CREATE TABLE ambari.qrtz_cron_triggers
   (
     SCHED_NAME VARCHAR(120) NOT NULL,
     TRIGGER_NAME VARCHAR(200) NOT NULL,
@@ -236,10 +244,11 @@ CREATE TABLE qrtz_cron_triggers
     TIME_ZONE_ID VARCHAR(80),
     PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP),
     FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
-	REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
+	REFERENCES ambari.QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
 );
+GRANT ALL PRIVILEGES ON TABLE ambari.qrtz_cron_triggers TO :username;
 
-CREATE TABLE qrtz_simprop_triggers
+CREATE TABLE ambari.qrtz_simprop_triggers
   (
     SCHED_NAME VARCHAR(120) NOT NULL,
     TRIGGER_NAME VARCHAR(200) NOT NULL,
@@ -257,10 +266,11 @@ CREATE TABLE qrtz_simprop_triggers
     BOOL_PROP_2 BOOL NULL,
     PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP),
     FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
-    REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
+    REFERENCES ambari.QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
 );
+GRANT ALL PRIVILEGES ON TABLE ambari.qrtz_simprop_triggers TO :username;
 
-CREATE TABLE qrtz_blob_triggers
+CREATE TABLE ambari.qrtz_blob_triggers
   (
     SCHED_NAME VARCHAR(120) NOT NULL,
     TRIGGER_NAME VARCHAR(200) NOT NULL,
@@ -268,26 +278,29 @@ CREATE TABLE qrtz_blob_triggers
     BLOB_DATA BYTEA NULL,
     PRIMARY KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP),
     FOREIGN KEY (SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
-        REFERENCES QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
+        REFERENCES ambari.QRTZ_TRIGGERS(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP)
 );
+GRANT ALL PRIVILEGES ON TABLE ambari.qrtz_blob_triggers TO :username;
 
-CREATE TABLE qrtz_calendars
+CREATE TABLE ambari.qrtz_calendars
   (
     SCHED_NAME VARCHAR(120) NOT NULL,
     CALENDAR_NAME  VARCHAR(200) NOT NULL,
     CALENDAR BYTEA NOT NULL,
     PRIMARY KEY (SCHED_NAME,CALENDAR_NAME)
 );
+GRANT ALL PRIVILEGES ON TABLE ambari.qrtz_calendars TO :username;
 
 
-CREATE TABLE qrtz_paused_trigger_grps
+CREATE TABLE ambari.qrtz_paused_trigger_grps
   (
     SCHED_NAME VARCHAR(120) NOT NULL,
     TRIGGER_GROUP  VARCHAR(200) NOT NULL,
     PRIMARY KEY (SCHED_NAME,TRIGGER_GROUP)
 );
+GRANT ALL PRIVILEGES ON TABLE ambari.qrtz_paused_trigger_grps TO :username;
 
-CREATE TABLE qrtz_fired_triggers
+CREATE TABLE ambari.qrtz_fired_triggers
   (
     SCHED_NAME VARCHAR(120) NOT NULL,
     ENTRY_ID VARCHAR(95) NOT NULL,
@@ -304,8 +317,9 @@ CREATE TABLE qrtz_fired_triggers
     REQUESTS_RECOVERY BOOL NULL,
     PRIMARY KEY (SCHED_NAME,ENTRY_ID)
 );
+GRANT ALL PRIVILEGES ON TABLE ambari.qrtz_fired_triggers TO :username;
 
-CREATE TABLE qrtz_scheduler_state
+CREATE TABLE ambari.qrtz_scheduler_state
   (
     SCHED_NAME VARCHAR(120) NOT NULL,
     INSTANCE_NAME VARCHAR(200) NOT NULL,
@@ -313,36 +327,38 @@ CREATE TABLE qrtz_scheduler_state
     CHECKIN_INTERVAL BIGINT NOT NULL,
     PRIMARY KEY (SCHED_NAME,INSTANCE_NAME)
 );
+GRANT ALL PRIVILEGES ON TABLE ambari.qrtz_scheduler_state TO :username;
 
-CREATE TABLE qrtz_locks
+CREATE TABLE ambari.qrtz_locks
   (
     SCHED_NAME VARCHAR(120) NOT NULL,
     LOCK_NAME  VARCHAR(40) NOT NULL,
     PRIMARY KEY (SCHED_NAME,LOCK_NAME)
 );
-
-create index idx_qrtz_j_req_recovery on qrtz_job_details(SCHED_NAME,REQUESTS_RECOVERY);
-create index idx_qrtz_j_grp on qrtz_job_details(SCHED_NAME,JOB_GROUP);
-
-create index idx_qrtz_t_j on qrtz_triggers(SCHED_NAME,JOB_NAME,JOB_GROUP);
-create index idx_qrtz_t_jg on qrtz_triggers(SCHED_NAME,JOB_GROUP);
-create index idx_qrtz_t_c on qrtz_triggers(SCHED_NAME,CALENDAR_NAME);
-create index idx_qrtz_t_g on qrtz_triggers(SCHED_NAME,TRIGGER_GROUP);
-create index idx_qrtz_t_state on qrtz_triggers(SCHED_NAME,TRIGGER_STATE);
-create index idx_qrtz_t_n_state on qrtz_triggers(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP,TRIGGER_STATE);
-create index idx_qrtz_t_n_g_state on qrtz_triggers(SCHED_NAME,TRIGGER_GROUP,TRIGGER_STATE);
-create index idx_qrtz_t_next_fire_time on qrtz_triggers(SCHED_NAME,NEXT_FIRE_TIME);
-create index idx_qrtz_t_nft_st on qrtz_triggers(SCHED_NAME,TRIGGER_STATE,NEXT_FIRE_TIME);
-create index idx_qrtz_t_nft_misfire on qrtz_triggers(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME);
-create index idx_qrtz_t_nft_st_misfire on qrtz_triggers(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_STATE);
-create index idx_qrtz_t_nft_st_misfire_grp on qrtz_triggers(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_GROUP,TRIGGER_STATE);
-
-create index idx_qrtz_ft_trig_inst_name on qrtz_fired_triggers(SCHED_NAME,INSTANCE_NAME);
-create index idx_qrtz_ft_inst_job_req_rcvry on qrtz_fired_triggers(SCHED_NAME,INSTANCE_NAME,REQUESTS_RECOVERY);
-create index idx_qrtz_ft_j_g on qrtz_fired_triggers(SCHED_NAME,JOB_NAME,JOB_GROUP);
-create index idx_qrtz_ft_jg on qrtz_fired_triggers(SCHED_NAME,JOB_GROUP);
-create index idx_qrtz_ft_t_g on qrtz_fired_triggers(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
-create index idx_qrtz_ft_tg on qrtz_fired_triggers(SCHED_NAME,TRIGGER_GROUP);
+GRANT ALL PRIVILEGES ON TABLE ambari.qrtz_locks TO :username;
+
+create index idx_qrtz_j_req_recovery on ambari.qrtz_job_details(SCHED_NAME,REQUESTS_RECOVERY);
+create index idx_qrtz_j_grp on ambari.qrtz_job_details(SCHED_NAME,JOB_GROUP);
+
+create index idx_qrtz_t_j on ambari.qrtz_triggers(SCHED_NAME,JOB_NAME,JOB_GROUP);
+create index idx_qrtz_t_jg on ambari.qrtz_triggers(SCHED_NAME,JOB_GROUP);
+create index idx_qrtz_t_c on ambari.qrtz_triggers(SCHED_NAME,CALENDAR_NAME);
+create index idx_qrtz_t_g on ambari.qrtz_triggers(SCHED_NAME,TRIGGER_GROUP);
+create index idx_qrtz_t_state on ambari.qrtz_triggers(SCHED_NAME,TRIGGER_STATE);
+create index idx_qrtz_t_n_state on ambari.qrtz_triggers(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP,TRIGGER_STATE);
+create index idx_qrtz_t_n_g_state on ambari.qrtz_triggers(SCHED_NAME,TRIGGER_GROUP,TRIGGER_STATE);
+create index idx_qrtz_t_next_fire_time on ambari.qrtz_triggers(SCHED_NAME,NEXT_FIRE_TIME);
+create index idx_qrtz_t_nft_st on ambari.qrtz_triggers(SCHED_NAME,TRIGGER_STATE,NEXT_FIRE_TIME);
+create index idx_qrtz_t_nft_misfire on ambari.qrtz_triggers(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME);
+create index idx_qrtz_t_nft_st_misfire on ambari.qrtz_triggers(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_STATE);
+create index idx_qrtz_t_nft_st_misfire_grp on ambari.qrtz_triggers(SCHED_NAME,MISFIRE_INSTR,NEXT_FIRE_TIME,TRIGGER_GROUP,TRIGGER_STATE);
+
+create index idx_qrtz_ft_trig_inst_name on ambari.qrtz_fired_triggers(SCHED_NAME,INSTANCE_NAME);
+create index idx_qrtz_ft_inst_job_req_rcvry on ambari.qrtz_fired_triggers(SCHED_NAME,INSTANCE_NAME,REQUESTS_RECOVERY);
+create index idx_qrtz_ft_j_g on ambari.qrtz_fired_triggers(SCHED_NAME,JOB_NAME,JOB_GROUP);
+create index idx_qrtz_ft_jg on ambari.qrtz_fired_triggers(SCHED_NAME,JOB_GROUP);
+create index idx_qrtz_ft_t_g on ambari.qrtz_fired_triggers(SCHED_NAME,TRIGGER_NAME,TRIGGER_GROUP);
+create index idx_qrtz_ft_tg on ambari.qrtz_fired_triggers(SCHED_NAME,TRIGGER_GROUP);
 
 commit;
 
@@ -438,4 +454,4 @@ CREATE TABLE clusterEvent (
   error     TEXT, data TEXT,
   host      TEXT, rack TEXT
 );
-GRANT ALL PRIVILEGES ON TABLE clusterEvent TO "mapred";
\ No newline at end of file
+GRANT ALL PRIVILEGES ON TABLE clusterEvent TO "mapred";

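Since the Quartz tables above are now created inside the ambari schema and
granted to the server user, a JDBC-backed Quartz job store has to be pointed at
the schema-qualified table names. A minimal sketch of such a configuration,
assuming a PostgreSQL delegate and an illustrative datasource name (none of
these values are taken from this patch):

  import java.util.Properties;
  import org.quartz.Scheduler;
  import org.quartz.impl.StdSchedulerFactory;

  public class QuartzJobStoreSketch {
    public static Scheduler buildScheduler() throws Exception {
      Properties p = new Properties();
      p.setProperty("org.quartz.scheduler.instanceName", "ExecutionScheduler"); // assumed name
      p.setProperty("org.quartz.threadPool.threadCount", "5");                  // assumed pool size
      // JDBC job store pointed at the schema-qualified tables created above.
      p.setProperty("org.quartz.jobStore.class", "org.quartz.impl.jdbcjobstore.JobStoreTX");
      p.setProperty("org.quartz.jobStore.driverDelegateClass", "org.quartz.impl.jdbcjobstore.PostgreSQLDelegate");
      p.setProperty("org.quartz.jobStore.tablePrefix", "ambari.QRTZ_");
      p.setProperty("org.quartz.jobStore.dataSource", "ambariDS");              // assumed datasource id
      p.setProperty("org.quartz.dataSource.ambariDS.driver", "org.postgresql.Driver");
      p.setProperty("org.quartz.dataSource.ambariDS.URL", "jdbc:postgresql://localhost/ambari");
      p.setProperty("org.quartz.dataSource.ambariDS.user", "ambari-server");    // placeholder
      p.setProperty("org.quartz.dataSource.ambariDS.password", "secret");       // placeholder
      return new StdSchedulerFactory(p).getScheduler();
    }
  }
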
http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql
index 4ecc697..7c77d61 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-REMOTE-CREATE.sql
@@ -36,6 +36,7 @@ CREATE TABLE ambari.execution_command (command bytea, task_id BIGINT NOT NULL, P
 CREATE TABLE ambari.host_role_command (task_id BIGINT NOT NULL, attempt_count SMALLINT NOT NULL, event VARCHAR(32000) NOT NULL, exitcode INTEGER NOT NULL, host_name VARCHAR(255) NOT NULL, last_attempt_time BIGINT NOT NULL, request_id BIGINT NOT NULL, role VARCHAR(255), stage_id BIGINT NOT NULL, start_time BIGINT NOT NULL, end_time BIGINT, status VARCHAR(255), std_error BYTEA, std_out BYTEA, structured_out BYTEA, role_command VARCHAR(255), PRIMARY KEY (task_id));
 CREATE TABLE ambari.role_success_criteria (role VARCHAR(255) NOT NULL, request_id BIGINT NOT NULL, stage_id BIGINT NOT NULL, success_factor FLOAT NOT NULL, PRIMARY KEY (role, request_id, stage_id));
 CREATE TABLE ambari.stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), PRIMARY KEY (stage_id, request_id));
+CREATE TABLE ambari.request (request_id BIGINT NOT NULL, cluster_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs VARCHAR(32000), request_context VARCHAR(255), request_type VARCHAR(255), start_time BIGINT NOT NULL, status VARCHAR(255), target_component VARCHAR(255), target_hosts TEXT, target_service VARCHAR(255), PRIMARY KEY (request_id));
 CREATE TABLE ambari.ClusterHostMapping (cluster_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, host_name));
 CREATE TABLE ambari.user_roles (role_name VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, PRIMARY KEY (role_name, user_id));
 CREATE TABLE ambari.key_value_store ("key" VARCHAR(255), "value" VARCHAR, PRIMARY KEY("key"));
@@ -46,7 +47,7 @@ CREATE TABLE ambari.configgroup (group_id BIGINT, cluster_id BIGINT NOT NULL, gr
 CREATE TABLE ambari.confgroupclusterconfigmapping (config_group_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, config_type VARCHAR(255) NOT NULL, version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', create_timestamp BIGINT NOT NULL, PRIMARY KEY(config_group_id, cluster_id, config_type));
 CREATE TABLE ambari.configgrouphostmapping (config_group_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY(config_group_id, host_name));
 CREATE TABLE ambari.requestschedule (schedule_id bigint, cluster_id BIGINT NOT NULL, status varchar(255), batch_separation_seconds smallint, batch_toleration_limit smallint, create_user varchar(255), create_timestamp bigint, update_user varchar(255), update_timestamp bigint, minutes varchar(10), hours varchar(10), days_of_month varchar(10), month varchar(10), day_of_week varchar(10), yearToSchedule varchar(10), startTime varchar(50), endTime varchar(50), last_execution_status varchar(255), PRIMARY KEY(schedule_id));
-CREATE TABLE ambari.requestschedulebatchrequest (schedule_id bigint, batch_id bigint, request_id bigint, request_type varchar(255), request_uri varchar(1024), request_body BYTEA, request_status varchar(255), return_code smallint, return_message varchar(255), PRIMARY KEY(schedule_id, batch_id));
+CREATE TABLE ambari.requestschedulebatchrequest (schedule_id bigint, batch_id bigint, request_id bigint, request_type varchar(255), request_uri varchar(1024), request_body BYTEA, request_status varchar(255), return_code smallint, return_message varchar(2000), PRIMARY KEY(schedule_id, batch_id));
 
 ALTER TABLE ambari.clusterconfig ADD CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
 ALTER TABLE ambari.clusterservices ADD CONSTRAINT FK_clusterservices_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
@@ -64,6 +65,8 @@ ALTER TABLE ambari.host_role_command ADD CONSTRAINT FK_host_role_command_stage_i
 ALTER TABLE ambari.host_role_command ADD CONSTRAINT FK_host_role_command_host_name FOREIGN KEY (host_name) REFERENCES ambari.hosts (host_name);
 ALTER TABLE ambari.role_success_criteria ADD CONSTRAINT FK_role_success_criteria_stage_id FOREIGN KEY (stage_id, request_id) REFERENCES ambari.stage (stage_id, request_id);
 ALTER TABLE ambari.stage ADD CONSTRAINT FK_stage_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
+ALTER TABLE ambari.stage ADD CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES ambari.request (request_id);
+ALTER TABLE ambari.request ADD CONSTRAINT FK_request_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
 ALTER TABLE ambari.ClusterHostMapping ADD CONSTRAINT FK_ClusterHostMapping_host_name FOREIGN KEY (host_name) REFERENCES ambari.hosts (host_name);
 ALTER TABLE ambari.ClusterHostMapping ADD CONSTRAINT FK_ClusterHostMapping_cluster_id FOREIGN KEY (cluster_id) REFERENCES ambari.clusters (cluster_id);
 ALTER TABLE ambari.user_roles ADD CONSTRAINT FK_user_roles_user_id FOREIGN KEY (user_id) REFERENCES ambari.users (user_id);
@@ -348,4 +351,4 @@ CREATE TABLE clusterEvent (
   service TEXT, status TEXT,
   error TEXT, data TEXT ,
   host TEXT, rack TEXT
-);
\ No newline at end of file
+);

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/META-INF/persistence.xml b/ambari-server/src/main/resources/META-INF/persistence.xml
index 6c8a5d6..3b07bd7 100644
--- a/ambari-server/src/main/resources/META-INF/persistence.xml
+++ b/ambari-server/src/main/resources/META-INF/persistence.xml
@@ -30,6 +30,7 @@
     <class>org.apache.ambari.server.orm.entities.HostRoleCommandEntity</class>
     <class>org.apache.ambari.server.orm.entities.RoleSuccessCriteriaEntity</class>
     <class>org.apache.ambari.server.orm.entities.StageEntity</class>
+    <class>org.apache.ambari.server.orm.entities.RequestEntity</class>
     <class>org.apache.ambari.server.orm.entities.KeyValueEntity</class>
     <class>org.apache.ambari.server.orm.entities.ClusterConfigMappingEntity</class>
     <class>org.apache.ambari.server.orm.entities.HostConfigMappingEntity</class>
@@ -47,6 +48,8 @@
       <property name="eclipselink.cache.size.default" value="10000" />
       <property name="eclipselink.jdbc.batch-writing" value="JDBC"/>
       <property name="eclipselink.weaving" value="static" />
+      <!--<property name="eclipselink.id-validation" value="NULL" />-->
+
     </properties>
   </persistence-unit>
 

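The persistence unit now registers RequestEntity alongside the existing stage
and command entities. Purely as an illustration of what a JPA mapping onto the
new ambari.request table could look like (this is not the actual RequestEntity
source; the field set below just mirrors a few of the DDL columns added above):

  import javax.persistence.Column;
  import javax.persistence.Entity;
  import javax.persistence.Id;
  import javax.persistence.Table;

  @Entity
  @Table(name = "request")
  public class RequestEntitySketch {
    @Id
    @Column(name = "request_id", nullable = false)
    private Long requestId;

    @Column(name = "cluster_id")
    private Long clusterId;

    @Column(name = "request_context")
    private String requestContext;

    @Column(name = "request_type")
    private String requestType;

    @Column(name = "create_time", nullable = false)
    private Long createTime;

    @Column(name = "status")
    private String status;

    // getters/setters omitted for brevity
  }
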
http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index 9c9529f..52c0a20 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -96,6 +96,14 @@
         "Requests/cluster_name",
         "Requests/request_status",
         "Requests/request_context",
+        "Requests/type",
+        "Requests/inputs",
+        "Requests/target_service",
+        "Requests/target_component",
+        "Requests/target_hosts",
+        "Requests/create_time",
+        "Requests/start_time",
+        "Requests/end_time",
         "Requests/task_count",
         "Requests/failed_task_count",
         "Requests/aborted_task_count",

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml b/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
index 6a722d5..bb232b7 100644
--- a/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
+++ b/ambari-server/src/main/resources/webapp/WEB-INF/spring-security.xml
@@ -27,6 +27,7 @@
     <http-basic entry-point-ref="ambariEntryPoint"/>
     <intercept-url pattern="/**" access="isAuthenticated()" method="GET"/>
     <intercept-url pattern="/**" access="hasRole('ADMIN')"/>
+    <custom-filter ref="internalTokenAuthenticationFilter" after="BASIC_AUTH_FILTER"/>
   </http>
 
   <!--<ldap-server id="ldapServer" root="dc=ambari,dc=apache,dc=org"/>-->
@@ -39,8 +40,10 @@
 
     <authentication-provider ref="ambariLdapAuthenticationProvider"/>
 
+    <authentication-provider ref="ambariInternalAuthenticationProvider"/>
+
   </authentication-manager>
 
   <beans:bean id="ambariEntryPoint" class="org.apache.ambari.server.security.AmbariEntryPoint">
   </beans:bean>
-</beans:beans>
\ No newline at end of file
+</beans:beans>

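The custom filter registered after BASIC_AUTH_FILTER, together with the new
internal authentication provider, lets server-initiated batch requests
authenticate with an internal token instead of user credentials. A rough sketch
of what such a token filter could look like (the header name, class name and
wiring are assumptions, not taken from this patch):

  import java.io.IOException;
  import javax.servlet.Filter;
  import javax.servlet.FilterChain;
  import javax.servlet.FilterConfig;
  import javax.servlet.ServletException;
  import javax.servlet.ServletRequest;
  import javax.servlet.ServletResponse;
  import javax.servlet.http.HttpServletRequest;

  public class InternalTokenFilterSketch implements Filter {
    private final String expectedToken;

    public InternalTokenFilterSketch(String expectedToken) {
      this.expectedToken = expectedToken;
    }

    @Override
    public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain)
        throws IOException, ServletException {
      // Assumed header name; the real filter bean is internalTokenAuthenticationFilter.
      String token = ((HttpServletRequest) req).getHeader("X-Internal-Token");
      if (expectedToken.equals(token)) {
        // A real implementation would place an authenticated internal principal
        // into the Spring SecurityContext here before continuing the chain.
      }
      chain.doFilter(req, resp);
    }

    @Override public void init(FilterConfig filterConfig) {}
    @Override public void destroy() {}
  }
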
http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
index 62e3c88..183c126 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
@@ -160,7 +160,8 @@ public class ExecutionCommandWrapperTest {
             hostName, System.currentTimeMillis()), clusterName, "HDFS");
     List<Stage> stages = new ArrayList<Stage>();
     stages.add(s);
-    db.persistActions(stages);
+    Request request = new Request(stages, clusters);
+    db.persistActions(request);
   }
   
   @Test

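The test changes in this commit all follow the same mechanical pattern: stages
are no longer passed to persistActions directly but are first wrapped in a
Request built from the stage list and the Clusters instance. Within a test that
already has db, stages and clusters in scope, the call-site change is simply:

  // before: db.persistActions(stages);
  Request request = new Request(stages, clusters);
  db.persistActions(request);
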
http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
index d8042ae..6f8c884 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
@@ -83,7 +83,7 @@ public class TestActionDBAccessorImpl {
     cdb = injector.getInstance(CustomActionDBAccessor.class);
 
     am = new ActionManager(5000, 1200000, new ActionQueue(), clusters, db,
-        new HostsMap((String) null), null, injector.getInstance(UnitOfWork.class), cdb);
+        new HostsMap((String) null), null, injector.getInstance(UnitOfWork.class), cdb, injector.getInstance(RequestFactory.class));
   }
 
   @After
@@ -121,9 +121,13 @@ public class TestActionDBAccessorImpl {
   @Test
   public void testGetStagesInProgress() {
     String hostname = "host1";
-    populateActionDB(db, hostname, requestId, stageId);
-    populateActionDB(db, hostname, requestId, stageId+1);
-    List<Stage> stages = db.getStagesInProgress();
+    List<Stage> stubStages = new ArrayList<Stage>();
+    stubStages.add(createStubStage(hostname, requestId, stageId));
+    stubStages.add(createStubStage(hostname, requestId, stageId + 1));
+    Request request = new Request(stubStages, clusters);
+    db.persistActions(request);
+
+    List<Stage> stages = db.getStagesInProgress();
     assertEquals(2, stages.size());
   }
   
@@ -314,7 +318,8 @@ public class TestActionDBAccessorImpl {
     String hostName = cmd.getHostName();
     cmd.setStatus(HostRoleStatus.COMPLETED);
 
-    db.persistActions(stages);
+    Request request = new Request(stages, clusters);
+    db.persistActions(request);
     db.abortOperation(requestId);
 
     List<HostRoleCommand> commands = db.getRequestTasks(requestId);
@@ -329,6 +334,14 @@ public class TestActionDBAccessorImpl {
 
   private void populateActionDB(ActionDBAccessor db, String hostname,
       long requestId, long stageId) {
+    Stage s = createStubStage(hostname, requestId, stageId);
+    List<Stage> stages = new ArrayList<Stage>();
+    stages.add(s);
+    Request request = new Request(stages, clusters);
+    db.persistActions(request);
+  }
+
+  private Stage createStubStage(String hostname, long requestId, long stageId) {
     Stage s = new Stage(requestId, "/a/b", "cluster1", "action db accessor test", "clusterHostInfo");
     s.setStageId(stageId);
     s.addHostRoleExecutionCommand(hostname, Role.HBASE_MASTER,
@@ -341,9 +354,7 @@ public class TestActionDBAccessorImpl {
         RoleCommand.START,
         new ServiceComponentHostStartEvent(Role.HBASE_REGIONSERVER
             .toString(), hostname, System.currentTimeMillis()), "cluster1", "HBASE");
-    List<Stage> stages = new ArrayList<Stage>();
-    stages.add(s);
-    db.persistActions(stages);
+    return s;
   }
 
   private void populateActionDBWithCustomAction(ActionDBAccessor db, String hostname,
@@ -356,8 +367,9 @@ public class TestActionDBAccessorImpl {
             hostname, System.currentTimeMillis()), "cluster1", "HBASE");
     List<Stage> stages = new ArrayList<Stage>();
     stages.add(s);
-    ExecuteActionRequest request = new ExecuteActionRequest("cluster1", null, actionName, "HBASE",
+    ExecuteActionRequest executeActionRequest = new ExecuteActionRequest("cluster1", null, actionName, "HBASE",
         "HBASE_MASTER", null, null);
-    db.persistActions(stages);
+    Request request = new Request(stages, clusters);
+    db.persistActions(request);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c8697655/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionManager.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionManager.java
index 20d6792..a266cc6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionManager.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionManager.java
@@ -84,7 +84,7 @@ public class TestActionManager {
   public void testActionResponse() {
     ActionDBAccessor db = injector.getInstance(ActionDBAccessorImpl.class);
     ActionManager am = new ActionManager(5000, 1200000, new ActionQueue(),
-        clusters, db, new HostsMap((String) null), null, unitOfWork, null);
+        clusters, db, new HostsMap((String) null), null, unitOfWork, null, injector.getInstance(RequestFactory.class));
     populateActionDB(db, hostname);
     Stage stage = db.getAllStages(requestId).get(0);
     Assert.assertEquals(stageId, stage.getStageId());
@@ -124,7 +124,7 @@ public class TestActionManager {
   public void testLargeLogs() {
     ActionDBAccessor db = injector.getInstance(ActionDBAccessorImpl.class);
     ActionManager am = new ActionManager(5000, 1200000, new ActionQueue(),
-        clusters, db, new HostsMap((String) null), null, unitOfWork, null);
+        clusters, db, new HostsMap((String) null), null, unitOfWork, null, injector.getInstance(RequestFactory.class));
     populateActionDB(db, hostname);
     Stage stage = db.getAllStages(requestId).get(0);
     Assert.assertEquals(stageId, stage.getStageId());
@@ -171,7 +171,8 @@ public class TestActionManager {
             hostname, System.currentTimeMillis()), "cluster1", "HBASE");
     List<Stage> stages = new ArrayList<Stage>();
     stages.add(s);
-    db.persistActions(stages);
+    Request request = new Request(stages, clusters);
+    db.persistActions(request);
   }
 
   // Test failing ... tracked by Jira BUG-4966
@@ -212,7 +213,7 @@ public class TestActionManager {
 
     replay(queue, db, clusters);
 
-    ActionManager manager = new ActionManager(0, 0, queue, clusters, db, null, null, unitOfWork, null);
+    ActionManager manager = new ActionManager(0, 0, queue, clusters, db, null, null, unitOfWork, null, injector.getInstance(RequestFactory.class));
     assertSame(listStages, manager.getActions(requestId));
 
     verify(queue, db, clusters);