You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by dm...@apache.org on 2014/05/08 22:02:29 UTC
[1/2] AMBARI-5580. Introduce operation_level request parameter
(dlysnichenko)
Repository: ambari
Updated Branches:
refs/heads/trunk 2fb2a17ec -> 5dff4516a
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestOperationLevelTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestOperationLevelTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestOperationLevelTest.java
new file mode 100644
index 0000000..d832015
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestOperationLevelTest.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import junit.framework.TestCase;
+import org.junit.Test;
+import static junit.framework.TestCase.*;
+
+public class RequestOperationLevelTest {
+
+ private final String host_component = "HOST_COMPONENT";
+ private final String hostComponent = "HostComponent";
+
+ @Test
+ public void testGetInternalLevelName() throws Exception {
+ String internal = RequestOperationLevel.getInternalLevelName(host_component);
+ assertEquals(internal, hostComponent);
+ // Check case-insensitivity
+ internal = RequestOperationLevel.getInternalLevelName(host_component.toLowerCase());
+ assertEquals(internal, hostComponent);
+ // Check wrong param
+ try {
+ RequestOperationLevel.getInternalLevelName("Wrong_param");
+ fail("Should throw exception");
+ } catch (IllegalArgumentException e) {
+ // expected
+ }
+ }
+
+ @Test
+ public void testGetExternalLevelName() throws Exception {
+ String external = RequestOperationLevel.getExternalLevelName(hostComponent);
+ assertEquals(external, host_component);
+ // Check wrong param
+ try {
+ RequestOperationLevel.getExternalLevelName("Wrong_param");
+ fail("Should throw exception");
+ } catch (IllegalArgumentException e) {
+ // expected
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
index c0ad8ce..2312383 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
@@ -30,7 +30,10 @@ import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
import org.apache.ambari.server.controller.spi.Predicate;
import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.spi.SystemException;
+import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
import org.apache.ambari.server.controller.utilities.PredicateBuilder;
import org.apache.ambari.server.controller.utilities.PropertyHelper;
import org.apache.ambari.server.state.Cluster;
@@ -914,8 +917,8 @@ public class RequestResourceProviderTest {
Map<String, String> requestInfoProperties = new HashMap<String, String>();
- requestInfoProperties.put("/parameters/param1", "value1");
- requestInfoProperties.put("/parameters/param2", "value2");
+ requestInfoProperties.put("parameters/param1", "value1");
+ requestInfoProperties.put("parameters/param2", "value2");
String[] expectedHosts = new String[]{"host1", "host2", "host3"};
Map<String, String> expectedParams = new HashMap<String, String>() {{
@@ -965,4 +968,99 @@ public class RequestResourceProviderTest {
Assert.assertEquals(expectedParams.get(key), capturedRequest.getParameters().get(key));
}
}
+
+ @Test
+ public void testCreateResourcesForCommandsWithOpLvl() throws Exception {
+ Resource.Type type = Resource.Type.Request;
+
+ Capture<ExecuteActionRequest> actionRequest = new Capture<ExecuteActionRequest>();
+ Capture<HashMap<String, String>> propertyMap = new Capture<HashMap<String, String>>();
+
+ AmbariManagementController managementController = createMock(AmbariManagementController.class);
+ RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
+
+ expect(managementController.createAction(capture(actionRequest), capture(propertyMap)))
+ .andReturn(response).anyTimes();
+
+ // replay
+ replay(managementController);
+
+ // add the property map to a set for the request. add more maps for multiple creates
+ Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+
+ Map<String, Object> properties = new LinkedHashMap<String, Object>();
+
+ String c1 = "c1";
+ String host_component = "HOST_COMPONENT";
+ String service_id = "HDFS";
+ String hostcomponent_id = "Namenode";
+ String host_id = "host1";
+
+ properties.put(RequestResourceProvider.REQUEST_CLUSTER_NAME_PROPERTY_ID, c1);
+
+ Set<Map<String, Object>> filterSet = new HashSet<Map<String, Object>>();
+ Map<String, Object> filterMap = new HashMap<String, Object>();
+ filterMap.put(RequestResourceProvider.SERVICE_ID, service_id);
+ filterMap.put(RequestResourceProvider.HOSTS_ID, host_id);
+ filterSet.add(filterMap);
+
+ properties.put(RequestResourceProvider.REQUEST_RESOURCE_FILTER_ID, filterSet);
+
+ propertySet.add(properties);
+
+ Map<String, String> requestInfoProperties = new HashMap<String, String>();
+ requestInfoProperties.put(RequestResourceProvider.COMMAND_ID, "RESTART");
+
+ requestInfoProperties.put(RequestResourceProvider.OPERATION_LEVEL_ID,
+ host_component);
+ requestInfoProperties.put(RequestResourceProvider.OPERATION_CLUSTER_ID, c1);
+ requestInfoProperties.put(RequestResourceProvider.OPERATION_SERVICE_ID,
+ service_id);
+ requestInfoProperties.put(RequestResourceProvider.OPERATION_HOSTCOMPONENT_ID,
+ hostcomponent_id);
+ requestInfoProperties.put(RequestResourceProvider.OPERATION_HOST_ID,
+ host_id);
+
+ Request request = PropertyHelper.getCreateRequest(propertySet, requestInfoProperties);
+ ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider(
+ type,
+ PropertyHelper.getPropertyIds(type),
+ PropertyHelper.getKeyPropertyIds(type),
+ managementController);
+
+ // Check exception wrong operation level is specified
+ requestInfoProperties.put(RequestResourceProvider.OPERATION_LEVEL_ID,
+ "wrong_value");
+ try {
+ provider.createResources(request);
+ Assert.fail("Should throw an exception");
+ } catch (UnsupportedOperationException e) {
+ // expected
+ }
+ requestInfoProperties.put(RequestResourceProvider.OPERATION_LEVEL_ID,
+ host_component);
+
+ // Check exception when cluster name is not specified
+ requestInfoProperties.remove(RequestResourceProvider.OPERATION_CLUSTER_ID);
+ try {
+ provider.createResources(request);
+ Assert.fail("Should throw an exception");
+ } catch (UnsupportedOperationException e) {
+ // expected
+ }
+ requestInfoProperties.put(RequestResourceProvider.OPERATION_CLUSTER_ID, c1);
+
+ // create request in a normal way (positive scenario)
+ provider.createResources(request);
+ Assert.assertTrue(actionRequest.hasCaptured());
+ ExecuteActionRequest capturedRequest = actionRequest.getValue();
+ RequestOperationLevel level = capturedRequest.getOperationLevel();
+ Assert.assertEquals(level.getLevel().toString(), "HostComponent");
+ Assert.assertEquals(level.getClusterName(), c1);
+ Assert.assertEquals(level.getServiceName(), service_id);
+ Assert.assertEquals(level.getHostComponentName(), hostcomponent_id);
+ Assert.assertEquals(level.getHostName(), host_id);
+ }
+
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog161Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog161Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog161Test.java
new file mode 100644
index 0000000..415951f
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog161Test.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.upgrade;
+
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.orm.DBAccessor;
+import org.easymock.Capture;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.sql.SQLException;
+import java.util.List;
+import java.util.Map;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertFalse;
+import static junit.framework.Assert.assertNull;
+import static junit.framework.Assert.assertTrue;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+/**
+ * UpgradeCatalog161 unit tests.
+ */
+public class UpgradeCatalog161Test {
+
+ @Test
+ public void testExecuteDDLUpdates() throws Exception {
+
+ final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
+ Configuration configuration = createNiceMock(Configuration.class);
+ Capture<List<DBAccessor.DBColumnInfo>> operationLevelEntitycolumnCapture = new Capture<List<DBAccessor.DBColumnInfo>>();
+
+ expect(configuration.getDatabaseUrl()).andReturn(Configuration.JDBC_IN_MEMORY_URL).anyTimes();
+
+ setOperationLevelEntityConfigExpectations(dbAccessor, operationLevelEntitycolumnCapture);
+
+ replay(dbAccessor, configuration);
+ AbstractUpgradeCatalog upgradeCatalog = getUpgradeCatalog(dbAccessor);
+ Class<?> c = AbstractUpgradeCatalog.class;
+ Field f = c.getDeclaredField("configuration");
+ f.setAccessible(true);
+ f.set(upgradeCatalog, configuration);
+
+ upgradeCatalog.executeDDLUpdates();
+ verify(dbAccessor, configuration);
+
+ assertOperationLevelEntityColumns(operationLevelEntitycolumnCapture);
+ }
+
+
+ @Test
+ public void testExecuteDMLUpdates() throws Exception {
+ Configuration configuration = createNiceMock(Configuration.class);
+ DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
+
+ Method m = AbstractUpgradeCatalog.class.getDeclaredMethod
+ ("updateConfigurationProperties", String.class, Map.class, boolean.class);
+
+ UpgradeCatalog161 upgradeCatalog = createMockBuilder(UpgradeCatalog161.class)
+ .addMockedMethod(m).createMock();
+
+ expect(configuration.getDatabaseUrl()).andReturn(Configuration.JDBC_IN_MEMORY_URL).anyTimes();
+
+ replay(upgradeCatalog, dbAccessor, configuration);
+
+ Class<?> c = AbstractUpgradeCatalog.class;
+ Field f = c.getDeclaredField("configuration");
+ f.setAccessible(true);
+ f.set(upgradeCatalog, configuration);
+ f = c.getDeclaredField("dbAccessor");
+ f.setAccessible(true);
+ f.set(upgradeCatalog, dbAccessor);
+
+ upgradeCatalog.executeDMLUpdates();
+
+ verify(upgradeCatalog, dbAccessor, configuration);
+ }
+
+
+ @Test
+ public void testGetTargetVersion() throws Exception {
+ final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
+ UpgradeCatalog upgradeCatalog = getUpgradeCatalog(dbAccessor);
+
+ Assert.assertEquals("1.6.1", upgradeCatalog.getTargetVersion());
+ }
+
+
+ private AbstractUpgradeCatalog getUpgradeCatalog(final DBAccessor dbAccessor) {
+ Module module = new Module() {
+ @Override
+ public void configure(Binder binder) {
+ binder.bind(DBAccessor.class).toInstance(dbAccessor);
+ }
+ };
+ Injector injector = Guice.createInjector(module);
+ return injector.getInstance(UpgradeCatalog161.class);
+ }
+
+
+ private void setOperationLevelEntityConfigExpectations(DBAccessor dbAccessor,
+ Capture<List<DBAccessor.DBColumnInfo>> operationLevelEntitycolumnCapture)
+ throws SQLException {
+
+ dbAccessor.createTable(eq("requestoperationlevel"),
+ capture(operationLevelEntitycolumnCapture), eq("operation_level_id"));
+
+ dbAccessor.addFKConstraint("requestoperationlevel", "FK_req_op_level_req_id",
+ "request_id", "request", "request_id", true);
+ }
+
+
+ private void assertOperationLevelEntityColumns(Capture<List<DBAccessor.DBColumnInfo>> operationLevelEntitycolumnCapture) {
+ List<DBAccessor.DBColumnInfo> columns = operationLevelEntitycolumnCapture.getValue();
+ assertEquals(7, columns.size());
+
+ DBAccessor.DBColumnInfo column = columns.get(0);
+ assertEquals("operation_level_id", column.getName());
+ assertNull(column.getLength());
+ assertEquals(Long.class, column.getType());
+ assertNull(column.getDefaultValue());
+ assertFalse(column.isNullable());
+
+ column = columns.get(1);
+ assertEquals("request_id", column.getName());
+ assertNull(column.getLength());
+ assertEquals(Long.class, column.getType());
+ assertNull(column.getDefaultValue());
+ assertFalse(column.isNullable());
+
+ column = columns.get(2);
+ assertEquals("level_name", column.getName());
+ assertEquals(255, (int) column.getLength());
+ assertEquals(String.class, column.getType());
+ assertNull(column.getDefaultValue());
+ assertTrue(column.isNullable());
+
+ column = columns.get(3);
+ assertEquals("cluster_name", column.getName());
+ assertEquals(255, (int) column.getLength());
+ assertEquals(String.class, column.getType());
+ assertNull(column.getDefaultValue());
+ assertTrue(column.isNullable());
+
+ column = columns.get(4);
+ assertEquals("service_name", column.getName());
+ assertEquals(255, (int) column.getLength());
+ assertEquals(String.class, column.getType());
+ assertNull(column.getDefaultValue());
+ assertTrue(column.isNullable());
+
+ column = columns.get(5);
+ assertEquals("host_component_name", column.getName());
+ assertEquals(255, (int) column.getLength());
+ assertEquals(String.class, column.getType());
+ assertNull(column.getDefaultValue());
+ assertTrue(column.isNullable());
+
+ column = columns.get(6);
+ assertEquals("host_name", column.getName());
+ assertEquals(255, (int) column.getLength());
+ assertEquals(String.class, column.getType());
+ assertNull(column.getDefaultValue());
+ assertTrue(column.isNullable());
+
+ }
+
+}
[2/2] git commit: AMBARI-5580. Introduce operation_level request
parameter (dlysnichenko)
Posted by dm...@apache.org.
AMBARI-5580. Introduce operation_level request parameter (dlysnichenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5dff4516
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5dff4516
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5dff4516
Branch: refs/heads/trunk
Commit: 5dff4516ae9ab962aaefcdf535e0d41df698e948
Parents: 2fb2a17
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Tue Apr 15 23:02:35 2014 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Thu May 8 23:02:00 2014 +0300
----------------------------------------------------------------------
.../ambari/server/actionmanager/Request.java | 37 ++++
.../ambari/server/agent/ExecutionCommand.java | 4 +
.../ambari/server/agent/HeartbeatMonitor.java | 3 +
.../services/parsers/JsonRequestBodyParser.java | 8 +-
.../api/services/parsers/RequestBodyParser.java | 5 +
.../controller/ActionExecutionContext.java | 11 ++
.../AmbariCustomCommandExecutionHelper.java | 8 +
.../AmbariManagementControllerImpl.java | 60 +++---
.../server/controller/ExecuteActionRequest.java | 8 +
.../controller/MaintenanceStateHelper.java | 56 +++---
.../internal/RequestOperationLevel.java | 147 ++++++++++++++
.../internal/RequestResourceProvider.java | 122 +++++-------
.../server/orm/entities/ClusterStateEntity.java | 2 +-
.../server/orm/entities/RequestEntity.java | 12 ++
.../entities/RequestOperationLevelEntity.java | 141 +++++++++++++
.../server/upgrade/SchemaUpgradeHelper.java | 1 +
.../server/upgrade/UpgradeCatalog160.java | 4 +-
.../server/upgrade/UpgradeCatalog161.java | 89 +++++++++
.../main/resources/Ambari-DDL-MySQL-CREATE.sql | 3 +
.../main/resources/Ambari-DDL-Oracle-CREATE.sql | 3 +
.../resources/Ambari-DDL-Postgres-CREATE.sql | 7 +-
.../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql | 8 +-
.../src/main/resources/META-INF/persistence.xml | 1 +
.../src/main/resources/properties.json | 1 +
.../actionmanager/TestActionDBAccessorImpl.java | 2 +-
.../AmbariManagementControllerTest.java | 56 +++---
.../internal/RequestOperationLevelTest.java | 58 ++++++
.../internal/RequestResourceProviderTest.java | 102 +++++++++-
.../server/upgrade/UpgradeCatalog161Test.java | 196 +++++++++++++++++++
29 files changed, 992 insertions(+), 163 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
index e209076..b3b5279 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
@@ -23,8 +23,11 @@ import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.controller.ExecuteActionRequest;
+import org.apache.ambari.server.controller.internal.RequestOperationLevel;
import org.apache.ambari.server.controller.internal.RequestResourceFilter;
+import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.orm.entities.RequestEntity;
+import org.apache.ambari.server.orm.entities.RequestOperationLevelEntity;
import org.apache.ambari.server.orm.entities.RequestResourceFilterEntity;
import org.apache.ambari.server.orm.entities.StageEntity;
import org.apache.ambari.server.state.Clusters;
@@ -51,6 +54,7 @@ public class Request {
private HostRoleStatus status; // not persisted yet
private String inputs;
private List<RequestResourceFilter> resourceFilters;
+ private RequestOperationLevel operationLevel;
private RequestType requestType;
private Collection<Stage> stages = new ArrayList<Stage>();
@@ -114,6 +118,7 @@ public class Request {
this(stages, clusters);
if (actionRequest != null) {
this.resourceFilters = actionRequest.getResourceFilters();
+ this.operationLevel = actionRequest.getOperationLevel();
this.inputs = gson.toJson(actionRequest.getParameters());
this.requestType = actionRequest.isCommand() ? RequestType.COMMAND : RequestType.ACTION;
this.commandName = actionRequest.isCommand() ? actionRequest.getCommandName() : actionRequest.getActionName();
@@ -161,6 +166,16 @@ public class Request {
this.resourceFilters.add(resourceFilter);
}
}
+ RequestOperationLevelEntity operationLevelEntity = entity.getRequestOperationLevel();
+ if (operationLevelEntity != null) {
+ this.operationLevel = new RequestOperationLevel(
+ Resource.Type.valueOf(operationLevelEntity.getLevel()),
+ operationLevelEntity.getClusterName(),
+ operationLevelEntity.getServiceName(),
+ operationLevelEntity.getHostComponentName(),
+ operationLevelEntity.getHostName()
+ );
+ }
}
private List<String> getHostsList(String hosts) {
@@ -216,6 +231,19 @@ public class Request {
requestEntity.setResourceFilterEntities(filterEntities);
}
+ if (operationLevel != null) {
+ RequestOperationLevelEntity operationLevelEntity =
+ new RequestOperationLevelEntity();
+ operationLevelEntity.setLevel(operationLevel.getLevel().toString());
+ operationLevelEntity.setClusterName(operationLevel.getClusterName());
+ operationLevelEntity.setServiceName(operationLevel.getServiceName());
+ operationLevelEntity.setHostComponentName(operationLevel.getHostComponentName());
+ operationLevelEntity.setHostName(operationLevel.getHostName());
+ operationLevelEntity.setRequestEntity(requestEntity);
+ operationLevelEntity.setRequestId(requestId);
+ requestEntity.setRequestOperationLevel(operationLevelEntity);
+ }
+
return requestEntity;
}
@@ -272,6 +300,14 @@ public class Request {
this.resourceFilters = resourceFilters;
}
+ public RequestOperationLevel getOperationLevel() {
+ return operationLevel;
+ }
+
+ public void setOperationLevel(RequestOperationLevel operationLevel) {
+ this.operationLevel = operationLevel;
+ }
+
public RequestType getRequestType() {
return requestType;
}
@@ -316,6 +352,7 @@ public class Request {
", endTime=" + endTime +
", inputs='" + inputs + '\'' +
", resourceFilters='" + resourceFilters + '\'' +
+ ", operationLevel='" + operationLevel + '\'' +
", requestType=" + requestType +
", stages=" + stages +
'}';
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index 40e9abb..e4b5fc5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@ -55,6 +55,10 @@ public class ExecutionCommand extends AgentCommand {
private Map<String, String> commandParams;
private String serviceName;
private String componentName;
+
+ /**
+ * Used for ignoring nagios alerts at agent
+ */
private Set<Map<String,String>> passiveInfo;
@JsonProperty("commandId")
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
index 7452a7b..5eb7ead 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
@@ -29,6 +29,7 @@ import org.apache.ambari.server.actionmanager.ActionManager;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.MaintenanceStateHelper;
import org.apache.ambari.server.state.*;
import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
import org.apache.ambari.server.state.host.HostHeartbeatLostEvent;
@@ -52,6 +53,7 @@ public class HeartbeatMonitor implements Runnable {
private final AmbariMetaInfo ambariMetaInfo;
private final AmbariManagementController ambariManagementController;
private final Configuration configuration;
+ private final MaintenanceStateHelper maintenanceStateHelper;
public HeartbeatMonitor(Clusters clusters, ActionQueue aq, ActionManager am,
int threadWakeupInterval, Injector injector) {
@@ -64,6 +66,7 @@ public class HeartbeatMonitor implements Runnable {
this.ambariManagementController = injector.getInstance(
AmbariManagementController.class);
this.configuration = injector.getInstance(Configuration.class);
+ this.maintenanceStateHelper = injector.getInstance(MaintenanceStateHelper.class);
}
public void shutdown() {
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonRequestBodyParser.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonRequestBodyParser.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonRequestBodyParser.java
index 3443b72..0d75a8e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonRequestBodyParser.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/JsonRequestBodyParser.java
@@ -133,9 +133,13 @@ public class JsonRequestBodyParser implements RequestBodyParser {
}
} else {
// field
- if (path.startsWith(REQUEST_INFO_PATH)) {
+ if (path.equals(REQUEST_INFO_PATH)) {
+ requestInfoProps.put(PropertyHelper.getPropertyId(null, name),
+ child.asText());
+ } else if (path.startsWith(REQUEST_INFO_PATH)) {
requestInfoProps.put(PropertyHelper.getPropertyId(
- path.substring(REQUEST_INFO_PATH.length()), name), child.asText());
+ path.substring(REQUEST_INFO_PATH.length() + SLASH.length()), name),
+ child.asText());
} else {
propertySet.getProperties().put(PropertyHelper.getPropertyId(
path.equals(BODY_TITLE) ? "" : path, name), child.asText());
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/RequestBodyParser.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/RequestBodyParser.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/RequestBodyParser.java
index aefcb52..6e9c4c5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/RequestBodyParser.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/parsers/RequestBodyParser.java
@@ -33,6 +33,11 @@ public interface RequestBodyParser {
public static final String REQUEST_INFO_PATH = "RequestInfo";
/**
+ * Slash symbol
+ */
+ public static final String SLASH = "/";
+
+ /**
* Category path to ignore parsing of the child node
*/
public static final String REQUEST_BLOB_TITLE = "RequestBodyInfo";
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
index 37a404f..ee64c0d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
@@ -20,6 +20,7 @@
package org.apache.ambari.server.controller;
import org.apache.ambari.server.actionmanager.TargetHostType;
+import org.apache.ambari.server.controller.internal.RequestOperationLevel;
import org.apache.ambari.server.controller.internal.RequestResourceFilter;
import java.util.List;
@@ -32,6 +33,7 @@ public class ActionExecutionContext {
private final String clusterName;
private final String actionName;
private List<RequestResourceFilter> resourceFilters;
+ private RequestOperationLevel operationLevel;
private Map<String, String> parameters;
private TargetHostType targetType;
private Short timeout;
@@ -97,6 +99,14 @@ public class ActionExecutionContext {
return resourceFilters;
}
+ public RequestOperationLevel getOperationLevel() {
+ return operationLevel;
+ }
+
+ public void setOperationLevel(RequestOperationLevel operationLevel) {
+ this.operationLevel = operationLevel;
+ }
+
public String getExpectedServiceName() {
return expectedServiceName;
}
@@ -111,6 +121,7 @@ public class ActionExecutionContext {
"clusterName='" + clusterName + '\'' +
", actionName='" + actionName + '\'' +
", resourceFilters=" + resourceFilters +
+ ", operationLevel=" + operationLevel +
", parameters=" + parameters +
", targetType=" + targetType +
", timeout=" + timeout +
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index 89eaa40..d2ba553 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -30,6 +30,7 @@ import org.apache.ambari.server.actionmanager.Stage;
import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.internal.RequestOperationLevel;
import org.apache.ambari.server.controller.internal.RequestResourceFilter;
import org.apache.ambari.server.metadata.ActionMetadata;
import org.apache.ambari.server.state.Cluster;
@@ -188,11 +189,18 @@ public class AmbariCustomCommandExecutionHelper {
sb.append(" ");
sb.append(resourceFilter.getServiceName());
}
+
if (resourceFilter.getComponentName() != null
&& !resourceFilter.getComponentName().equals("")) {
sb.append("/");
sb.append(resourceFilter.getComponentName());
}
+
+ RequestOperationLevel level = actionRequest.getOperationLevel();
+ if (level != null) {
+ sb.append("op_lvl: " + level.getLevel().toString());
+ }
+
return sb.toString();
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 6c88429..bdfd436 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -82,6 +82,7 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VER
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.internal.RequestResourceFilter;
+import org.apache.ambari.server.controller.internal.RequestOperationLevel;
import org.apache.ambari.server.controller.internal.RequestStageContainer;
import org.apache.ambari.server.controller.internal.URLStreamProvider;
import org.apache.ambari.server.customactions.ActionDefinition;
@@ -1281,15 +1282,11 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
commandParams = new TreeMap<String, String>();
}
commandParams.put(SCHEMA_VERSION, serviceInfo.getSchemaVersion());
-
-
- // Get command script info for custom command/custom action
+ String commandTimeout = configs.getDefaultAgentTaskTimeout();
/*
- * TODO: Custom actions are not supported yet, that's why we just pass
- * component main commandScript to agent. This script is only used for
- * default commads like INSTALL/STOP/START/CONFIGURE
+ * This script is only used for
+ * default commands like INSTALL/STOP/START
*/
- String commandTimeout = configs.getDefaultAgentTaskTimeout();
CommandScriptDefinition script = componentInfo.getCommandScript();
if (serviceInfo.getSchemaVersion().equals(AmbariMetaInfo.SCHEMA_VERSION_2)) {
if (script != null) {
@@ -1394,26 +1391,33 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
}
private ActionExecutionContext getActionExecutionContext
- (ExecuteActionRequest actionRequest) throws AmbariException {
-
+ (ExecuteActionRequest actionRequest) throws AmbariException {
+ RequestOperationLevel operationLevel = actionRequest.getOperationLevel();
if (actionRequest.isCommand()) {
- return new ActionExecutionContext(actionRequest.getClusterName(),
- actionRequest.getCommandName(), actionRequest.getResourceFilters(),
- actionRequest.getParameters());
- } else {
+ ActionExecutionContext actionExecutionContext =
+ new ActionExecutionContext(actionRequest.getClusterName(),
+ actionRequest.getCommandName(), actionRequest.getResourceFilters(),
+ actionRequest.getParameters());
+ actionExecutionContext.setOperationLevel(operationLevel);
+ return actionExecutionContext;
+ } else { // If action
- ActionDefinition actionDef = ambariMetaInfo.getActionDefinition(actionRequest.getActionName());
+ ActionDefinition actionDef =
+ ambariMetaInfo.getActionDefinition(actionRequest.getActionName());
- if (actionDef == null) {
- throw new AmbariException("Action " + actionRequest.getActionName() + " does not exist");
- }
-
- return new ActionExecutionContext(actionRequest.getClusterName(),
- actionRequest.getActionName(), actionRequest.getResourceFilters(),
- actionRequest.getParameters(), actionDef.getTargetType(),
- actionDef.getDefaultTimeout(), actionDef.getTargetService(),
- actionDef.getTargetComponent());
+ if (actionDef == null) {
+ throw new AmbariException(
+ "Action " + actionRequest.getActionName() + " does not exist");
+ }
+ ActionExecutionContext actionExecutionContext =
+ new ActionExecutionContext(actionRequest.getClusterName(),
+ actionRequest.getActionName(), actionRequest.getResourceFilters(),
+ actionRequest.getParameters(), actionDef.getTargetType(),
+ actionDef.getDefaultTimeout(), actionDef.getTargetService(),
+ actionDef.getTargetComponent());
+ actionExecutionContext.setOperationLevel(operationLevel);
+ return actionExecutionContext;
}
}
@@ -1503,7 +1507,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
nowTimestamp,
scHost.getDesiredStackVersion().getStackId());
} else if (oldSchState == State.STARTED
- || oldSchState == State.INSTALLED
+// TODO: oldSchState == State.INSTALLED is always false, looks like a bug
+// || oldSchState == State.INSTALLED
|| oldSchState == State.STOPPING) {
roleCommand = RoleCommand.STOP;
event = new ServiceComponentHostStopEvent(
@@ -2045,6 +2050,11 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
return serviceName;
}
+
+ /**
+ * Checks whether a transition to the new state can be performed directly,
+ * without requiring any additional actions
+ */
private boolean isDirectTransition(State oldState, State newState) {
switch (newState) {
case INSTALLED:
@@ -2228,7 +2238,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
= new RequestResourceFilter(serviceName, "RESOURCEMANAGER", null);
ExecuteActionRequest actionRequest = new ExecuteActionRequest(
clusterName, "DECOMMISSION", null,
- Collections.singletonList(resourceFilter), params);
+ Collections.singletonList(resourceFilter), null, params);
response = createAction(actionRequest, requestProperties);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
index 3b5afcd..5e42276 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
@@ -17,6 +17,7 @@
*/
package org.apache.ambari.server.controller;
+import org.apache.ambari.server.controller.internal.RequestOperationLevel;
import org.apache.ambari.server.controller.internal.RequestResourceFilter;
import java.util.ArrayList;
@@ -31,18 +32,21 @@ public class ExecuteActionRequest {
private final String clusterName;
private final String commandName;
private final List<RequestResourceFilter> resourceFilters;
+ private RequestOperationLevel operationLevel = null;
private String actionName;
private Map<String, String> parameters;
public ExecuteActionRequest(String clusterName, String commandName,
String actionName,
List<RequestResourceFilter> resourceFilters,
+ RequestOperationLevel operationLevel,
Map<String, String> parameters) {
this(clusterName, commandName, parameters);
this.actionName = actionName;
if (resourceFilters != null) {
this.resourceFilters.addAll(resourceFilters);
}
+ this.operationLevel = operationLevel;
}
/**
@@ -76,6 +80,10 @@ public class ExecuteActionRequest {
return resourceFilters;
}
+ public RequestOperationLevel getOperationLevel() {
+ return operationLevel;
+ }
+
public Map<String, String> getParameters() {
return parameters;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
index 23583b5..3e4b149 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
@@ -137,7 +137,6 @@ public class MaintenanceStateHelper {
* {@link MaintenanceState#IMPLIED_FROM_SERVICE_AND_HOST})
*/
public Set<Map<String, String>> getMaintenanceHostComponents(Clusters clusters, Cluster cluster) throws AmbariException {
-
Set<Map<String, String>> set = new HashSet<Map<String, String>>();
Map<String, Host> hosts = clusters.getHostsForCluster(cluster.getClusterName());
@@ -187,7 +186,8 @@ public class MaintenanceStateHelper {
for (String clusterName : clusterNames) {
ExecuteActionRequest actionRequest = new ExecuteActionRequest(
clusterName, null, NAGIOS_ACTION_NAME,
- Collections.singletonList(resourceFilter), params);
+ Collections.singletonList(resourceFilter),
+ null, params);
if (null == response) {
response = amc.createAction(actionRequest, requestProperties);
@@ -200,45 +200,41 @@ public class MaintenanceStateHelper {
* Determine based on the requesting Resource level and the state of the
* operand whether to allow operations on it.
*
- * @param sourceType Request Source: {CLUSTER, SERVICE, HOSTCOMPONENT, HOST}
+ * @param operationLevel Request Source: {CLUSTER, SERVICE, HOSTCOMPONENT, HOST}
* @param sch HostComponent which is the operand of the operation
* @return
* @throws AmbariException
*/
- public boolean isOperationAllowed(Resource.Type sourceType,
+ public boolean isOperationAllowed(Resource.Type operationLevel,
ServiceComponentHost sch) throws AmbariException {
MaintenanceState maintenanceState = sch.getMaintenanceState();
- if (sourceType.equals(Resource.Type.Cluster)) {
-
- if (maintenanceState.equals(MaintenanceState.OFF)) {
- return true;
- }
-
- } else if (sourceType.equals(Resource.Type.Service)) {
-
- if (maintenanceState.equals(MaintenanceState.IMPLIED_FROM_SERVICE)
- || maintenanceState.equals(MaintenanceState.OFF)) {
- return true;
- }
-
- } else if (sourceType.equals(Resource.Type.Host)) {
-
- if (maintenanceState.equals(MaintenanceState.IMPLIED_FROM_HOST)
- || maintenanceState.equals(MaintenanceState.OFF)) {
+ switch (operationLevel.getInternalType()) {
+ case Cluster:
+ if (maintenanceState.equals(MaintenanceState.OFF)) {
+ return true;
+ }
+ break;
+ case Service:
+ if (maintenanceState.equals(MaintenanceState.IMPLIED_FROM_SERVICE)
+ || maintenanceState.equals(MaintenanceState.OFF)) {
+ return true;
+ }
+ break;
+ case Host:
+ if (maintenanceState.equals(MaintenanceState.IMPLIED_FROM_HOST)
+ || maintenanceState.equals(MaintenanceState.OFF)) {
+ return true;
+ }
+ break;
+ case HostComponent: {
return true;
}
-
- } else if (sourceType.equals(Resource.Type.HostComponent)) {
-
- return true;
-
- } else {
- LOG.warn("Unsupported Resource type, type = " + sourceType);
+ default:
+ LOG.warn("Unsupported Resource type, type = " + operationLevel);
+ break;
}
-
return false;
}
-
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestOperationLevel.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestOperationLevel.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestOperationLevel.java
new file mode 100644
index 0000000..a74b8a1
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestOperationLevel.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller.internal;
+
+import org.apache.ambari.server.controller.spi.Resource;
+
+/**
+ * Operation level is specified along with some requests. It identifies
+ * the logical level, at which the operation is executed.
+ */
+public class RequestOperationLevel {
+
+ /**
+ * Conversion table is used to convert user input into our internal names
+ * of resources, defined at
+ * org.apache.ambari.server.controller.spi.Resource.Type
+ */
+ private static final String [][] LEVEL_ALIASES= new String [][]{
+ // FORMAT: <external alias> , <internal alias>
+ {"CLUSTER", "Cluster"},
+ {"SERVICE", "Service"},
+ {"HOST", "Host"},
+ {"HOST_COMPONENT", "HostComponent"},
+ };
+
+ private static final int ALIAS_COLUMN = 0;
+ private static final int INTERNAL_NAME_COLUMN = 1;
+
+ /**
+ * Converts external operation level alias to an internal name
+ */
+ public static String getInternalLevelName(String external)
+ throws IllegalArgumentException{
+ String refinedAlias = external.trim().toUpperCase();
+ for (String [] pair : LEVEL_ALIASES) {
+ if (pair[ALIAS_COLUMN].equals(refinedAlias)) {
+ return pair[INTERNAL_NAME_COLUMN];
+ }
+ }
+ String message = String.format("Unknown operation level %s", external);
+ throw new IllegalArgumentException(message);
+ }
+
+ /**
+ * Converts internal operation level name to an external alias
+ */
+ public static String getExternalLevelName(String internal) {
+ for (String [] pair : LEVEL_ALIASES) {
+ if (pair[INTERNAL_NAME_COLUMN].equals(internal)) {
+ return pair[ALIAS_COLUMN];
+ }
+ }
+ // That should never happen
+ String message = String.format("Unknown internal " +
+ "operation level name %s", internal);
+ throw new IllegalArgumentException(message);
+ }
+
+ public RequestOperationLevel(Resource.Type level, String clusterName,
+ String serviceName, String hostComponentName,
+ String hostName) {
+ this.level = level;
+ this.clusterName = clusterName;
+ this.serviceName = serviceName;
+ this.hostComponentName = hostComponentName;
+ this.hostName = hostName;
+ }
+
+ /**
+ * Valid values are Cluster, Service, Host and HostComponent. Component level
+ * is identical to Service level, and that's why it is not supported
+ * as a standalone level.
+ */
+ private Resource.Type level;
+
+ // Fields below are not used as of now and reserved for future use
+
+ /**
+ * Source cluster for request. Specified for all requests
+ */
+ private String clusterName;
+
+ /**
+ * Source service for request. Specified for Service-level
+ * and HostComponent-level requests.
+ */
+ private String serviceName;
+
+ /**
+ * Source host component for request. Specified for
+ * HostComponent-level requests.
+ */
+ private String hostComponentName;
+
+ /**
+ * Source host for request. Specified for Host-level and
+ * HostComponent-level requests.
+ */
+ private String hostName;
+
+
+ public Resource.Type getLevel() {
+ return level;
+ }
+
+ public String getClusterName() {
+ return clusterName;
+ }
+
+ public String getServiceName() {
+ return serviceName;
+ }
+
+ public String getHostComponentName() {
+ return hostComponentName;
+ }
+
+ public String getHostName() {
+ return hostName;
+ }
+
+ @Override
+ public String toString() {
+ return "RequestOperationLevel{" +
+ "level=" + level +
+ ", clusterName='" + clusterName + '\'' +
+ ", serviceName='" + serviceName + '\'' +
+ ", hostComponentName='" + hostComponentName + '\'' +
+ ", hostName='" + hostName + '\'' +
+ '}';
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
index 36def40..3d76b52 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
@@ -62,6 +62,7 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
protected static final String REQUEST_TYPE_ID = "Requests/type";
protected static final String REQUEST_INPUTS_ID = "Requests/inputs";
protected static final String REQUEST_RESOURCE_FILTER_ID = "Requests/resource_filters";
+ protected static final String REQUEST_OPERATION_LEVEL_ID = "Requests/operation_level";
protected static final String REQUEST_CREATE_TIME_ID = "Requests/create_time";
protected static final String REQUEST_START_TIME_ID = "Requests/start_time";
protected static final String REQUEST_END_TIME_ID = "Requests/end_time";
@@ -82,6 +83,15 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
new HashSet<String>(Arrays.asList(new String[]{
REQUEST_ID_PROPERTY_ID}));
+ /**
+ * Operation level-related parameters
+ */
+ protected static final String OPERATION_LEVEL_ID = "operation_level/level";
+ protected static final String OPERATION_CLUSTER_ID = "operation_level/cluster_name";
+ protected static final String OPERATION_SERVICE_ID = "operation_level/service_name";
+ protected static final String OPERATION_HOSTCOMPONENT_ID = "operation_level/hostcomponent_name";
+ protected static final String OPERATION_HOST_ID = "operation_level/host_name";
+
// ----- Constructors ----------------------------------------------------
/**
@@ -171,7 +181,8 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
// Get request to execute an action/command
@SuppressWarnings("unchecked")
- private ExecuteActionRequest getActionRequest(Request request) {
+ private ExecuteActionRequest getActionRequest(Request request)
+ throws UnsupportedOperationException {
Map<String, String> requestInfoProperties = request.getRequestInfoProperties();
Map<String, Object> propertyMap = request.getProperties().iterator().next();
@@ -216,20 +227,49 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
));
}
}
+ // Extract operation level property
+ RequestOperationLevel operationLevel = null;
+ Object operationLevelObj = requestInfoProperties.get(OPERATION_LEVEL_ID);
+ if (operationLevelObj != null) {
+ Resource.Type level;
+ try {
+ String internalOpLevelNameStr =
+ RequestOperationLevel.getInternalLevelName(
+ (String)operationLevelObj);
+ level = Resource.Type.valueOf(internalOpLevelNameStr);
+ } catch (IllegalArgumentException e) {
+ String message = String.format(
+ "Wrong operation level value: %s", operationLevelObj);
+ throw new UnsupportedOperationException(message, e);
+ }
+ if (!requestInfoProperties.containsKey(OPERATION_CLUSTER_ID)) {
+ String message = String.format(
+ "Mandatory key %s for operation level is not specified",
+ OPERATION_CLUSTER_ID);
+ throw new UnsupportedOperationException(message);
+ }
+ String clusterName = requestInfoProperties.get(OPERATION_CLUSTER_ID);
+ String serviceName = requestInfoProperties.get(OPERATION_SERVICE_ID);
+ String hostComponentName =
+ requestInfoProperties.get(OPERATION_HOSTCOMPONENT_ID);
+ String hostName = requestInfoProperties.get(OPERATION_HOST_ID);
+ operationLevel = new RequestOperationLevel(level, clusterName,
+ serviceName, hostComponentName, hostName);
+ }
Map<String, String> params = new HashMap<String, String>();
- String keyPrefix = "/" + INPUTS_ID + "/";
+ String keyPrefix = INPUTS_ID + "/";
for (String key : requestInfoProperties.keySet()) {
if (key.startsWith(keyPrefix)) {
params.put(key.substring(keyPrefix.length()), requestInfoProperties.get(key));
}
}
-
return new ExecuteActionRequest(
(String) propertyMap.get(REQUEST_CLUSTER_NAME_PROPERTY_ID),
commandName,
actionName,
resourceFilterList,
+ operationLevel,
params);
}
@@ -311,6 +351,15 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
setResourceProperty(resource, REQUEST_TYPE_ID, request.getRequestType(), requestedPropertyIds);
setResourceProperty(resource, REQUEST_INPUTS_ID, request.getInputs(), requestedPropertyIds);
setResourceProperty(resource, REQUEST_RESOURCE_FILTER_ID, request.getResourceFilters(), requestedPropertyIds);
+
+ RequestOperationLevel operationLevel = request.getOperationLevel();
+ String opLevelStr = null;
+ if (operationLevel != null) {
+ opLevelStr = RequestOperationLevel.getExternalLevelName(
+ operationLevel.getLevel().toString());
+ }
+ setResourceProperty(resource, REQUEST_OPERATION_LEVEL_ID, opLevelStr, requestedPropertyIds);
+
setResourceProperty(resource, REQUEST_CREATE_TIME_ID, request.getCreateTime(), requestedPropertyIds);
setResourceProperty(resource, REQUEST_START_TIME_ID, request.getStartTime(), requestedPropertyIds);
setResourceProperty(resource, REQUEST_END_TIME_ID, request.getEndTime(), requestedPropertyIds);
@@ -377,71 +426,4 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
return resource;
}
- // Get a request resource from the given set of host role commands.
- private Resource getRequestResource(String clusterName,
- Long requestId,
- String context,
- Set<HostRoleCommand> commands,
- Set<String> requestedPropertyIds) {
- Resource resource = new ResourceImpl(Resource.Type.Request);
-
- setResourceProperty(resource, REQUEST_CLUSTER_NAME_PROPERTY_ID, clusterName, requestedPropertyIds);
- setResourceProperty(resource, REQUEST_ID_PROPERTY_ID, requestId, requestedPropertyIds);
- setResourceProperty(resource, REQUEST_CONTEXT_ID, context, requestedPropertyIds);
-
- int taskCount = commands.size();
- int completedTaskCount = 0;
- int queuedTaskCount = 0;
- int pendingTaskCount = 0;
- int failedTaskCount = 0;
- int abortedTaskCount = 0;
- int timedOutTaskCount = 0;
-
- for (HostRoleCommand hostRoleCommand : commands) {
- HostRoleStatus status = hostRoleCommand.getStatus();
- if (status.isCompletedState()) {
- completedTaskCount++;
-
- switch (status) {
- case ABORTED:
- abortedTaskCount++;
- break;
- case FAILED:
- failedTaskCount++;
- break;
- case TIMEDOUT:
- timedOutTaskCount++;
- break;
- }
- } else if (status.equals(HostRoleStatus.QUEUED)) {
- queuedTaskCount++;
- } else if (status.equals(HostRoleStatus.PENDING)) {
- pendingTaskCount++;
- }
- }
-
- int inProgressTaskCount = taskCount - completedTaskCount - queuedTaskCount - pendingTaskCount;
-
- // determine request status
- HostRoleStatus requestStatus = failedTaskCount > 0 ? HostRoleStatus.FAILED :
- abortedTaskCount > 0 ? HostRoleStatus.ABORTED :
- timedOutTaskCount > 0 ? HostRoleStatus.TIMEDOUT :
- inProgressTaskCount > 0 ? HostRoleStatus.IN_PROGRESS :
- completedTaskCount == taskCount ? HostRoleStatus.COMPLETED :
- HostRoleStatus.PENDING;
- double progressPercent =
- ((queuedTaskCount * 0.09 + inProgressTaskCount * 0.35 + completedTaskCount) / (double) taskCount) * 100.0;
-
- setResourceProperty(resource, REQUEST_STATUS_PROPERTY_ID, requestStatus.toString(), requestedPropertyIds);
- setResourceProperty(resource, REQUEST_TASK_CNT_ID, taskCount, requestedPropertyIds);
- setResourceProperty(resource, REQUEST_FAILED_TASK_CNT_ID, failedTaskCount, requestedPropertyIds);
- setResourceProperty(resource, REQUEST_ABORTED_TASK_CNT_ID, abortedTaskCount, requestedPropertyIds);
- setResourceProperty(resource, REQUEST_TIMED_OUT_TASK_CNT_ID, timedOutTaskCount, requestedPropertyIds);
- setResourceProperty(resource, REQUEST_QUEUED_TASK_CNT_ID, queuedTaskCount, requestedPropertyIds);
- setResourceProperty(resource, REQUEST_COMPLETED_TASK_CNT_ID, completedTaskCount, requestedPropertyIds);
- setResourceProperty(resource, REQUEST_PROGRESS_PERCENT_ID, progressPercent, requestedPropertyIds);
-
- return resource;
- }
-
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterStateEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterStateEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterStateEntity.java
index 14f0d55..d955a95 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterStateEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterStateEntity.java
@@ -66,7 +66,7 @@ public class ClusterStateEntity {
this.currentStackVersion = currentStackVersion;
}
- @Override
+ @Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
index e7098d2..f80ac7d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
@@ -32,6 +32,7 @@ import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
+import javax.persistence.OneToOne;
import javax.persistence.Table;
import java.util.Collection;
@@ -89,6 +90,9 @@ public class RequestEntity {
@OneToMany(mappedBy = "requestEntity", cascade = CascadeType.ALL)
private Collection<RequestResourceFilterEntity> resourceFilterEntities;
+ @OneToOne(mappedBy = "requestEntity", cascade = {CascadeType.ALL})
+ private RequestOperationLevelEntity requestOperationLevel;
+
@ManyToOne(cascade = {CascadeType.MERGE})
@JoinColumn(name = "cluster_id", referencedColumnName = "cluster_id")
private ClusterEntity cluster;
@@ -181,6 +185,14 @@ public class RequestEntity {
this.resourceFilterEntities = resourceFilterEntities;
}
+ public RequestOperationLevelEntity getRequestOperationLevel() {
+ return requestOperationLevel;
+ }
+
+ public void setRequestOperationLevel(RequestOperationLevelEntity operationLevel) {
+ this.requestOperationLevel = operationLevel;
+ }
+
public void setClusterId(Long clusterId) {
this.clusterId = clusterId;
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestOperationLevelEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestOperationLevelEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestOperationLevelEntity.java
new file mode 100644
index 0000000..b7b3133
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestOperationLevelEntity.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.orm.entities;
+
+import org.apache.ambari.server.controller.spi.Resource;
+
+import javax.persistence.Basic;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.EnumType;
+import javax.persistence.Enumerated;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.OneToOne;
+import javax.persistence.Table;
+import javax.persistence.TableGenerator;
+
+@Entity
+@Table(name = "requestoperationlevel")
+@TableGenerator(name = "operation_level_id_generator",
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ , pkColumnValue = "operation_level_id_seq"
+ , initialValue = 1
+ , allocationSize = 1
+)
+public class RequestOperationLevelEntity {
+
+ @Id
+ @Column(name = "operation_level_id", nullable = false, insertable = true, updatable = true)
+ @GeneratedValue(strategy = GenerationType.TABLE, generator = "operation_level_id_generator")
+ private Long operationLevelId;
+
+ @Column(name = "request_id", nullable = false, insertable = true, updatable = true)
+ private Long requestId;
+
+ @OneToOne
+ @JoinColumn(name = "request_id", referencedColumnName = "request_id", nullable = false, insertable = false, updatable = false)
+ private RequestEntity requestEntity;
+
+ public Long getOperationLevelId() {
+ return operationLevelId;
+ }
+
+ public void setOperationLevelId(Long operationLevelId) {
+ this.operationLevelId = operationLevelId;
+ }
+
+ @Column(name = "level_name")
+ @Basic
+ private String level;
+
+ @Column(name = "cluster_name")
+ @Basic
+ private String clusterName;
+
+ @Column(name = "service_name")
+ @Basic
+ private String serviceName;
+
+ @Column(name = "host_component_name")
+ @Basic
+ private String hostComponentName;
+
+ @Column(name = "host_name")
+ @Basic
+ private String hostName;
+
+ public String getLevel() {
+ return level;
+ }
+
+ public void setLevel(String level) {
+ this.level = level;
+ }
+
+ public String getClusterName() {
+ return clusterName;
+ }
+
+ public void setClusterName(String clusterName) {
+ this.clusterName = clusterName;
+ }
+
+ public String getServiceName() {
+ return serviceName;
+ }
+
+ public void setServiceName(String serviceName) {
+ this.serviceName = serviceName;
+ }
+
+ public String getHostComponentName() {
+ return hostComponentName;
+ }
+
+ public void setHostComponentName(String hostComponentName) {
+ this.hostComponentName = hostComponentName;
+ }
+
+ public String getHostName() {
+ return hostName;
+ }
+
+ public void setHostName(String hostName) {
+ this.hostName = hostName;
+ }
+
+ public Long getRequestId() {
+ return requestId;
+ }
+
+ public void setRequestId(Long requestId) {
+ this.requestId = requestId;
+ }
+
+ public RequestEntity getRequestEntity() {
+ return requestEntity;
+ }
+
+ public void setRequestEntity(RequestEntity request) {
+ this.requestEntity = request;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
index d04dc26..51d64d3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
@@ -164,6 +164,7 @@ public class SchemaUpgradeHelper {
catalogBinder.addBinding().to(UpgradeCatalog150.class);
catalogBinder.addBinding().to(UpgradeCatalog151.class);
catalogBinder.addBinding().to(UpgradeCatalog160.class);
+ catalogBinder.addBinding().to(UpgradeCatalog161.class);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog160.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog160.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog160.java
index c86c3ed..5c91c0d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog160.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog160.java
@@ -78,7 +78,7 @@ public class UpgradeCatalog160 extends AbstractUpgradeCatalog {
restartRequiredColumn.setDefaultValue(Boolean.FALSE);
}
dbAccessor.addColumn("hostcomponentdesiredstate",
- restartRequiredColumn);
+ new DBAccessor.DBColumnInfo("restart_required", Boolean.class, 1, 0, false));
// ========================================================================
// Add constraints
@@ -98,7 +98,7 @@ public class UpgradeCatalog160 extends AbstractUpgradeCatalog {
protected void executeDMLUpdates() throws AmbariException, SQLException {
String dbType = getDbType();
- // Add new sequences for view entity
+ //add new sequences for view entity
String valueColumnName = "\"value\"";
if (Configuration.ORACLE_DB_NAME.equals(dbType) || Configuration.MYSQL_DB_NAME.equals(dbType)) {
valueColumnName = "value";
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog161.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog161.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog161.java
new file mode 100644
index 0000000..8b4a199
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog161.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.upgrade;
+
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.configuration.Configuration;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
+
+/**
+ * Upgrade catalog for version 1.6.1.
+ */
+public class UpgradeCatalog161 extends AbstractUpgradeCatalog {
+
+ // ----- Constructors ------------------------------------------------------
+
+ @Inject
+ public UpgradeCatalog161(Injector injector) {
+ super(injector);
+ }
+
+
+ // ----- AbstractUpgradeCatalog --------------------------------------------
+
+ @Override
+ protected void executeDDLUpdates() throws AmbariException, SQLException {
+ List<DBColumnInfo> columns;
+
+ // Operation level
+ columns = new ArrayList<DBColumnInfo>();
+ columns.add(new DBColumnInfo("operation_level_id", Long.class, null, null, false));
+ columns.add(new DBColumnInfo("request_id", Long.class, null, null, false));
+ columns.add(new DBColumnInfo("level_name", String.class, 255, null, true));
+ columns.add(new DBColumnInfo("cluster_name", String.class, 255, null, true));
+ columns.add(new DBColumnInfo("service_name", String.class, 255, null, true));
+ columns.add(new DBColumnInfo("host_component_name", String.class, 255, null, true));
+ columns.add(new DBColumnInfo("host_name", String.class, 255, null, true));
+
+ dbAccessor.createTable("requestoperationlevel", columns, "operation_level_id");
+
+ // ========================================================================
+ // Add constraints
+ dbAccessor.addFKConstraint("requestoperationlevel", "FK_req_op_level_req_id",
+ "request_id", "request", "request_id", true);
+ }
+
+
+ // ----- UpgradeCatalog ----------------------------------------------------
+
+ @Override
+ protected void executeDMLUpdates() throws AmbariException, SQLException {
+ String dbType = getDbType();
+
+ String valueColumnName = "\"value\"";
+ if (Configuration.ORACLE_DB_NAME.equals(dbType) || Configuration.MYSQL_DB_NAME.equals(dbType)) {
+ valueColumnName = "value";
+ }
+ //add new sequences for operation level
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, " + valueColumnName + ") " +
+ "VALUES('operation_level_id_seq', 1)", true);
+ }
+
+ @Override
+ public String getTargetVersion() {
+ return "1.6.1";
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index bac1615..ce01146 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -44,6 +44,7 @@ CREATE TABLE role_success_criteria (role VARCHAR(255) NOT NULL, request_id BIGIN
CREATE TABLE stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info LONGBLOB, PRIMARY KEY (stage_id, request_id));
CREATE TABLE request (request_id BIGINT NOT NULL, cluster_id BIGINT, request_schedule_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs LONGBLOB, request_context VARCHAR(255), request_type VARCHAR(255), start_time BIGINT NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));
CREATE TABLE requestresourcefilter (filter_id BIGINT NOT NULL, request_id BIGINT NOT NULL, service_name VARCHAR(255), component_name VARCHAR(255), hosts LONGBLOB, PRIMARY KEY (filter_id));
+CREATE TABLE requestoperationlevel (operation_level_id BIGINT NOT NULL, request_id BIGINT NOT NULL, level_name VARCHAR(255), cluster_name VARCHAR(255), service_name VARCHAR(255), host_component_name VARCHAR(255), host_name VARCHAR(255), PRIMARY KEY (operation_level_id));
CREATE TABLE key_value_store (`key` VARCHAR(255), `value` LONGTEXT, PRIMARY KEY (`key`));
CREATE TABLE clusterconfigmapping (type_name VARCHAR(255) NOT NULL, create_timestamp BIGINT NOT NULL, cluster_id BIGINT NOT NULL, selected INTEGER NOT NULL DEFAULT 0, version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL DEFAULT '_db', PRIMARY KEY (type_name, create_timestamp, cluster_id));
CREATE TABLE hostconfigmapping (create_timestamp BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, cluster_id BIGINT NOT NULL, type_name VARCHAR(255) NOT NULL, selected INTEGER NOT NULL DEFAULT 0, service_name VARCHAR(255), version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL DEFAULT '_db', PRIMARY KEY (create_timestamp, host_name, cluster_id, type_name));
@@ -106,6 +107,7 @@ ALTER TABLE hostgroup_component ADD CONSTRAINT FK_hgc_blueprint_name FOREIGN KEY
ALTER TABLE blueprint_configuration ADD CONSTRAINT FK_cfg_blueprint_name FOREIGN KEY (blueprint_name) REFERENCES blueprint(blueprint_name);
ALTER TABLE hostgroup_configuration ADD CONSTRAINT FK_hg_cfg_bp_hg_name FOREIGN KEY (blueprint_name, hostgroup_name) REFERENCES hostgroup (blueprint_name, name);
ALTER TABLE requestresourcefilter ADD CONSTRAINT FK_reqresfilter_req_id FOREIGN KEY (request_id) REFERENCES request (request_id);
+ALTER TABLE requestoperationlevel ADD CONSTRAINT FK_req_op_level_req_id FOREIGN KEY (request_id) REFERENCES request (request_id);
ALTER TABLE viewparameter ADD CONSTRAINT FK_viewparam_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
ALTER TABLE viewresource ADD CONSTRAINT FK_viewres_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
ALTER TABLE viewinstance ADD CONSTRAINT FK_viewinst_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
@@ -121,6 +123,7 @@ INSERT INTO ambari_sequences(sequence_name, value) values ('configgroup_id_seq',
INSERT INTO ambari_sequences(sequence_name, value) values ('requestschedule_id_seq', 1);
INSERT INTO ambari_sequences(sequence_name, value) values ('resourcefilter_id_seq', 1);
INSERT INTO ambari_sequences(sequence_name, value) values ('viewentity_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('operation_level_id_seq', 1);
insert into roles(role_name)
select 'admin'
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 2b7f4d5..c0472bd 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -34,6 +34,7 @@ CREATE TABLE role_success_criteria (role VARCHAR2(255) NOT NULL, request_id NUMB
CREATE TABLE stage (stage_id NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19) NULL, log_info VARCHAR2(255) NULL, request_context VARCHAR2(255) NULL, cluster_host_info BLOB NOT NULL, PRIMARY KEY (stage_id, request_id));
CREATE TABLE request (request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19), request_schedule_id NUMBER(19), command_name VARCHAR(255), create_time NUMBER(19) NOT NULL, end_time NUMBER(19) NOT NULL, inputs BLOB, request_context VARCHAR(255), request_type VARCHAR(255), start_time NUMBER(19) NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));
CREATE TABLE requestresourcefilter (filter_id NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, service_name VARCHAR2(255), component_name VARCHAR2(255), hosts BLOB, PRIMARY KEY (filter_id));
+CREATE TABLE requestoperationlevel (operation_level_id NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, level_name VARCHAR2(255), cluster_name VARCHAR2(255), service_name VARCHAR2(255), host_component_name VARCHAR2(255), host_name VARCHAR2(255), PRIMARY KEY (operation_level_id));
CREATE TABLE key_value_store ("key" VARCHAR2(255) NOT NULL, "value" CLOB NULL, PRIMARY KEY ("key"));
CREATE TABLE clusterconfigmapping (type_name VARCHAR2(255) NOT NULL, create_timestamp NUMBER(19) NOT NULL, cluster_id NUMBER(19) NOT NULL, selected NUMBER(10) NOT NULL, version_tag VARCHAR2(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', PRIMARY KEY (type_name, create_timestamp, cluster_id));
CREATE TABLE hostconfigmapping (create_timestamp NUMBER(19) NOT NULL, host_name VARCHAR2(255) NOT NULL, cluster_id NUMBER(19) NOT NULL, type_name VARCHAR2(255) NOT NULL, selected NUMBER(10) NOT NULL, service_name VARCHAR2(255) NULL, version_tag VARCHAR2(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', PRIMARY KEY (create_timestamp, host_name, cluster_id, type_name));
@@ -96,6 +97,7 @@ ALTER TABLE hostgroup_component ADD CONSTRAINT FK_hgc_blueprint_name FOREIGN KEY
ALTER TABLE blueprint_configuration ADD CONSTRAINT FK_cfg_blueprint_name FOREIGN KEY (blueprint_name) REFERENCES blueprint(blueprint_name);
ALTER TABLE hostgroup_configuration ADD CONSTRAINT FK_hg_cfg_bp_hg_name FOREIGN KEY (blueprint_name, hostgroup_name) REFERENCES hostgroup(blueprint_name, name);
ALTER TABLE requestresourcefilter ADD CONSTRAINT FK_reqresfilter_req_id FOREIGN KEY (request_id) REFERENCES request (request_id);
+ALTER TABLE requestoperationlevel ADD CONSTRAINT FK_req_op_level_req_id FOREIGN KEY (request_id) REFERENCES request (request_id);
ALTER TABLE viewparameter ADD CONSTRAINT FK_viewparam_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
ALTER TABLE viewresource ADD CONSTRAINT FK_viewres_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
ALTER TABLE viewinstance ADD CONSTRAINT FK_viewinst_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
@@ -110,6 +112,7 @@ INSERT INTO ambari_sequences(sequence_name, value) values ('configgroup_id_seq',
INSERT INTO ambari_sequences(sequence_name, value) values ('requestschedule_id_seq', 1);
INSERT INTO ambari_sequences(sequence_name, value) values ('resourcefilter_id_seq', 1);
INSERT INTO ambari_sequences(sequence_name, value) values ('viewentity_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, value) values ('operation_level_id_seq', 1);
INSERT INTO metainfo("metainfo_key", "metainfo_value") values ('version', '${ambariVersion}');
insert into Roles(role_name)
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index f12299f..5792556 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -55,6 +55,8 @@ CREATE TABLE request (request_id BIGINT NOT NULL, cluster_id BIGINT, command_nam
CREATE TABLE requestresourcefilter (filter_id BIGINT NOT NULL, request_id BIGINT NOT NULL, service_name VARCHAR(255), component_name VARCHAR(255), hosts BYTEA, PRIMARY KEY (filter_id));
+CREATE TABLE requestoperationlevel (operation_level_id BIGINT NOT NULL, request_id BIGINT NOT NULL, level_name VARCHAR(255), cluster_name VARCHAR(255), service_name VARCHAR(255), host_component_name VARCHAR(255), host_name VARCHAR(255), PRIMARY KEY (operation_level_id));
+
CREATE TABLE ClusterHostMapping (cluster_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, host_name));
CREATE TABLE user_roles (role_name VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, PRIMARY KEY (role_name, user_id));
@@ -128,6 +130,7 @@ ALTER TABLE hostgroup_component ADD CONSTRAINT FK_hgc_blueprint_name FOREIGN KEY
ALTER TABLE blueprint_configuration ADD CONSTRAINT FK_cfg_blueprint_name FOREIGN KEY (blueprint_name) REFERENCES blueprint(blueprint_name);
ALTER TABLE hostgroup_configuration ADD CONSTRAINT FK_hg_cfg_bp_hg_name FOREIGN KEY (blueprint_name, hostgroup_name) REFERENCES hostgroup (blueprint_name, name);
ALTER TABLE requestresourcefilter ADD CONSTRAINT FK_reqresfilter_req_id FOREIGN KEY (request_id) REFERENCES request (request_id);
+ALTER TABLE requestoperationlevel ADD CONSTRAINT FK_req_op_level_req_id FOREIGN KEY (request_id) REFERENCES request (request_id);
ALTER TABLE viewparameter ADD CONSTRAINT FK_viewparam_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
ALTER TABLE viewresource ADD CONSTRAINT FK_viewres_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
ALTER TABLE viewinstance ADD CONSTRAINT FK_viewinst_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
@@ -151,7 +154,9 @@ BEGIN;
union all
select 'resourcefilter_id_seq', 1
union all
- select 'viewentity_id_seq', 0;
+ select 'viewentity_id_seq', 0
+ union all
+ select 'operation_level_id_seq', 1;
INSERT INTO Roles (role_name)
SELECT 'admin'
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index cbf8579..8c07e63 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -85,6 +85,9 @@ GRANT ALL PRIVILEGES ON TABLE ambari.request TO :username;
CREATE TABLE ambari.requestresourcefilter (filter_id BIGINT NOT NULL, request_id BIGINT NOT NULL, service_name VARCHAR(255), component_name VARCHAR(255), hosts BYTEA, PRIMARY KEY (filter_id));
GRANT ALL PRIVILEGES ON TABLE ambari.requestresourcefilter TO :username;
+CREATE TABLE ambari.requestoperationlevel (operation_level_id BIGINT NOT NULL, request_id BIGINT NOT NULL, level_name VARCHAR(255), cluster_name VARCHAR(255), service_name VARCHAR(255), host_component_name VARCHAR(255), host_name VARCHAR(255), PRIMARY KEY (operation_level_id));
+GRANT ALL PRIVILEGES ON TABLE ambari.requestoperationlevel TO :username;
+
CREATE TABLE ambari.ClusterHostMapping (cluster_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, host_name));
GRANT ALL PRIVILEGES ON TABLE ambari.ClusterHostMapping TO :username;
@@ -181,6 +184,7 @@ ALTER TABLE ambari.hostgroup_component ADD CONSTRAINT FK_hgc_blueprint_name FORE
ALTER TABLE ambari.blueprint_configuration ADD CONSTRAINT FK_cfg_blueprint_name FOREIGN KEY (blueprint_name) REFERENCES ambari.blueprint(blueprint_name);
ALTER TABLE ambari.hostgroup_configuration ADD CONSTRAINT FK_hg_cfg_bp_hg_name FOREIGN KEY (blueprint_name, hostgroup_name) REFERENCES ambari.hostgroup (blueprint_name, name);
ALTER TABLE ambari.requestresourcefilter ADD CONSTRAINT FK_reqresfilter_req_id FOREIGN KEY (request_id) REFERENCES ambari.request (request_id);
+ALTER TABLE ambari.requestoperationlevel ADD CONSTRAINT FK_req_op_level_req_id FOREIGN KEY (request_id) REFERENCES ambari.request (request_id);
ALTER TABLE ambari.viewparameter ADD CONSTRAINT FK_viewparam_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
ALTER TABLE ambari.viewresource ADD CONSTRAINT FK_viewres_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
ALTER TABLE ambari.viewinstance ADD CONSTRAINT FK_viewinst_view_name FOREIGN KEY (view_name) REFERENCES viewmain(view_name);
@@ -204,7 +208,9 @@ INSERT INTO ambari.ambari_sequences (sequence_name, "value")
union all
select 'resourcefilter_id_seq', 1
union all
- select 'viewentity_id_seq', 0;
+ select 'viewentity_id_seq', 0
+ union all
+ select 'operation_level_id_seq', 1;
INSERT INTO ambari.Roles (role_name)
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/META-INF/persistence.xml b/ambari-server/src/main/resources/META-INF/persistence.xml
index 36cafe2..9f3dcac 100644
--- a/ambari-server/src/main/resources/META-INF/persistence.xml
+++ b/ambari-server/src/main/resources/META-INF/persistence.xml
@@ -46,6 +46,7 @@
<class>org.apache.ambari.server.orm.entities.HostGroupComponentEntity</class>
<class>org.apache.ambari.server.orm.entities.HostGroupConfigEntity</class>
<class>org.apache.ambari.server.orm.entities.RequestResourceFilterEntity</class>
+ <class>org.apache.ambari.server.orm.entities.RequestOperationLevelEntity</class>
<class>org.apache.ambari.server.orm.entities.ViewEntity</class>
<class>org.apache.ambari.server.orm.entities.ViewInstanceDataEntity</class>
<class>org.apache.ambari.server.orm.entities.ViewInstanceEntity</class>
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index f92d238..a529950 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -104,6 +104,7 @@
"Requests/type",
"Requests/inputs",
"Requests/resource_filters",
+ "Requests/operation_level",
"Requests/create_time",
"Requests/start_time",
"Requests/end_time",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
index 5ad1e77..efcdd9b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
@@ -412,7 +412,7 @@ public class TestActionDBAccessorImpl {
List<RequestResourceFilter> resourceFilters = new
ArrayList<RequestResourceFilter>() {{ add(resourceFilter); }};
ExecuteActionRequest executeActionRequest = new ExecuteActionRequest
- ("cluster1", null, actionName, resourceFilters, null);
+ ("cluster1", null, actionName, resourceFilters, null, null);
Request request = new Request(stages, clusters);
db.persistActions(request);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5dff4516/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 2122a8b..96a93c8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -2348,7 +2348,7 @@ public class AmbariManagementControllerTest {
resourceFilters.add(resourceFilter);
ExecuteActionRequest request = new ExecuteActionRequest(clusterName,
- "DECOMMISSION", null, resourceFilters, params);
+ "DECOMMISSION", null, resourceFilters, null, params);
Map<String, String> requestProperties = new HashMap<String, String>();
requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
@@ -2417,7 +2417,7 @@ public class AmbariManagementControllerTest {
put("included_hosts", "h2");
}};
request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null,
- resourceFilters, params);
+ resourceFilters, null, params);
response = controller.createAction(request,
requestProperties);
@@ -3807,7 +3807,7 @@ public class AmbariManagementControllerTest {
List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
resourceFilters.add(resourceFilter);
- ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+ ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, null, params);
RequestStatusResponse response = controller.createAction(actionRequest, requestProperties);
assertEquals(1, response.getTasks().size());
ShortTaskStatus taskStatus = response.getTasks().get(0);
@@ -3829,7 +3829,7 @@ public class AmbariManagementControllerTest {
resourceFilters.clear();
resourceFilter = new RequestResourceFilter("", "", null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, null, params);
response = controller.createAction(actionRequest, requestProperties);
assertEquals(2, response.getTasks().size());
@@ -3857,7 +3857,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("", "", hosts);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, null, params);
response = controller.createAction(actionRequest, requestProperties);
assertEquals(1, response.getTasks().size());
taskStatus = response.getTasks().get(0);
@@ -4144,7 +4144,7 @@ public class AmbariManagementControllerTest {
List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Unsupported action DECOMMISSION for Service: HDFS and Component: HDFS_CLIENT");
@@ -4152,7 +4152,7 @@ public class AmbariManagementControllerTest {
resourceFilters.clear();
resourceFilter = new RequestResourceFilter("HDFS", null, null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", null, "DECOMMISSION_DATANODE", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "DECOMMISSION_DATANODE", resourceFilters, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Action DECOMMISSION_DATANODE does not exist");
@@ -4160,7 +4160,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("YARN", "RESOURCEMANAGER", null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Service not found, clusterName=c1, serviceName=YARN");
@@ -4173,7 +4173,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
+ actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, null, params2);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Same host cannot be specified for inclusion as well as exclusion. Hosts: [h1]");
@@ -4186,7 +4186,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
+ actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, null, params2);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Component HDFS_CLIENT is not supported for decommissioning.");
@@ -4196,7 +4196,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", hosts);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
+ actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, null, params2);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Decommission command cannot be issued with target host(s) specified.");
@@ -4208,7 +4208,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
+ actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, null, params2);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Component DATANODE on host h1 cannot be decommissioned as its not in STARTED state");
@@ -4216,7 +4216,7 @@ public class AmbariManagementControllerTest {
put("excluded_hosts", "h1 ");
put("mark_draining_only", "true");
}};
- actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
+ actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, null, params2);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"mark_draining_only is not a valid parameter for NAMENODE");
@@ -4236,16 +4236,16 @@ public class AmbariManagementControllerTest {
"a4", ActionType.SYSTEM, "", "HIVE", "", "Does file exist",
TargetHostType.ANY, Short.valueOf("100")));
- actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null);
+ actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null, null);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Action a1 requires input 'test' that is not provided");
- actionRequest = new ExecuteActionRequest("c1", null, "a1", null, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Action a1 requires input 'dirName' that is not provided");
params.put("dirName", "dirName");
- actionRequest = new ExecuteActionRequest("c1", null, "a1", null, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Action a1 requires explicit target host(s)");
@@ -4253,7 +4253,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("HIVE", null, null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Action a2 targets service HIVE that does not match with expected HDFS");
@@ -4261,7 +4261,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("HDFS", "HDFS_CLIENT", null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Action a2 targets component HDFS_CLIENT that does not match with expected DATANODE");
@@ -4269,7 +4269,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("HDFS2", "HDFS_CLIENT", null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Action a1 targets service HDFS2 that does not exist");
@@ -4277,7 +4277,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("HDFS", "HDFS_CLIENT2", null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Action a1 targets component HDFS_CLIENT2 that does not exist");
@@ -4285,7 +4285,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("", "HDFS_CLIENT2", null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Action a1 targets component HDFS_CLIENT2 without specifying the target service");
@@ -4294,7 +4294,7 @@ public class AmbariManagementControllerTest {
resourceFilters.add(resourceFilter);
// targets a service that is not a member of the stack (e.g. MR not in HDP-2)
- actionRequest = new ExecuteActionRequest("c1", null, "a3", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a3", resourceFilters, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Action a3 targets service MAPREDUCE that does not exist");
@@ -4304,7 +4304,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("", "", hosts);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Request specifies host h6 but its not a valid host based on the target service=HDFS and component=DATANODE");
@@ -4312,7 +4312,7 @@ public class AmbariManagementControllerTest {
resourceFilter = new RequestResourceFilter("HIVE", "", null);
resourceFilters.add(resourceFilter);
- actionRequest = new ExecuteActionRequest("c1", null, "a4", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a4", resourceFilters, null, params);
expectActionCreationErrorWithMessage(actionRequest, requestProperties,
"Suitable hosts not found, component=, service=HIVE, cluster=c1, actionName=a4");
@@ -6174,7 +6174,7 @@ public class AmbariManagementControllerTest {
resourceFilters.add(resourceFilter1);
resourceFilters.add(resourceFilter2);
- ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+ ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, null, params);
RequestStatusResponse response = null;
try {
response = controller.createAction(actionRequest, requestProperties);
@@ -6184,7 +6184,7 @@ public class AmbariManagementControllerTest {
"allows one resource filter to be specified"));
}
resourceFilters.remove(resourceFilter1);
- actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+ actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, null, params);
response = controller.createAction(actionRequest, requestProperties);
assertEquals(1, response.getTasks().size());
@@ -6268,7 +6268,7 @@ public class AmbariManagementControllerTest {
resourceFilters.add(resourceFilter);
ExecuteActionRequest request = new ExecuteActionRequest("c1",
- "RESTART", null, resourceFilters, params);
+ "RESTART", null, resourceFilters, null, params);
RequestStatusResponse response = controller.createAction(request, requestProperties);
Assert.assertEquals(3, response.getTasks().size());
@@ -6297,7 +6297,7 @@ public class AmbariManagementControllerTest {
new ArrayList<String>() {{ add("h2"); }});
resourceFilters.add(resourceFilter);
request = new ExecuteActionRequest("c1", Role.HDFS_SERVICE_CHECK.name(),
- null, resourceFilters, null);
+ null, resourceFilters, null, null);
response = controller.createAction(request, requestProperties);
Assert.assertEquals(1, response.getTasks().size());