You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by jo...@apache.org on 2016/12/07 21:50:36 UTC
[01/14] ambari git commit: AMBARI-18906 - Remove Unnecessary Locks
Inside Of Config Business Object Implementations (jonathanhurley)
Repository: ambari
Updated Branches:
refs/heads/trunk 96e69d58e -> 97e3de68f
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
index d50c92d..a3a7e11 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
@@ -38,7 +38,6 @@ import javax.persistence.EntityManager;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.RequestFactory;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
-import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.AmbariCustomCommandExecutionHelper;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.ClusterRequest;
@@ -88,6 +87,7 @@ public class ConfigHelperTest {
private static ConfigHelper configHelper;
private static AmbariManagementController managementController;
private static AmbariMetaInfo metaInfo;
+ private static ConfigFactory configFactory;
@BeforeClass
public static void setup() throws Exception {
@@ -102,6 +102,7 @@ public class ConfigHelperTest {
configHelper = injector.getInstance(ConfigHelper.class);
managementController = injector.getInstance(AmbariManagementController.class);
metaInfo = injector.getInstance(AmbariMetaInfo.class);
+ configFactory = injector.getInstance(ConfigFactory.class);
clusterName = "c1";
clusters.addCluster(clusterName, new StackId("HDP-2.0.6"));
@@ -339,14 +340,11 @@ public class ConfigHelperTest {
add(clusterRequest6);
}}, null);
- final Config config = new ConfigImpl("ams-env");
- config.setTag("version122");
-
Map<String, String> properties = new HashMap<String, String>();
properties.put("a", "b");
properties.put("c", "d");
- config.setProperties(properties);
+ final Config config = configFactory.createNew(cluster, "ams-env", "version122", properties, null);
Long groupId = addConfigGroup("g1", "t1", new ArrayList<String>() {{
add("h1");
}}, new ArrayList<Config>() {{
@@ -419,19 +417,14 @@ public class ConfigHelperTest {
add(clusterRequest3);
}}, null);
- final Config config1 = new ConfigImpl("core-site2");
- config1.setTag("version122");
-
Map<String, String> properties = new HashMap<String, String>();
properties.put("a", "b");
properties.put("c", "d");
- config1.setProperties(properties);
+ final Config config1 = configFactory.createNew(cluster, "core-site2", "version122", properties, null);
- final Config config2 = new ConfigImpl("global2");
- config2.setTag("version122");
Map<String, String> properties2 = new HashMap<String, String>();
properties2.put("namenode_heapsize", "1111");
- config2.setProperties(properties2);
+ final Config config2 = configFactory.createNew(cluster, "global2", "version122", properties2, null);
Long groupId = addConfigGroup("g2", "t1", new ArrayList<String>() {{
add("h1");
@@ -511,24 +504,23 @@ public class ConfigHelperTest {
}}, null);
- final Config config1 = new ConfigImpl("core-site3");
- config1.setTag("version122");
-
Map<String, String> attributes = new HashMap<String, String>();
attributes.put("fs.trash.interval", "11");
attributes.put("b", "y");
Map<String, Map<String, String>> config1Attributes = new HashMap<String, Map<String, String>>();
config1Attributes.put("attribute1", attributes);
- config1.setPropertiesAttributes(config1Attributes);
- final Config config2 = new ConfigImpl("global3");
- config2.setTag("version122");
+ final Config config1 = configFactory.createNew(cluster, "core-site3", "version122",
+ new HashMap<String, String>(), config1Attributes);
+
attributes = new HashMap<String, String>();
attributes.put("namenode_heapsize", "z");
attributes.put("c", "q");
Map<String, Map<String, String>> config2Attributes = new HashMap<String, Map<String, String>>();
config2Attributes.put("attribute2", attributes);
- config2.setPropertiesAttributes(config2Attributes);
+
+ final Config config2 = configFactory.createNew(cluster, "global3", "version122",
+ new HashMap<String, String>(), config2Attributes);
Long groupId = addConfigGroup("g3", "t1", new ArrayList<String>() {{
add("h3");
@@ -690,7 +682,8 @@ public class ConfigHelperTest {
confGroupProperties.put("b", "any");
confGroupProperties.put("c", "any");
- Config overrideConfig = new ConfigImpl(cluster, "type", confGroupProperties, confGroupAttributes, injector);
+ Config overrideConfig = configFactory.createNew(cluster, "type", null,
+ confGroupProperties, confGroupAttributes);
Map<String, Map<String, String>> result
= configHelper.overrideAttributes(overrideConfig, persistedAttributes);
@@ -718,7 +711,8 @@ public class ConfigHelperTest {
confGroupProperties.put("b", "any");
confGroupProperties.put("c", "any");
- Config overrideConfig = new ConfigImpl(cluster, "type", confGroupProperties, confGroupAttributes, injector);
+ Config overrideConfig = configFactory.createNew(cluster, "type", null,
+ confGroupProperties, confGroupAttributes);
Map<String, Map<String, String>> result
= configHelper.overrideAttributes(overrideConfig, persistedAttributes);
@@ -744,7 +738,8 @@ public class ConfigHelperTest {
confGroupProperties.put("b", "any");
confGroupProperties.put("c", "any");
- Config overrideConfig = new ConfigImpl(cluster, "type", confGroupProperties, null, injector);
+ Config overrideConfig = configFactory.createNew(cluster, "type", null,
+ confGroupProperties, null);
Map<String, Map<String, String>> result
= configHelper.overrideAttributes(overrideConfig, persistedAttributes);
@@ -772,7 +767,8 @@ public class ConfigHelperTest {
confGroupFinalAttrs.put("b", "true");
confGroupAttributes.put("final", confGroupFinalAttrs);
- Config overrideConfig = new ConfigImpl(cluster, "type", null, confGroupAttributes, injector);
+ Config overrideConfig = configFactory.createNew(cluster, "type", "version122",
+ new HashMap<String,String>(), confGroupAttributes);
Map<String, Map<String, String>> result
= configHelper.overrideAttributes(overrideConfig, persistedAttributes);
@@ -921,8 +917,10 @@ public class ConfigHelperTest {
List<String> hosts = new ArrayList<String>();
hosts.add("h1");
List<Config> configs = new ArrayList<Config>();
- ConfigImpl configImpl = new ConfigImpl("flume-conf");
- configImpl.setTag("FLUME1");
+
+ Config configImpl = configFactory.createNew(cluster, "flume-conf", "FLUME1",
+ new HashMap<String,String>(), null);
+
configs.add(configImpl);
addConfigGroup("configGroup1", "FLUME", hosts, configs);
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertReceivedListenerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertReceivedListenerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertReceivedListenerTest.java
index 1867bda..ede94dc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertReceivedListenerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertReceivedListenerTest.java
@@ -56,12 +56,12 @@ import org.apache.ambari.server.utils.EventBusSynchronizer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.persist.PersistService;
import com.google.inject.persist.UnitOfWork;
-import org.junit.experimental.categories.Category;
/**
* Tests the {@link AlertReceivedListener}.
@@ -835,17 +835,13 @@ public class AlertReceivedListenerTest {
@SuppressWarnings("serial")
public void testAlertFirmnessUsingGlobalValueHigherThanOverride() throws Exception {
ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
- Config config = cf.createNew(m_cluster, ConfigHelper.CLUSTER_ENV,
+ Config config = cf.createNew(m_cluster, ConfigHelper.CLUSTER_ENV, "version2",
new HashMap<String, String>() {
{
put(ConfigHelper.CLUSTER_ENV_ALERT_REPEAT_TOLERANCE, "3");
}
}, new HashMap<String, Map<String, String>>());
- config.setTag("version2");
- config.persist();
-
- m_cluster.addConfig(config);
m_cluster.addDesiredConfig("user", Collections.singleton(config));
String definitionName = ALERT_DEFINITION + "1";
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterDeadlockTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterDeadlockTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterDeadlockTest.java
index 4fdcc22..9dc405e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterDeadlockTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterDeadlockTest.java
@@ -124,14 +124,11 @@ public class ClusterDeadlockTest {
cluster.createClusterVersion(stackId,
stackId.getStackVersion(), "admin", RepositoryVersionState.INSTALLING);
- Config config1 = configFactory.createNew(cluster, "test-type1", new HashMap<String, String>(), new HashMap<String,
+ Config config1 = configFactory.createNew(cluster, "test-type1", "version1", new HashMap<String, String>(), new HashMap<String,
Map<String, String>>());
- Config config2 = configFactory.createNew(cluster, "test-type2", new HashMap<String, String>(), new HashMap<String,
+ Config config2 = configFactory.createNew(cluster, "test-type2", "version1", new HashMap<String, String>(), new HashMap<String,
Map<String, String>>());
- config1.persist();
- config2.persist();
- cluster.addConfig(config1);
- cluster.addConfig(config2);
+
cluster.addDesiredConfig("test user", new HashSet<Config>(Arrays.asList(config1, config2)));
// 100 hosts
@@ -186,7 +183,7 @@ public class ClusterDeadlockTest {
}
DeadlockWarningThread wt = new DeadlockWarningThread(threads);
-
+
while (true) {
if(!wt.isAlive()) {
break;
@@ -221,7 +218,7 @@ public class ClusterDeadlockTest {
}
DeadlockWarningThread wt = new DeadlockWarningThread(threads);
-
+
while (true) {
if(!wt.isAlive()) {
break;
@@ -267,7 +264,7 @@ public class ClusterDeadlockTest {
clusterWriterThread.start();
schWriterThread.start();
}
-
+
DeadlockWarningThread wt = new DeadlockWarningThread(threads, 20, 1000);
while (true) {
if(!wt.isAlive()) {
@@ -337,7 +334,7 @@ public class ClusterDeadlockTest {
@Override
public void run() {
for (int i =0; i<300; i++) {
- config.persist(false);
+ config.save();
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
index 90a3d02..69cfc9f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
@@ -87,7 +87,6 @@ import org.apache.ambari.server.state.ComponentInfo;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
import org.apache.ambari.server.state.DesiredConfig;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostHealthStatus;
@@ -964,21 +963,14 @@ public class ClusterTest {
Map<String, Map<String, String>> c2PropAttributes = new HashMap<String, Map<String,String>>();
c2PropAttributes.put("final", new HashMap<String, String>());
c2PropAttributes.get("final").put("x", "true");
- Config config1 = configFactory.createNew(c1, "global",
+ Config config1 = configFactory.createNew(c1, "global", "version1",
new HashMap<String, String>() {{ put("a", "b"); }}, c1PropAttributes);
- config1.setTag("version1");
- Config config2 = configFactory.createNew(c1, "global",
+ Config config2 = configFactory.createNew(c1, "global", "version2",
new HashMap<String, String>() {{ put("x", "y"); }}, c2PropAttributes);
- config2.setTag("version2");
- Config config3 = configFactory.createNew(c1, "core-site",
+ Config config3 = configFactory.createNew(c1, "core-site", "version2",
new HashMap<String, String>() {{ put("x", "y"); }}, new HashMap<String, Map<String,String>>());
- config3.setTag("version2");
-
- c1.addConfig(config1);
- c1.addConfig(config2);
- c1.addConfig(config3);
c1.addDesiredConfig("_test", Collections.singleton(config1));
Config res = c1.getDesiredConfigByType("global");
@@ -998,21 +990,14 @@ public class ClusterTest {
public void testDesiredConfigs() throws Exception {
createDefaultCluster();
- Config config1 = configFactory.createNew(c1, "global",
+ Config config1 = configFactory.createNew(c1, "global", "version1",
new HashMap<String, String>() {{ put("a", "b"); }}, new HashMap<String, Map<String,String>>());
- config1.setTag("version1");
- Config config2 = configFactory.createNew(c1, "global",
+ Config config2 = configFactory.createNew(c1, "global", "version2",
new HashMap<String, String>() {{ put("x", "y"); }}, new HashMap<String, Map<String,String>>());
- config2.setTag("version2");
- Config config3 = configFactory.createNew(c1, "core-site",
+ Config config3 = configFactory.createNew(c1, "core-site", "version2",
new HashMap<String, String>() {{ put("x", "y"); }}, new HashMap<String, Map<String,String>>());
- config3.setTag("version2");
-
- c1.addConfig(config1);
- c1.addConfig(config2);
- c1.addConfig(config3);
try {
c1.addDesiredConfig(null, Collections.singleton(config1));
@@ -1132,18 +1117,11 @@ public class ClusterTest {
c1.addService("HDFS");
- Config config1 = configFactory.createNew(c1, "hdfs-site",
+ Config config1 = configFactory.createNew(c1, "hdfs-site", "version1",
new HashMap<String, String>() {{ put("a", "b"); }}, new HashMap<String, Map<String,String>>());
- config1.setTag("version1");
- Config config2 = configFactory.createNew(c1, "core-site",
+ Config config2 = configFactory.createNew(c1, "core-site", "version2",
new HashMap<String, String>() {{ put("x", "y"); }}, new HashMap<String, Map<String,String>>());
- config2.setTag("version2");
-
- config1.persist();
- c1.addConfig(config1);
- config2.persist();
- c1.addConfig(config2);
Set<Config> configs = new HashSet<Config>();
configs.add(config1);
@@ -1209,10 +1187,9 @@ public class ClusterTest {
Map<String, Map<String, String>> propAttributes = new HashMap<String, Map<String,String>>();
propAttributes.put("final", new HashMap<String, String>());
propAttributes.get("final").put("test", "true");
- Config config = configFactory.createNew(c1, "hdfs-site", new HashMap<String, String>(){{
+ Config config = configFactory.createNew(c1, "hdfs-site", "1", new HashMap<String, String>(){{
put("test", "test");
}}, propAttributes);
- config.setTag("1");
host1.addDesiredConfig(c1.getClusterId(), true, "test", config);
@@ -1247,16 +1224,11 @@ public class ClusterTest {
public void testServiceConfigVersions() throws Exception {
createDefaultCluster();
- Config config1 = configFactory.createNew(c1, "hdfs-site",
+ Config config1 = configFactory.createNew(c1, "hdfs-site", "version1",
new HashMap<String, String>() {{ put("a", "b"); }}, new HashMap<String, Map<String,String>>());
- config1.setTag("version1");
- Config config2 = configFactory.createNew(c1, "hdfs-site",
+ Config config2 = configFactory.createNew(c1, "hdfs-site", "version2",
new HashMap<String, String>() {{ put("x", "y"); }}, new HashMap<String, Map<String,String>>());
- config2.setTag("version2");
-
- c1.addConfig(config1);
- c1.addConfig(config2);
c1.addDesiredConfig("admin", Collections.singleton(config1));
List<ServiceConfigVersionResponse> serviceConfigVersions =
@@ -1310,16 +1282,11 @@ public class ClusterTest {
public void testSingleServiceVersionForMultipleConfigs() throws Exception {
createDefaultCluster();
- Config config1 = configFactory.createNew(c1, "hdfs-site",
+ Config config1 = configFactory.createNew(c1, "hdfs-site", "version1",
new HashMap<String, String>() {{ put("a", "b"); }}, new HashMap<String, Map<String,String>>());
- config1.setTag("version1");
- Config config2 = configFactory.createNew(c1, "core-site",
+ Config config2 = configFactory.createNew(c1, "core-site", "version2",
new HashMap<String, String>() {{ put("x", "y"); }}, new HashMap<String, Map<String,String>>());
- config2.setTag("version2");
-
- c1.addConfig(config1);
- c1.addConfig(config2);
Set<Config> configs = new HashSet<Config>();
configs.add(config1);
@@ -1345,11 +1312,8 @@ public class ClusterTest {
public void testServiceConfigVersionsForGroups() throws Exception {
createDefaultCluster();
- Config config1 = configFactory.createNew(c1, "hdfs-site",
+ Config config1 = configFactory.createNew(c1, "hdfs-site", "version1",
new HashMap<String, String>() {{ put("a", "b"); }}, new HashMap<String, Map<String,String>>());
- config1.setTag("version1");
-
- c1.addConfig(config1);
ServiceConfigVersionResponse scvResponse =
c1.addDesiredConfig("admin", Collections.singleton(config1));
@@ -1361,9 +1325,8 @@ public class ClusterTest {
Assert.assertEquals("Only one scv should be active", 1, activeServiceConfigVersions.get("HDFS").size());
//create config group
- Config config2 = configFactory.createNew(c1, "hdfs-site",
+ Config config2 = configFactory.createNew(c1, "hdfs-site", "version2",
new HashMap<String, String>() {{ put("a", "c"); }}, new HashMap<String, Map<String,String>>());
- config2.setTag("version2");
ConfigGroup configGroup =
configGroupFactory.createNew(c1, "test group", "HDFS", "descr", Collections.singletonMap("hdfs-site", config2),
@@ -1381,7 +1344,7 @@ public class ClusterTest {
Assert.assertEquals("Two service config versions should be active, for default and test groups",
2, activeServiceConfigVersions.get("HDFS").size());
- Config config3 = configFactory.createNew(c1, "hdfs-site",
+ Config config3 = configFactory.createNew(c1, "hdfs-site", "version3",
new HashMap<String, String>() {{ put("a", "d"); }}, new HashMap<String, Map<String,String>>());
configGroup.setConfigurations(Collections.singletonMap("hdfs-site", config3));
@@ -1417,13 +1380,12 @@ public class ClusterTest {
//check config with empty cluster
- Config config4 = new ConfigImpl("hdfs-site");
- config4.setProperties(new HashMap<String, String>() {{
- put("a", "b");
- }});
+ Config config4 = configFactory.createReadOnly("hdfs-site", "version4",
+ Collections.singletonMap("a", "b"), null);
ConfigGroup configGroup2 =
- configGroupFactory.createNew(c1, "test group 2", "HDFS", "descr", Collections.singletonMap("hdfs-site", config4),
+ configGroupFactory.createNew(c1, "test group 2", "HDFS", "descr",
+ new HashMap<>(Collections.singletonMap("hdfs-site", config4)),
Collections.<Long, Host>emptyMap());
configGroup2.persist();
@@ -1443,12 +1405,8 @@ public class ClusterTest {
// Given
createDefaultCluster();
- Config hdfsSiteConfigV1 = configFactory.createNew(c1, "hdfs-site", ImmutableMap.of("p1", "v1"), ImmutableMap.<String, Map<String,String>>of());
- hdfsSiteConfigV1.setTag("version1");
- hdfsSiteConfigV1.persist();
-
- c1.addConfig(hdfsSiteConfigV1);
-
+ Config hdfsSiteConfigV1 = configFactory.createNew(c1, "hdfs-site", "version1",
+ ImmutableMap.of("p1", "v1"), ImmutableMap.<String, Map<String,String>>of());
ServiceConfigVersionResponse hdfsSiteConfigResponseV1 = c1.addDesiredConfig("admin", Collections.singleton(hdfsSiteConfigV1));
List<ConfigurationResponse> configResponsesDefaultGroup = Collections.singletonList(
@@ -1459,8 +1417,8 @@ public class ClusterTest {
hdfsSiteConfigResponseV1.setConfigurations(configResponsesDefaultGroup);
- Config hdfsSiteConfigV2 = configFactory.createNew(c1, "hdfs-site", ImmutableMap.of("p1", "v2"), ImmutableMap.<String, Map<String,String>>of());
- hdfsSiteConfigV2.setTag("version2");
+ Config hdfsSiteConfigV2 = configFactory.createNew(c1, "hdfs-site", "version2",
+ ImmutableMap.of("p1", "v2"), ImmutableMap.<String, Map<String,String>>of());
ConfigGroup configGroup = configGroupFactory.createNew(c1, "configGroup1", "version1", "test description", ImmutableMap.of(hdfsSiteConfigV2.getType(), hdfsSiteConfigV2), ImmutableMap.<Long, Host>of());
configGroup.persist();
@@ -1507,12 +1465,8 @@ public class ClusterTest {
// Given
createDefaultCluster();
- Config hdfsSiteConfigV1 = configFactory.createNew(c1, "hdfs-site", ImmutableMap.of("p1", "v1"), ImmutableMap.<String, Map<String,String>>of());
- hdfsSiteConfigV1.setTag("version1");
- hdfsSiteConfigV1.persist();
-
- c1.addConfig(hdfsSiteConfigV1);
-
+ Config hdfsSiteConfigV1 = configFactory.createNew(c1, "hdfs-site", "version1",
+ ImmutableMap.of("p1", "v1"), ImmutableMap.<String, Map<String,String>>of());
ServiceConfigVersionResponse hdfsSiteConfigResponseV1 = c1.addDesiredConfig("admin", Collections.singleton(hdfsSiteConfigV1));
List<ConfigurationResponse> configResponsesDefaultGroup = Collections.singletonList(
@@ -1523,8 +1477,8 @@ public class ClusterTest {
hdfsSiteConfigResponseV1.setConfigurations(configResponsesDefaultGroup);
- Config hdfsSiteConfigV2 = configFactory.createNew(c1, "hdfs-site", ImmutableMap.of("p1", "v2"), ImmutableMap.<String, Map<String,String>>of());
- hdfsSiteConfigV2.setTag("version2");
+ Config hdfsSiteConfigV2 = configFactory.createNew(c1, "hdfs-site", "version2",
+ ImmutableMap.of("p1", "v2"), ImmutableMap.<String, Map<String,String>>of());
ConfigGroup configGroup = configGroupFactory.createNew(c1, "configGroup1", "version1", "test description", ImmutableMap.of(hdfsSiteConfigV2.getType(), hdfsSiteConfigV2), ImmutableMap.<Long, Host>of());
configGroup.persist();
@@ -2373,17 +2327,13 @@ public class ClusterTest {
ClusterEntity clusterEntity = clusterDAO.findByName("c1");
assertEquals(0, clusterEntity.getClusterConfigEntities().size());
- final Config originalConfig = configFactory.createNew(cluster, "foo-site",
+ final Config originalConfig = configFactory.createNew(cluster, "foo-site", "version3",
new HashMap<String, String>() {
{
put("one", "two");
}
}, new HashMap<String, Map<String, String>>());
- originalConfig.setTag("version3");
- originalConfig.persist();
- cluster.addConfig(originalConfig);
-
ConfigGroup configGroup = configGroupFactory.createNew(cluster, "g1", "t1", "",
new HashMap<String, Config>() {
{
@@ -2403,8 +2353,7 @@ public class ClusterTest {
Map<String, String> properties = config.getProperties();
properties.put("three", "four");
config.setProperties(properties);
-
- config.persist(false);
+ config.save();
clusterEntity = clusterDAO.findByName("c1");
assertEquals(1, clusterEntity.getClusterConfigEntities().size());
@@ -2545,13 +2494,7 @@ public class ClusterTest {
// foo-type for v1 on current stack
properties.put("foo-property-1", "foo-value-1");
- Config c1 = new ConfigImpl(cluster, "foo-type", properties, propertiesAttributes, injector);
- c1.setTag("version-1");
- c1.setStackId(stackId);
- c1.setVersion(1L);
-
- cluster.addConfig(c1);
- c1.persist();
+ Config c1 = configFactory.createNew(cluster, "foo-type", "version-1", properties, propertiesAttributes);
// make v1 "current"
cluster.addDesiredConfig("admin", Sets.newHashSet(c1), "note-1");
@@ -2562,12 +2505,7 @@ public class ClusterTest {
// save v2
// foo-type for v2 on new stack
properties.put("foo-property-2", "foo-value-2");
- Config c2 = new ConfigImpl(cluster, "foo-type", properties, propertiesAttributes, injector);
- c2.setTag("version-2");
- c2.setStackId(newStackId);
- c2.setVersion(2L);
- cluster.addConfig(c2);
- c2.persist();
+ Config c2 = configFactory.createNew(cluster, "foo-type", "version-2", properties, propertiesAttributes);
// make v2 "current"
cluster.addDesiredConfig("admin", Sets.newHashSet(c2), "note-2");
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
index 5886234..d75d9d0 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClustersTest.java
@@ -405,19 +405,15 @@ public class ClustersTest {
cluster.transitionClusterVersion(stackId, stackId.getStackVersion(),
RepositoryVersionState.CURRENT);
- final Config config1 = injector.getInstance(ConfigFactory.class).createNew(cluster, "t1",
+ final Config config1 = injector.getInstance(ConfigFactory.class).createNew(cluster, "t1", "1",
new HashMap<String, String>() {{
put("prop1", "val1");
}}, new HashMap<String, Map<String,String>>());
- config1.setTag("1");
- config1.persist();
- Config config2 = injector.getInstance(ConfigFactory.class).createNew(cluster, "t1",
+ Config config2 = injector.getInstance(ConfigFactory.class).createNew(cluster, "t1", "2",
new HashMap<String, String>() {{
put("prop2", "val2");
}}, new HashMap<String, Map<String,String>>());
- config2.setTag("2");
- config2.persist();
// cluster desired config
cluster.addDesiredConfig("_test", Collections.singleton(config1));
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ServiceComponentHostConcurrentWriteDeadlockTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ServiceComponentHostConcurrentWriteDeadlockTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ServiceComponentHostConcurrentWriteDeadlockTest.java
index 1f09002..96dbf26 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ServiceComponentHostConcurrentWriteDeadlockTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ServiceComponentHostConcurrentWriteDeadlockTest.java
@@ -114,17 +114,12 @@ public class ServiceComponentHostConcurrentWriteDeadlockTest {
cluster.createClusterVersion(stackId,
stackId.getStackVersion(), "admin", RepositoryVersionState.INSTALLING);
- Config config1 = configFactory.createNew(cluster, "test-type1", new HashMap<String, String>(), new HashMap<String,
+ Config config1 = configFactory.createNew(cluster, "test-type1", null, new HashMap<String, String>(), new HashMap<String,
Map<String, String>>());
- Config config2 = configFactory.createNew(cluster, "test-type2", new HashMap<String, String>(), new HashMap<String,
+ Config config2 = configFactory.createNew(cluster, "test-type2", null, new HashMap<String, String>(), new HashMap<String,
Map<String, String>>());
- config1.persist();
- config2.persist();
-
- cluster.addConfig(config1);
- cluster.addConfig(config2);
cluster.addDesiredConfig("test user", new HashSet<Config>(Arrays.asList(config1, config2)));
String hostName = "c6401";
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
index 596f381..5c8d174 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/host/HostTest.java
@@ -384,7 +384,7 @@ public class HostTest {
clusters.mapHostToCluster("h1", "c1");
ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
- Config config = configFactory.createNew(c1, "global",
+ Config config = configFactory.createNew(c1, "global", "v1",
new HashMap<String,String>() {{ put("a", "b"); put("x", "y"); }}, new HashMap<String, Map<String,String>>());
try {
@@ -396,16 +396,14 @@ public class HostTest {
}
- config.setTag("v1");
host.addDesiredConfig(c1.getClusterId(), true, "_test", config);
Map<String, DesiredConfig> map = host.getDesiredConfigs(c1.getClusterId());
Assert.assertTrue("Expect desired config to contain global", map.containsKey("global"));
Assert.assertEquals("Expect global user to be '_test'", "_test", map.get("global").getUser());
- config = configFactory.createNew(c1, "global",
+ config = configFactory.createNew(c1, "global", "v2",
new HashMap<String,String>() {{ put("c", "d"); }}, new HashMap<String, Map<String,String>>());
- config.setTag("v2");
host.addDesiredConfig(c1.getClusterId(), true, "_test1", config);
map = host.getDesiredConfigs(c1.getClusterId());
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
index 14a8de6..8db5190 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
@@ -221,11 +221,8 @@ public class ServiceComponentHostTest {
Cluster c = clusters.getCluster(clusterName);
if (c.getConfig("time", String.valueOf(timestamp)) == null) {
- Config config = configFactory.createNew (c, "time",
+ Config config = configFactory.createNew (c, "time", String.valueOf(timestamp),
new HashMap<String, String>(), new HashMap<String, Map<String,String>>());
- config.setTag(String.valueOf(timestamp));
- c.addConfig(config);
- config.persist();
}
switch (eventType) {
@@ -817,12 +814,10 @@ public class ServiceComponentHostTest {
final Host host = clusters.getHostsForCluster(clusterName).get(hostName);
Assert.assertNotNull(host);
- final Config c = configFactory.createNew(cluster, "hdfs-site",
+ final Config c = configFactory.createNew(cluster, "hdfs-site", "version3",
new HashMap<String, String>() {{ put("dfs.journalnode.http-address", "http://goo"); }},
new HashMap<String, Map<String,String>>());
- c.setTag("version3");
- c.persist();
- cluster.addConfig(c);
+
host.addDesiredConfig(cluster.getClusterId(), true, "user", c);
ConfigGroup configGroup = configGroupFactory.createNew(cluster, "g1",
"t1", "", new HashMap<String, Config>() {{ put("hdfs-site", c); }},
@@ -874,12 +869,9 @@ public class ServiceComponentHostTest {
sch1.updateActualConfigs(actual);
- final Config c1 = configFactory.createNew(cluster, "core-site",
+ final Config c1 = configFactory.createNew(cluster, "core-site", "version2",
new HashMap<String, String>() {{ put("fs.trash.interval", "400"); }},
new HashMap<String, Map<String,String>>());
- c1.setTag("version2");
- c1.persist();
- cluster.addConfig(c1);
configGroup = configGroupFactory.createNew(cluster, "g2",
"t2", "", new HashMap<String, Config>() {{ put("core-site", c1); }},
new HashMap<Long, Host>() {{ put(hostEntity.getHostId(), host); }});
@@ -1037,10 +1029,7 @@ public class ServiceComponentHostTest {
* @param values the values for the config
*/
private void makeConfig(Cluster cluster, String type, String tag, Map<String, String> values, Map<String, Map<String, String>> attributes) {
- Config config = configFactory.createNew(cluster, type, values, attributes);
- config.setTag(tag);
- config.persist();
- cluster.addConfig(config);
+ Config config = configFactory.createNew(cluster, type, tag, values, attributes);
cluster.addDesiredConfig("user", Collections.singleton(config));
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
index ee64ac9..dadeb03 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
@@ -59,6 +59,7 @@ import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.DesiredConfig;
import org.apache.ambari.server.state.Host;
@@ -66,13 +67,14 @@ import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.configgroup.ConfigGroup;
import org.easymock.Capture;
+import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.ImmutableList;
/**
* AmbariContext unit tests
@@ -110,6 +112,7 @@ public class AmbariContextTest {
private static final ConfigGroup configGroup2 = createMock(ConfigGroup.class);
private static final Host host1 = createNiceMock(Host.class);
private static final Host host2 = createNiceMock(Host.class);
+ private static final ConfigFactory configFactory = createNiceMock(ConfigFactory.class);
private static final Collection<String> blueprintServices = new HashSet<String>();
private static final Map<String, Service> clusterServices = new HashMap<String, Service>();
@@ -164,6 +167,9 @@ public class AmbariContextTest {
type1Props.put("prop3", "val3");
group1Configuration = new Configuration(group1Properties, null, bpConfiguration);
+ Map<String, String> group1ResolvedProperties = new HashMap<String, String>(bpType1Props);
+ group1ResolvedProperties.putAll(type1Props);
+
// config type -> service mapping
Map<String, String> configTypeServiceMapping = new HashMap<String, String>();
configTypeServiceMapping.put("type1", "service1");
@@ -172,6 +178,28 @@ public class AmbariContextTest {
configGroups.put(1L, configGroup1);
configGroups.put(2L, configGroup2);
+ // config factory mock
+ Config type1Group1 = createNiceMock(Config.class);
+ expect(type1Group1.getType()).andReturn("type1").anyTimes();
+ expect(type1Group1.getTag()).andReturn("group1").anyTimes();
+ expect(type1Group1.getProperties()).andReturn(group1ResolvedProperties).anyTimes();
+ expect(configFactory.createReadOnly(EasyMock.eq("type1"), EasyMock.eq("group1"),
+ EasyMock.<Map<String, String>> anyObject(),
+ EasyMock.<Map<String, Map<String, String>>> anyObject())).andReturn(type1Group1).anyTimes();
+ replay(type1Group1);
+
+ Config type1Service1 = createNiceMock(Config.class);
+ expect(type1Service1.getType()).andReturn("type1").anyTimes();
+ expect(type1Service1.getTag()).andReturn("service1").anyTimes();
+ expect(type1Service1.getProperties()).andReturn(type1Props).anyTimes();
+ expect(configFactory.createReadOnly(EasyMock.eq("type1"), EasyMock.eq("service1"),
+ EasyMock.<Map<String, String>> anyObject(),
+ EasyMock.<Map<String, Map<String, String>>> anyObject())).andReturn(
+ type1Service1).anyTimes();
+ replay(type1Service1);
+
+ context.configFactory = configFactory;
+
blueprintServices.add("service1");
blueprintServices.add("service2");
@@ -222,17 +250,17 @@ public class AmbariContextTest {
public void tearDown() throws Exception {
verify(controller, clusterController, hostResourceProvider, serviceResourceProvider, componentResourceProvider,
hostComponentResourceProvider, configGroupResourceProvider, topology, blueprint, stack, clusters,
- cluster, group1Info, configHelper, configGroup1, configGroup2, host1, host2);
+ cluster, group1Info, configHelper, configGroup1, configGroup2, host1, host2, configFactory);
reset(controller, clusterController, hostResourceProvider, serviceResourceProvider, componentResourceProvider,
hostComponentResourceProvider, configGroupResourceProvider, topology, blueprint, stack, clusters,
- cluster, group1Info, configHelper, configGroup1, configGroup2, host1, host2);
+ cluster, group1Info, configHelper, configGroup1, configGroup2, host1, host2, configFactory);
}
private void replayAll() {
replay(controller, clusterController, hostResourceProvider, serviceResourceProvider, componentResourceProvider,
hostComponentResourceProvider, configGroupResourceProvider, topology, blueprint, stack, clusters,
- cluster, group1Info, configHelper, configGroup1, configGroup2, host1, host2);
+ cluster, group1Info, configHelper, configGroup1, configGroup2, host1, host2, configFactory);
}
@Test
@@ -331,6 +359,7 @@ public class AmbariContextTest {
//todo: for now not using return value so just returning null
expect(configGroupResourceProvider.createResources(capture(configGroupRequestCapture))).andReturn(null).once();
configHelper.moveDeprecatedGlobals(stackId, group1Configuration.getFullProperties(1), CLUSTER_NAME);
+
// replay all mocks
replayAll();
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/update/HostUpdateHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/update/HostUpdateHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/update/HostUpdateHelperTest.java
index f9dd5d1..3bb6c0a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/update/HostUpdateHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/update/HostUpdateHelperTest.java
@@ -49,6 +49,8 @@ import org.apache.ambari.server.orm.entities.StackEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
+import org.apache.ambari.server.state.ConfigImpl;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.stack.OsFamily;
import org.apache.ambari.server.utils.CollectionPresentationUtils;
@@ -62,6 +64,7 @@ import com.google.gson.JsonPrimitive;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
+import com.google.inject.assistedinject.FactoryModuleBuilder;
import junit.framework.Assert;
@@ -212,16 +215,12 @@ public class HostUpdateHelperTest {
Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
Cluster mockCluster = easyMockSupport.createNiceMock(Cluster.class);
ClusterEntity mockClusterEntity1 = easyMockSupport.createNiceMock(ClusterEntity.class);
- ClusterEntity mockClusterEntity2 = easyMockSupport.createNiceMock(ClusterEntity.class);
ClusterConfigEntity mockClusterConfigEntity1 = easyMockSupport.createNiceMock(ClusterConfigEntity.class);
ClusterConfigEntity mockClusterConfigEntity2 = easyMockSupport.createNiceMock(ClusterConfigEntity.class);
- ClusterConfigEntity mockClusterConfigEntity3 = easyMockSupport.createNiceMock(ClusterConfigEntity.class);
- ClusterConfigEntity mockClusterConfigEntity4 = easyMockSupport.createNiceMock(ClusterConfigEntity.class);
StackEntity mockStackEntity = easyMockSupport.createNiceMock(StackEntity.class);
Map<String, Map<String, String>> clusterHostsToChange = new HashMap<>();
Map<String, String> hosts = new HashMap<>();
List<ClusterConfigEntity> clusterConfigEntities1 = new ArrayList<>();
- List<ClusterConfigEntity> clusterConfigEntities2 = new ArrayList<>();
final Injector mockInjector = Guice.createInjector(new AbstractModule() {
@Override
@@ -231,6 +230,8 @@ public class HostUpdateHelperTest {
bind(EntityManager.class).toInstance(entityManager);
bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
bind(ClusterDAO.class).toInstance(mockClusterDAO);
+
+ install(new FactoryModuleBuilder().implement(Config.class, ConfigImpl.class).build(ConfigFactory.class));
}
});
@@ -242,49 +243,42 @@ public class HostUpdateHelperTest {
clusterConfigEntities1.add(mockClusterConfigEntity1);
clusterConfigEntities1.add(mockClusterConfigEntity2);
- clusterConfigEntities2.add(mockClusterConfigEntity3);
- clusterConfigEntities2.add(mockClusterConfigEntity4);
-
clusterHostsToChange.put("cl1", hosts);
- expect(mockClusterDAO.findByName("cl1")).andReturn(mockClusterEntity1).once();
- expect(mockClusterDAO.findById(1L)).andReturn(mockClusterEntity2).atLeastOnce();
+ expect(mockClusterDAO.findByName("cl1")).andReturn(mockClusterEntity1).atLeastOnce();
expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
expect(mockClusters.getCluster("cl1")).andReturn(mockCluster).once();
- expect(mockCluster.getClusterId()).andReturn(1L).atLeastOnce();
+ expect(mockCluster.getClusterId()).andReturn(1L).anyTimes();
expect(mockClusterEntity1.getClusterConfigEntities()).andReturn(clusterConfigEntities1).atLeastOnce();
- expect(mockClusterEntity2.getClusterConfigEntities()).andReturn(clusterConfigEntities2).atLeastOnce();
- expect(mockClusterConfigEntity1.getStack()).andReturn(mockStackEntity).once();
+ expect(mockClusterConfigEntity1.getClusterId()).andReturn(1L).atLeastOnce();
+ expect(mockClusterConfigEntity1.getConfigId()).andReturn(1L).atLeastOnce();
+ expect(mockClusterConfigEntity1.getStack()).andReturn(mockStackEntity).atLeastOnce();
expect(mockClusterConfigEntity1.getData()).andReturn("{\"testProperty1\" : \"testValue_host1\", " +
"\"testProperty2\" : \"testValue_host5\", \"testProperty3\" : \"testValue_host11\", " +
"\"testProperty4\" : \"testValue_host55\"}").atLeastOnce();
expect(mockClusterConfigEntity1.getTag()).andReturn("testTag1").atLeastOnce();
expect(mockClusterConfigEntity1.getType()).andReturn("testType1").atLeastOnce();
expect(mockClusterConfigEntity1.getVersion()).andReturn(1L).atLeastOnce();
+ expect(mockClusterDAO.findConfig(1L)).andReturn(mockClusterConfigEntity1).atLeastOnce();
- expect(mockClusterConfigEntity2.getStack()).andReturn(mockStackEntity).once();
+ expect(mockClusterConfigEntity2.getClusterId()).andReturn(1L).atLeastOnce();
+ expect(mockClusterConfigEntity2.getConfigId()).andReturn(2L).anyTimes();
+ expect(mockClusterConfigEntity2.getStack()).andReturn(mockStackEntity).atLeastOnce();
expect(mockClusterConfigEntity2.getData()).andReturn("{\"testProperty5\" : \"test_host1_test_host5_test_host11_test_host55\"}").atLeastOnce();
expect(mockClusterConfigEntity2.getTag()).andReturn("testTag2").atLeastOnce();
expect(mockClusterConfigEntity2.getType()).andReturn("testType2").atLeastOnce();
expect(mockClusterConfigEntity2.getVersion()).andReturn(2L).atLeastOnce();
-
- expect(mockClusterConfigEntity3.getTag()).andReturn("testTag1").atLeastOnce();
- expect(mockClusterConfigEntity3.getType()).andReturn("testType1").atLeastOnce();
- expect(mockClusterConfigEntity3.getVersion()).andReturn(1L).atLeastOnce();
-
- expect(mockClusterConfigEntity4.getTag()).andReturn("testTag2").atLeastOnce();
- expect(mockClusterConfigEntity4.getType()).andReturn("testType2").atLeastOnce();
- expect(mockClusterConfigEntity4.getVersion()).andReturn(2L).atLeastOnce();
+ expect(mockClusterDAO.findConfig(2L)).andReturn(mockClusterConfigEntity2).atLeastOnce();
Capture<String> dataCapture = EasyMock.newCapture();
- mockClusterConfigEntity3.setData(EasyMock.capture(dataCapture));
+ mockClusterConfigEntity1.setData(EasyMock.capture(dataCapture));
expectLastCall();
- mockClusterConfigEntity4.setData("{\"testProperty5\":\"test_host5_test_host1_test_host55_test_host11\"}");
+ mockClusterConfigEntity2.setData("{\"testProperty5\":\"test_host5_test_host1_test_host55_test_host11\"}");
expectLastCall();
HostUpdateHelper hostUpdateHelper = new HostUpdateHelper(null, null, mockInjector);
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
index 29f40fb..5c77831 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
@@ -65,6 +65,9 @@ import org.apache.ambari.server.security.encryption.CredentialStoreService;
import org.apache.ambari.server.stack.StackManagerFactory;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
+import org.apache.ambari.server.state.ConfigImpl;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostComponentAdminState;
import org.apache.ambari.server.state.Service;
@@ -126,6 +129,7 @@ public class StageUtilsTest extends EasyMockSupport {
bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
install(new FactoryModuleBuilder().build(ExecutionCommandWrapperFactory.class));
+ install(new FactoryModuleBuilder().implement(Config.class, ConfigImpl.class).build(ConfigFactory.class));
}
});
[04/14] ambari git commit: Merge branch 'trunk' into
branch-feature-AMBARI-18456
Posted by jo...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-18456
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0a0e9a50
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0a0e9a50
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0a0e9a50
Branch: refs/heads/trunk
Commit: 0a0e9a5005ef1c410938f6059c0d8f9cb0a16ba5
Parents: a6639a7 2c884ab
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Thu Nov 17 11:26:54 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Nov 17 11:26:54 2016 -0500
----------------------------------------------------------------------
.../main/resources/ui/admin-web/app/index.html | 1 +
.../controllers/groups/GroupsEditCtrl.js | 5 +-
.../controllers/groups/GroupsListCtrl.js | 18 +-
.../scripts/controllers/users/UsersShowCtrl.js | 8 +-
.../ui/admin-web/app/scripts/i18n.config.js | 1 +
.../ui/admin-web/app/scripts/services/Group.js | 33 +-
.../app/scripts/services/GroupConstants.js | 38 +
.../app/scripts/services/UserConstants.js | 4 +
.../ui/admin-web/app/views/groups/edit.html | 8 +-
.../ui/admin-web/app/views/groups/list.html | 2 +-
.../ui/admin-web/app/views/users/show.html | 8 +-
ambari-agent/conf/unix/ambari-agent.ini | 3 +
ambari-agent/conf/windows/ambari-agent.ini | 2 +
.../src/main/python/ambari_agent/Controller.py | 8 +-
.../ambari_agent/CustomServiceOrchestrator.py | 3 +-
.../src/main/python/ambari_agent/FileCache.py | 10 +
.../src/main/python/ambari_agent/HostCleanup.py | 87 +-
.../src/main/python/ambari_agent/NetUtil.py | 10 +-
.../TestCustomServiceOrchestrator.py | 11 +-
.../test/python/ambari_agent/TestFileCache.py | 12 +
.../test/python/ambari_agent/TestHostCleanup.py | 18 +-
.../libraries/functions/constants.py | 1 +
.../ambari-logsearch-assembly/pom.xml | 2 +
.../src/main/resources/solr | 1464 +++++++++++++++++
.../ambari-logsearch-portal/pom.xml | 6 +
.../org/apache/ambari/logsearch/LogSearch.java | 14 +-
.../ambari/logsearch/conf/AuthPropsConfig.java | 62 +
.../ambari/logsearch/conf/SecurityConfig.java | 56 +-
.../web/filters/LogsearchJWTFilter.java | 181 +++
...LogsearchSecurityContextFormationFilter.java | 3 +-
...rchUsernamePasswordAuthenticationFilter.java | 1 -
.../web/model/JWTAuthenticationToken.java | 53 +
.../src/main/resources/swagger/swagger.html | 119 ++
ambari-metrics/ambari-metrics-assembly/pom.xml | 1 -
ambari-metrics/ambari-metrics-common/pom.xml | 11 +-
.../timeline/AbstractTimelineMetricsSink.java | 20 +-
.../AbstractTimelineMetricSinkTest.java | 98 ++
.../conf/unix/metric_monitor.ini | 2 -
.../src/main/python/core/config_reader.py | 10 +-
.../src/main/python/core/emitter.py | 57 +-
.../src/main/python/core/krberr.py | 42 -
.../main/python/core/spnego_kerberos_auth.py | 163 --
.../src/test/python/core/TestEmitter.py | 26 -
.../metrics/timeline/TimelineMetricStore.java | 2 +-
ambari-server/docs/configuration/index.md | 35 +-
ambari-server/pom.xml | 10 +
ambari-server/sbin/ambari-server | 4 +
ambari-server/src/main/assemblies/server.xml | 8 +
.../server/agent/RecoveryConfigHelper.java | 71 +-
.../stackadvisor/StackAdvisorHelper.java | 18 +-
.../server/configuration/Configuration.java | 179 +-
.../AmbariManagementControllerImpl.java | 49 +-
.../ambari/server/controller/AmbariServer.java | 219 +--
.../server/controller/ControllerModule.java | 73 +-
.../ambari/server/controller/GroupResponse.java | 14 +
.../server/controller/KerberosHelperImpl.java | 6 -
.../internal/GroupResourceProvider.java | 4 +
.../internal/UpgradeResourceProvider.java | 6 +
.../internal/UserPrivilegeResourceProvider.java | 3 +
.../metrics/MetricsCollectorHAClusterState.java | 10 +
.../ambari/server/events/AmbariEvent.java | 7 +-
.../server/events/MaintenanceModeEvent.java | 30 +-
.../ambari/server/hooks/AmbariEventFactory.java | 33 +
.../apache/ambari/server/hooks/HookContext.java | 26 +
.../ambari/server/hooks/HookContextFactory.java | 44 +
.../apache/ambari/server/hooks/HookService.java | 36 +
.../users/PostUserCreationHookContext.java | 55 +
.../server/hooks/users/UserCreatedEvent.java | 45 +
.../server/hooks/users/UserHookParams.java | 49 +
.../server/hooks/users/UserHookService.java | 279 ++++
.../apache/ambari/server/orm/dao/GroupDAO.java | 19 +-
.../ambari/server/orm/dao/ResourceDAO.java | 21 +
.../ambari/server/orm/entities/GroupEntity.java | 18 +
.../server/security/ClientSecurityType.java | 3 +-
.../AmbariPamAuthenticationProvider.java | 252 +++
.../server/security/authorization/Group.java | 6 +
.../security/authorization/GroupType.java | 25 +
.../PamAuthenticationException.java | 36 +
.../server/security/authorization/UserType.java | 3 +-
.../server/security/authorization/Users.java | 173 +-
.../security/unsecured/rest/ConnectionInfo.java | 2 +-
.../serveraction/AbstractServerAction.java | 2 +-
.../server/serveraction/ServerAction.java | 4 +-
.../kerberos/MITKerberosOperationHandler.java | 19 +-
.../users/CollectionPersisterService.java | 46 +
.../CollectionPersisterServiceFactory.java | 24 +
.../users/CsvFilePersisterService.java | 103 ++
.../users/PostUserCreationHookServerAction.java | 163 ++
.../users/ShellCommandCallableFactory.java | 26 +
.../users/ShellCommandUtilityCallable.java | 48 +
.../users/ShellCommandUtilityWrapper.java | 57 +
.../ambari/server/state/ConfigHelper.java | 85 -
.../server/state/DependencyConditionInfo.java | 104 ++
.../ambari/server/state/DependencyInfo.java | 37 +-
.../ambari/server/state/host/HostImpl.java | 2 +-
.../state/stack/upgrade/HostOrderGrouping.java | 2 +-
.../ambari/server/topology/AmbariContext.java | 20 -
.../server/topology/AsyncCallableService.java | 25 +-
.../server/topology/BlueprintValidatorImpl.java | 18 +-
.../server/upgrade/AbstractUpgradeCatalog.java | 3 -
.../server/upgrade/SchemaUpgradeHelper.java | 5 -
.../server/upgrade/UpgradeCatalog150.java | 910 -----------
.../server/upgrade/UpgradeCatalog151.java | 152 --
.../server/upgrade/UpgradeCatalog160.java | 205 ---
.../server/upgrade/UpgradeCatalog161.java | 343 ----
.../server/upgrade/UpgradeCatalog170.java | 1530 ------------------
.../server/upgrade/UpgradeCatalog200.java | 8 -
.../server/upgrade/UpgradeCatalog250.java | 11 +
.../ambari/server/utils/ShellCommandUtil.java | 2 +-
ambari-server/src/main/python/ambari-server.py | 7 +-
.../python/ambari_server/resourceFilesKeeper.py | 12 +-
.../python/ambari_server/serverConfiguration.py | 11 +
.../main/python/ambari_server/serverUtils.py | 2 +-
.../main/python/ambari_server/setupActions.py | 1 +
.../main/python/ambari_server/setupMpacks.py | 10 +-
.../main/python/ambari_server/setupSecurity.py | 53 +-
.../src/main/python/ambari_server/utils.py | 12 +-
.../main/resources/Ambari-DDL-MySQL-CREATE.sql | 1 +
.../main/resources/Ambari-DDL-Oracle-CREATE.sql | 1 +
.../resources/Ambari-DDL-Postgres-CREATE.sql | 1 +
.../resources/Ambari-DDL-SQLAnywhere-CREATE.sql | 1 +
.../resources/Ambari-DDL-SQLServer-CREATE.sql | 1 +
.../0.1.0/package/scripts/params.py | 2 +-
.../0.1.0/properties/infra-solr-env.sh.j2 | 4 +-
.../AMBARI_METRICS/0.1.0/kerberos.json | 26 -
.../AMBARI_METRICS/0.1.0/package/scripts/ams.py | 13 +-
.../package/scripts/metrics_grafana_util.py | 8 +-
.../0.1.0/package/scripts/params.py | 10 +-
.../0.1.0/package/scripts/split_points.py | 29 +-
.../package/templates/metric_monitor.ini.j2 | 2 -
.../configuration/application-properties.xml | 11 +
.../HBASE/0.96.0.2.0/package/scripts/hbase.py | 46 +-
.../HDFS/2.1.0.2.0/configuration/hadoop-env.xml | 2 +-
.../common-services/HDFS/2.1.0.2.0/metainfo.xml | 43 +-
.../0.12.0.2.0/package/scripts/params_linux.py | 2 -
.../0.12.0.2.0/package/scripts/service_check.py | 10 +-
.../configuration/kafka_client_jaas_conf.xml | 41 +
.../0.8.1/configuration/kafka_jaas_conf.xml | 59 +
.../common-services/KAFKA/0.8.1/metainfo.xml | 2 +
.../KAFKA/0.8.1/package/scripts/kafka.py | 12 +
.../KAFKA/0.8.1/package/scripts/params.py | 2 +
.../logfeeder-custom-logsearch-conf.xml | 46 +
.../configuration/logsearch-properties.xml | 63 +
.../scripts/logsearch_config_aggregator.py | 14 +-
.../LOGSEARCH/0.5.0/package/scripts/params.py | 1 +
.../0.5.0/package/scripts/setup_logfeeder.py | 1 +
.../templates/HadoopServiceConfig.json.j2 | 14 +-
.../LOGSEARCH/0.5.0/themes/theme.json | 87 +-
.../MAHOUT/1.0.0.2.3/metainfo.xml | 2 +-
.../OOZIE/4.0.0.2.0/metainfo.xml | 5 +-
.../OOZIE/4.2.0.2.3/metainfo.xml | 4 +-
.../common-services/PIG/0.12.0.2.0/metainfo.xml | 2 +-
.../0.4.0/package/scripts/setup_ranger_xml.py | 24 +
.../RANGER_KMS/0.5.0.2.3/package/scripts/kms.py | 8 +
.../common-services/TEZ/0.4.0.2.1/metainfo.xml | 2 +-
.../common-services/YARN/2.1.0.2.0/metainfo.xml | 2 +-
.../0.6.0.2.5/package/scripts/master.py | 11 +-
.../src/main/resources/properties.json | 1 +
.../main/resources/scripts/Ambaripreupload.py | 2 +-
.../scripts/post-user-creation-hook.sh | 133 ++
.../HDP/2.0.6/properties/stack_features.json | 5 +
.../stacks/HDP/2.0.6/services/stack_advisor.py | 32 +-
.../services/HDFS/configuration/core-site.xml | 2 +-
.../services/HDFS/configuration/hdfs-site.xml | 2 +-
.../HDP/2.5/upgrades/host-ordered-upgrade.xml | 17 +-
.../configuration/application-properties.xml | 95 ++
.../ATLAS/configuration/atlas-log4j.xml | 118 ++
.../stacks/HDP/2.6/services/ATLAS/metainfo.xml | 10 +-
.../services/ATLAS/themes/theme_version_2.json | 845 ++++++++++
.../HDP/2.6/services/ZEPPELIN/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/stack_advisor.py | 381 ++---
.../services/HDFS/configuration/core-site.xml | 2 +-
.../services/HDFS/configuration/hdfs-site.xml | 2 +-
.../webapp/WEB-INF/spring-security.xml | 1 +
.../stackadvisor/StackAdvisorHelperTest.java | 10 +-
.../server/configuration/ConfigurationTest.java | 6 +-
.../ActiveWidgetLayoutResourceProviderTest.java | 4 +
.../StackUpgradeConfigurationMergeTest.java | 5 +
.../UserAuthorizationResourceProviderTest.java | 4 +
.../internal/UserResourceProviderTest.java | 4 +
.../AlertMaintenanceModeListenerTest.java | 2 +-
.../server/hooks/users/UserHookServiceTest.java | 224 +++
.../AmbariAuthorizationFilterTest.java | 4 +
...uthenticationProviderForDNWithSpaceTest.java | 28 +-
.../AmbariPamAuthenticationProviderTest.java | 97 ++
.../security/authorization/TestUsers.java | 10 +-
.../security/authorization/UsersTest.java | 10 +
.../PostUserCreationHookServerActionTest.java | 182 +++
.../cluster/ClusterEffectiveVersionTest.java | 5 +-
.../server/topology/AmbariContextTest.java | 2 -
.../topology/AsyncCallableServiceTest.java | 59 +-
.../server/topology/BlueprintImplTest.java | 41 -
.../topology/BlueprintValidatorImplTest.java | 75 +-
.../server/upgrade/UpgradeCatalog150Test.java | 258 ---
.../server/upgrade/UpgradeCatalog151Test.java | 120 --
.../server/upgrade/UpgradeCatalog160Test.java | 264 ---
.../server/upgrade/UpgradeCatalog161Test.java | 418 -----
.../server/upgrade/UpgradeCatalog170Test.java | 791 ---------
.../server/upgrade/UpgradeCatalog200Test.java | 2 +-
.../server/upgrade/UpgradeCatalog240Test.java | 35 +
.../server/upgrade/UpgradeCatalog250Test.java | 13 +
.../server/upgrade/UpgradeCatalogTest.java | 85 +-
ambari-server/src/test/python/TestMpacks.py | 13 +-
.../src/test/python/TestResourceFilesKeeper.py | 61 +-
.../stacks/2.0.6/HBASE/test_hbase_client.py | 14 -
.../stacks/2.0.6/HBASE/test_hbase_master.py | 21 -
.../2.0.6/HBASE/test_hbase_regionserver.py | 28 -
.../2.0.6/HBASE/test_phoenix_queryserver.py | 16 -
.../2.0.6/HIVE/test_hive_service_check.py | 43 +-
.../stacks/2.0.6/common/test_stack_advisor.py | 4 +-
.../stacks/2.4/LOGSEARCH/test_logfeeder.py | 3 +
.../stacks/2.5/RANGER/test_ranger_admin.py | 14 +
.../stacks/2.5/RANGER/test_ranger_tagsync.py | 7 +
.../stacks/2.5/RANGER/test_ranger_usersync.py | 7 +
.../stacks/2.5/RANGER_KMS/test_kms_server.py | 14 +
.../stacks/2.6/common/test_stack_advisor.py | 97 ++
ambari-web/app/config.js | 2 +-
.../main/admin/kerberos/step4_controller.js | 27 +-
.../main/admin/kerberos/wizard_controller.js | 15 +
.../alert_definitions_actions_controller.js | 4 +-
.../controllers/main/service/info/configs.js | 2 +-
ambari-web/app/data/HDP2/site_properties.js | 21 +
.../app/mappers/configs/config_groups_mapper.js | 2 +-
ambari-web/app/messages.js | 12 +-
.../main/service/configs/config_overridable.js | 1 +
ambari-web/app/models/configs/config_group.js | 2 +
.../models/configs/service_config_version.js | 2 +-
ambari-web/app/styles/alerts.less | 55 -
ambari-web/app/styles/application.less | 997 +++++-------
ambari-web/app/styles/bootstrap_overrides.less | 14 +
ambari-web/app/styles/common.less | 298 +---
ambari-web/app/styles/config_history_flow.less | 6 -
ambari-web/app/styles/modal_popups.less | 3 -
ambari-web/app/styles/stack_versions.less | 24 +-
.../app/styles/theme/bootstrap-ambari.css | 12 +-
ambari-web/app/styles/widgets.less | 4 +-
.../common/configs/selectCreateConfigGroup.hbs | 4 +-
.../templates/common/host_progress_popup.hbs | 4 +-
.../common/modal_popups/log_tail_popup.hbs | 2 +-
ambari-web/app/templates/login.hbs | 2 +-
.../highAvailability/journalNode/step2.hbs | 2 +-
.../main/alerts/definition_details.hbs | 8 +-
.../main/dashboard/plus_button_filter.hbs | 8 +-
.../main/dashboard/widgets/yarn_links.hbs | 2 +-
ambari-web/app/templates/main/host/summary.hbs | 2 +-
.../manage_configuration_groups_popup.hbs | 8 +-
.../templates/main/service/services/flume.hbs | 4 +-
ambari-web/app/templates/main/views.hbs | 5 +-
ambari-web/app/templates/wizard/step3.hbs | 4 +-
ambari-web/app/views/main/dashboard/widgets.js | 2 +
.../admin/kerberos/step4_controller_test.js | 4 +
.../host_component_recommendation_mixin_test.js | 24 +-
.../host_component_validation_mixin_test.js | 24 +-
contrib/utils/perf/deploy-gce-perf-cluster.py | 360 +++++
docs/pom.xml | 12 +
255 files changed, 8523 insertions(+), 7443 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/0a0e9a50/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/0a0e9a50/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/0a0e9a50/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
index dadeb03,82526e7..68a8d4c
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
@@@ -358,8 -330,6 +358,7 @@@ public class AmbariContextTest
expect(clusterController.ensureResourceProvider(Resource.Type.ConfigGroup)).andReturn(configGroupResourceProvider).once();
//todo: for now not using return value so just returning null
expect(configGroupResourceProvider.createResources(capture(configGroupRequestCapture))).andReturn(null).once();
- configHelper.moveDeprecatedGlobals(stackId, group1Configuration.getFullProperties(1), CLUSTER_NAME);
+
// replay all mocks
replayAll();
[11/14] ambari git commit: Merge branch 'trunk' into
branch-feature-AMBARI-18456
Posted by jo...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-18456
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3bbe75c3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3bbe75c3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3bbe75c3
Branch: refs/heads/trunk
Commit: 3bbe75c3bd7b1a913905137465c19b2c7f7c9ba9
Parents: a58c39c 3096c79
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Dec 2 22:40:18 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Dec 2 22:40:18 2016 -0500
----------------------------------------------------------------------
ambari-agent/conf/unix/ambari-agent.ini | 3 +
.../ambari_agent/CustomServiceOrchestrator.py | 120 +++++++++++++++++++
ambari-agent/src/packages/tarball/all.xml | 30 +++++
.../ambari/server/agent/ExecutionCommand.java | 28 +++++
.../ambari/server/agent/HeartBeatHandler.java | 2 +-
.../AmbariManagementControllerImpl.java | 5 +
.../configuration/logsearch-properties.xml | 2 +-
.../HDP/2.6/services/ACCUMULO/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/FALCON/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/FLUME/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/HBASE/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/HDFS/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/HIVE/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/KAFKA/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/KNOX/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/MAHOUT/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/OOZIE/metainfo.xml | 1 +
.../stacks/HDP/2.6/services/PIG/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/RANGER/metainfo.xml | 2 +-
.../HDP/2.6/services/RANGER_KMS/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/SLIDER/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/SPARK/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/SPARK2/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/SQOOP/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/STORM/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/TEZ/metainfo.xml | 2 +-
.../stacks/HDP/2.6/services/YARN/metainfo.xml | 4 +-
.../HDP/2.6/services/ZOOKEEPER/metainfo.xml | 2 +-
.../HAPPY/configuration/happy-alert-config.xml | 5 +
.../HBASE/configuration/hbase-alert-config.xml | 5 +
.../HDFS/configuration/hdfs-alert-config.xml | 5 +
.../configuration/sleepy-alert-config.xml | 5 +
.../SNOW/configuration/snow-alert-config.xml | 5 +
.../YARN/configuration/yarn-alert-config.xml | 5 +
.../ZOOKEEPER/configuration/zk-alert-config.xml | 5 +
.../server/agent/TestHeartbeatHandler.java | 28 +++--
ambari-web/app/messages.js | 1 -
.../configs/objects/service_config_property.js | 7 --
.../widgets/slider_config_widget_view.js | 7 +-
contrib/views/ambari-views-package/pom.xml | 4 +-
40 files changed, 269 insertions(+), 46 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/3bbe75c3/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
[12/14] ambari git commit: Merge branch 'trunk' into
branch-feature-AMBARI-18456
Posted by jo...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-18456
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/803f44ba
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/803f44ba
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/803f44ba
Branch: refs/heads/trunk
Commit: 803f44ba5f7411d9421b44dbacaa48f81ce3edc6
Parents: 3bbe75c 8fbc271
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Dec 5 15:21:25 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Dec 5 15:21:25 2016 -0500
----------------------------------------------------------------------
.../libraries/functions/constants.py | 1 +
.../libraries/functions/setup_atlas_hook.py | 47 +-
.../BlueprintConfigurationProcessor.java | 3 +-
.../ambari/server/topology/TopologyManager.java | 19 +-
.../server/upgrade/UpgradeCatalog250.java | 30 +
.../src/main/python/ambari_server_main.py | 8 +-
.../0.5.0.2.1/configuration/falcon-env.xml | 17 +
.../FALCON/0.5.0.2.1/metainfo.xml | 7 +
.../FALCON/0.5.0.2.1/package/scripts/falcon.py | 2 +-
.../0.5.0.2.1/package/scripts/params_linux.py | 5 +-
.../FALCON/0.5.0.2.1/quicklinks/quicklinks.json | 35 +
.../HIVE/0.12.0.2.0/configuration/hive-env.xml | 17 +
.../HIVE/0.12.0.2.0/package/scripts/hcat.py | 2 +-
.../HIVE/0.12.0.2.0/package/scripts/hive.py | 2 +-
.../0.12.0.2.0/package/scripts/params_linux.py | 5 +-
.../HIVE/0.12.0.2.0/package/scripts/webhcat.py | 2 +-
.../RANGER/0.4.0/package/scripts/params.py | 1 +
.../0.4.0/package/scripts/ranger_admin.py | 36 +-
.../SQOOP/1.4.4.2.0/configuration/sqoop-env.xml | 17 +
.../1.4.4.2.0/configuration/sqoop-site.xml | 6 +
.../1.4.4.2.0/package/scripts/params_linux.py | 5 +-
.../SQOOP/1.4.4.2.0/package/scripts/sqoop.py | 2 +-
.../STORM/0.10.0/configuration/storm-env.xml | 17 +
.../STORM/0.9.1/package/scripts/params_linux.py | 6 +-
.../STORM/0.9.1/package/scripts/storm.py | 2 +-
.../scripts/post-user-creation-hook.sh | 7 +
.../HDP/2.0.6/properties/stack_features.json | 5 +
.../services/HIVE/configuration/hive-site.xml | 4 +
.../services/STORM/configuration/storm-site.xml | 4 +
.../stacks/HDP/2.3/services/stack_advisor.py | 57 +-
.../stacks/HDP/2.5/services/stack_advisor.py | 6 -
.../BlueprintConfigurationProcessorTest.java | 11 +
.../server/upgrade/UpgradeCatalog250Test.java | 93 +++
.../stacks/2.3/common/test_stack_advisor.py | 28 +-
.../stacks/2.6/RANGER/test_ranger_admin.py | 504 ++++++++++++
.../2.6/configs/ranger-admin-default.json | 704 +++++++++++++++++
.../2.6/configs/ranger-admin-secured.json | 773 +++++++++++++++++++
ambari-web/app/styles/wizard.less | 6 +-
.../common/configs/notifications_configs.hbs | 2 +-
ambari-web/app/templates/wizard/step1.hbs | 2 +-
.../notification_configs_view.js | 18 +-
.../configs/service_configs_by_category_view.js | 9 +-
ambari-web/app/views/wizard/step1_view.js | 21 +-
.../widgets/slider_config_widget_view_test.js | 23 -
.../test/views/main/dashboard/widgets_test.js | 2 +-
contrib/utils/perf/deploy-gce-perf-cluster.py | 10 +-
.../src/main/resources/view.xml | 23 +-
contrib/views/files/src/main/resources/view.xml | 7 +
.../views/hive-next/src/main/resources/view.xml | 7 +
contrib/views/tez/src/main/resources/view.xml | 7 +
50 files changed, 2502 insertions(+), 125 deletions(-)
----------------------------------------------------------------------
[05/14] ambari git commit: AMBARI-18933 - Remove Unnecessary Locks
Inside Of ConfigGroup Business Object Implementations (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-18933 - Remove Unnecessary Locks Inside Of ConfigGroup Business Object Implementations (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ab1c1001
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ab1c1001
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ab1c1001
Branch: refs/heads/trunk
Commit: ab1c1001688b333817ac20ec5c20b635f566c353
Parents: 0a0e9a5
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Nov 18 12:48:52 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Nov 18 18:36:41 2016 -0500
----------------------------------------------------------------------
.../internal/ConfigGroupResourceProvider.java | 49 +-
.../apache/ambari/server/state/ConfigImpl.java | 14 +-
.../server/state/cluster/ClusterImpl.java | 6 +-
.../server/state/configgroup/ConfigGroup.java | 33 +-
.../state/configgroup/ConfigGroupFactory.java | 34 +-
.../state/configgroup/ConfigGroupImpl.java | 583 +++++++++----------
.../ambari/server/topology/AmbariContext.java | 2 -
.../AmbariManagementControllerTest.java | 2 -
.../ambari/server/state/ConfigGroupTest.java | 18 +-
.../ambari/server/state/ConfigHelperTest.java | 1 -
.../server/state/cluster/ClusterTest.java | 7 -
.../svccomphost/ServiceComponentHostTest.java | 3 -
.../server/topology/AmbariContextTest.java | 1 -
13 files changed, 342 insertions(+), 411 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
index b957f0a..2373068 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
@@ -17,7 +17,16 @@
*/
package org.apache.ambari.server.controller.internal;
-import com.google.inject.Inject;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.ClusterNotFoundException;
import org.apache.ambari.server.ConfigGroupNotFoundException;
@@ -49,7 +58,6 @@ import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigFactory;
-import org.apache.ambari.server.state.ConfigImpl;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.configgroup.ConfigGroup;
import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
@@ -57,15 +65,7 @@ import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import com.google.inject.Inject;
@StaticallyInject
public class ConfigGroupResourceProvider extends
@@ -101,7 +101,7 @@ public class ConfigGroupResourceProvider extends
@Inject
private static HostDAO hostDAO;
-
+
/**
* Used for creating {@link Config} instances to return in the REST response.
*/
@@ -575,22 +575,19 @@ public class ConfigGroupResourceProvider extends
}
}
+ configLogger.info("User {} is creating new configuration group {} for tag {} in cluster {}",
+ getManagementController().getAuthName(), request.getGroupName(), request.getTag(),
+ cluster.getClusterName());
+
ConfigGroup configGroup = configGroupFactory.createNew(cluster,
request.getGroupName(),
request.getTag(), request.getDescription(),
request.getConfigs(), hosts);
- verifyConfigs(configGroup.getConfigurations(), cluster.getClusterName());
configGroup.setServiceName(serviceName);
- // Persist before add, since id is auto-generated
- configLogger.info("Persisting new Config group"
- + ", clusterName = " + cluster.getClusterName()
- + ", name = " + configGroup.getName()
- + ", tag = " + configGroup.getTag()
- + ", user = " + getManagementController().getAuthName());
+ verifyConfigs(configGroup.getConfigurations(), cluster.getClusterName());
- configGroup.persist();
cluster.addConfigGroup(configGroup);
if (serviceName != null) {
cluster.createServiceConfigVersion(serviceName, getManagementController().getAuthName(),
@@ -641,6 +638,11 @@ public class ConfigGroupResourceProvider extends
+ ", clusterName = " + request.getClusterName()
+ ", groupId = " + request.getId());
}
+
+ configLogger.info("User {} is updating configuration group {} for tag {} in cluster {}",
+ getManagementController().getAuthName(), request.getGroupName(), request.getTag(),
+ cluster.getClusterName());
+
String serviceName = configGroup.getServiceName();
String requestServiceName = cluster.getServiceForConfigTypes(request.getConfigs().keySet());
if (StringUtils.isEmpty(serviceName) && StringUtils.isEmpty(requestServiceName)) {
@@ -689,13 +691,6 @@ public class ConfigGroupResourceProvider extends
configGroup.setDescription(request.getDescription());
configGroup.setTag(request.getTag());
- configLogger.info("Persisting updated Config group"
- + ", clusterName = " + configGroup.getClusterName()
- + ", id = " + configGroup.getId()
- + ", tag = " + configGroup.getTag()
- + ", user = " + getManagementController().getAuthName());
-
- configGroup.persist();
if (serviceName != null) {
cluster.createServiceConfigVersion(serviceName, getManagementController().getAuthName(),
request.getServiceConfigVersionNote(), configGroup);
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
index e68839f..052ee28 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
@@ -23,12 +23,13 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.concurrent.locks.ReadWriteLock;
import javax.annotation.Nullable;
import org.apache.ambari.server.events.ClusterConfigChangedEvent;
import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
+import org.apache.ambari.server.logging.LockFactory;
import org.apache.ambari.server.orm.dao.ClusterDAO;
import org.apache.ambari.server.orm.dao.ServiceConfigDAO;
import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
@@ -71,7 +72,7 @@ public class ConfigImpl implements Config {
*
* @see #properties
*/
- private final ReentrantReadWriteLock propertyLock = new ReentrantReadWriteLock();
+ private final ReadWriteLock propertyLock;
/**
* The property attributes for this configuration.
@@ -94,7 +95,9 @@ public class ConfigImpl implements Config {
@Assisted("tag") @Nullable String tag,
@Assisted Map<String, String> properties,
@Assisted @Nullable Map<String, Map<String, String>> propertiesAttributes, ClusterDAO clusterDAO,
- Gson gson, AmbariEventPublisher eventPublisher) {
+ Gson gson, AmbariEventPublisher eventPublisher, LockFactory lockFactory) {
+
+ propertyLock = lockFactory.newReadWriteLock("configurationPropertyLock");
this.cluster = cluster;
this.type = type;
@@ -140,7 +143,8 @@ public class ConfigImpl implements Config {
@AssistedInject
ConfigImpl(@Assisted Cluster cluster, @Assisted ClusterConfigEntity entity,
- ClusterDAO clusterDAO, Gson gson, AmbariEventPublisher eventPublisher) {
+ ClusterDAO clusterDAO, Gson gson, AmbariEventPublisher eventPublisher,
+ LockFactory lockFactory) {
this.cluster = cluster;
this.clusterDAO = clusterDAO;
this.gson = gson;
@@ -333,7 +337,7 @@ public class ConfigImpl implements Config {
* Persist the cluster and configuration entities in their own transaction.
*/
@Transactional
- private void persistEntitiesInTransaction(ClusterConfigEntity entity) {
+ void persistEntitiesInTransaction(ClusterConfigEntity entity) {
ClusterEntity clusterEntity = entity.getClusterEntity();
clusterDAO.createConfig(entity);
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 8b157c7..6be36dd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -326,8 +326,11 @@ public class ClusterImpl implements Cluster {
loadStackVersion();
loadServices();
loadServiceHostComponents();
- loadConfigGroups();
+
+ // cache configurations before loading configuration groups
cacheConfigurations();
+ loadConfigGroups();
+
loadRequestExecutions();
if (desiredStackVersion != null && !StringUtils.isEmpty(desiredStackVersion.getStackName()) && !
@@ -2568,7 +2571,6 @@ public class ClusterImpl implements Cluster {
}
}
configGroup.setHosts(groupDesiredHosts);
- configGroup.persist();
} else {
throw new IllegalArgumentException("Config group {} doesn't exist");
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroup.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroup.java b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroup.java
index 1b29c9b..5a9c574 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroup.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroup.java
@@ -18,13 +18,13 @@
package org.apache.ambari.server.state.configgroup;
+import java.util.Map;
+
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.controller.ConfigGroupResponse;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.Host;
-import java.util.Map;
-
/**
* Configuration group or Config group is a type of Ambari resource that
* supports grouping of configuration resources and host resources for a
@@ -80,29 +80,20 @@ public interface ConfigGroup {
public void setDescription(String description);
/**
- * List of hosts to which configs are applied
+ * Gets an unmodifiable list of {@link Host}s.
+ *
* @return
*/
public Map<Long, Host> getHosts();
/**
- * List of @Config objects
+ * Gets an unmodifiable map of {@link Config}s.
+ *
* @return
*/
public Map<String, Config> getConfigurations();
/**
- * Persist the Config group along with the related host and config mapping
- * entities to the persistence store
- */
- void persist();
-
- /**
- * Persist the host mapping entity to the persistence store
- */
- void persistHostMapping();
-
- /**
* Delete config group and the related host and config mapping
* entities from the persistence store
*/
@@ -116,13 +107,6 @@ public interface ConfigGroup {
public void addHost(Host host) throws AmbariException;
/**
- * Add config to the config group
- * @param config
- * @throws AmbariException
- */
- public void addConfiguration(Config config) throws AmbariException;
-
- /**
* Return @ConfigGroupResponse for the config group
*
* @return @ConfigGroupResponse
@@ -131,11 +115,6 @@ public interface ConfigGroup {
public ConfigGroupResponse convertToResponse() throws AmbariException;
/**
- * Refresh Config group and the host and config mappings for the group
- */
- public void refresh();
-
- /**
* Reassign the set of hosts associated with this config group
* @param hosts
*/
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupFactory.java
index 9abadf3..906d948 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupFactory.java
@@ -17,22 +17,38 @@
*/
package org.apache.ambari.server.state.configgroup;
-import com.google.inject.assistedinject.Assisted;
+import java.util.Map;
+
import org.apache.ambari.server.orm.entities.ConfigGroupEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.Host;
-import org.apache.ambari.server.state.configgroup.ConfigGroup;
-import java.util.Map;
+import com.google.inject.assistedinject.Assisted;
public interface ConfigGroupFactory {
- ConfigGroup createNew(@Assisted("cluster") Cluster cluster,
- @Assisted("name") String name,
- @Assisted("tag") String tag,
- @Assisted("description") String description,
- @Assisted("configs") Map<String, Config> configs,
- @Assisted("hosts") Map<Long, Host> hosts);
+ /**
+ * Creates and saves a new {@link ConfigGroup}.
+ *
+ * @param cluster
+ * @param name
+ * @param tag
+ * @param description
+ * @param configs
+ * @param hosts
+ * @param serviceName
+ * @return
+ */
+ ConfigGroup createNew(@Assisted("cluster") Cluster cluster, @Assisted("name") String name,
+ @Assisted("tag") String tag, @Assisted("description") String description,
+ @Assisted("configs") Map<String, Config> configs, @Assisted("hosts") Map<Long, Host> hosts);
+ /**
+   * Instantiates a {@link ConfigGroup} from an existing, persisted entity.
+ *
+ * @param cluster
+ * @param entity
+ * @return
+ */
ConfigGroup createExisting(Cluster cluster, ConfigGroupEntity entity);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
index 9a2fc88..fe1f338 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
@@ -17,19 +17,22 @@
*/
package org.apache.ambari.server.state.configgroup;
+import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.DuplicateResourceException;
import org.apache.ambari.server.controller.ConfigGroupResponse;
import org.apache.ambari.server.controller.internal.ConfigurationResourceProvider;
+import org.apache.ambari.server.logging.LockFactory;
import org.apache.ambari.server.orm.dao.ClusterDAO;
import org.apache.ambari.server.orm.dao.ConfigGroupConfigMappingDAO;
import org.apache.ambari.server.orm.dao.ConfigGroupDAO;
@@ -50,212 +53,190 @@ import org.apache.ambari.server.state.Host;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.gson.Gson;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;
import com.google.inject.persist.Transactional;
public class ConfigGroupImpl implements ConfigGroup {
private static final Logger LOG = LoggerFactory.getLogger(ConfigGroupImpl.class);
- private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
private Cluster cluster;
- private ConfigGroupEntity configGroupEntity;
- private Map<Long, Host> hosts;
- private Map<String, Config> configurations;
- private volatile boolean isPersisted = false;
-
- @Inject
- private Gson gson;
- @Inject
- private ConfigGroupDAO configGroupDAO;
- @Inject
- private ConfigGroupConfigMappingDAO configGroupConfigMappingDAO;
- @Inject
- private ConfigGroupHostMappingDAO configGroupHostMappingDAO;
- @Inject
- private HostDAO hostDAO;
- @Inject
- private ClusterDAO clusterDAO;
- @Inject
- private Clusters clusters;
-
- @Inject
- private ConfigFactory configFactory;
+ private ConcurrentMap<Long, Host> m_hosts;
+ private ConcurrentMap<String, Config> m_configurations;
+ private String configGroupName;
+ private long configGroupId;
+
+ /**
+ * This lock is required to prevent inconsistencies in internal state between
+ * {@link #m_hosts} and the entities stored by the {@link ConfigGroupEntity}.
+ */
+ private final ReadWriteLock hostLock;
+
+ /**
+ * A label for {@link #hostLock} to use with the {@link LockFactory}.
+ */
+ private static final String hostLockLabel = "configurationGroupHostLock";
+
+ private final ConfigGroupDAO configGroupDAO;
+
+ private final ConfigGroupConfigMappingDAO configGroupConfigMappingDAO;
+
+ private final ConfigGroupHostMappingDAO configGroupHostMappingDAO;
+
+ private final HostDAO hostDAO;
+
+ private final ClusterDAO clusterDAO;
+
+ private final ConfigFactory configFactory;
@AssistedInject
- public ConfigGroupImpl(@Assisted("cluster") Cluster cluster,
- @Assisted("name") String name,
- @Assisted("tag") String tag,
- @Assisted("description") String description,
- @Assisted("configs") Map<String, Config> configs,
- @Assisted("hosts") Map<Long, Host> hosts,
- Injector injector) {
- injector.injectMembers(this);
+ public ConfigGroupImpl(@Assisted("cluster") Cluster cluster, @Assisted("name") String name,
+ @Assisted("tag") String tag, @Assisted("description") String description,
+ @Assisted("configs") Map<String, Config> configurations,
+ @Assisted("hosts") Map<Long, Host> hosts, Clusters clusters, ConfigFactory configFactory,
+ ClusterDAO clusterDAO, HostDAO hostDAO, ConfigGroupDAO configGroupDAO,
+ ConfigGroupConfigMappingDAO configGroupConfigMappingDAO,
+ ConfigGroupHostMappingDAO configGroupHostMappingDAO, LockFactory lockFactory) {
+
+ this.configFactory = configFactory;
+ this.clusterDAO = clusterDAO;
+ this.hostDAO = hostDAO;
+ this.configGroupDAO = configGroupDAO;
+ this.configGroupConfigMappingDAO = configGroupConfigMappingDAO;
+ this.configGroupHostMappingDAO = configGroupHostMappingDAO;
+
+ hostLock = lockFactory.newReadWriteLock(hostLockLabel);
+
this.cluster = cluster;
+ configGroupName = name;
- configGroupEntity = new ConfigGroupEntity();
+ ConfigGroupEntity configGroupEntity = new ConfigGroupEntity();
configGroupEntity.setClusterId(cluster.getClusterId());
configGroupEntity.setGroupName(name);
configGroupEntity.setTag(tag);
configGroupEntity.setDescription(description);
- if (hosts != null) {
- this.hosts = hosts;
- } else {
- this.hosts = new HashMap<Long, Host>();
- }
+ m_hosts = hosts == null ? new ConcurrentHashMap<Long, Host>()
+ : new ConcurrentHashMap<>(hosts);
- if (configs != null) {
- configurations = configs;
- } else {
- configurations = new HashMap<String, Config>();
- }
+ m_configurations = configurations == null ? new ConcurrentHashMap<String, Config>()
+ : new ConcurrentHashMap<>(configurations);
+
+ // save the entity and grab the ID
+ persist(configGroupEntity);
+ configGroupId = configGroupEntity.getGroupId();
}
@AssistedInject
- public ConfigGroupImpl(@Assisted Cluster cluster,
- @Assisted ConfigGroupEntity configGroupEntity,
- Injector injector) {
- injector.injectMembers(this);
+ public ConfigGroupImpl(@Assisted Cluster cluster, @Assisted ConfigGroupEntity configGroupEntity,
+ Clusters clusters, ConfigFactory configFactory,
+ ClusterDAO clusterDAO, HostDAO hostDAO, ConfigGroupDAO configGroupDAO,
+ ConfigGroupConfigMappingDAO configGroupConfigMappingDAO,
+ ConfigGroupHostMappingDAO configGroupHostMappingDAO, LockFactory lockFactory) {
+
+ this.configFactory = configFactory;
+ this.clusterDAO = clusterDAO;
+ this.hostDAO = hostDAO;
+ this.configGroupDAO = configGroupDAO;
+ this.configGroupConfigMappingDAO = configGroupConfigMappingDAO;
+ this.configGroupHostMappingDAO = configGroupHostMappingDAO;
+
+ hostLock = lockFactory.newReadWriteLock(hostLockLabel);
+
this.cluster = cluster;
+ configGroupId = configGroupEntity.getGroupId();
+ configGroupName = configGroupEntity.getGroupName();
- this.configGroupEntity = configGroupEntity;
- configurations = new HashMap<String, Config>();
- hosts = new HashMap<Long, Host>();
+ m_configurations = new ConcurrentHashMap<String, Config>();
+ m_hosts = new ConcurrentHashMap<Long, Host>();
// Populate configs
- for (ConfigGroupConfigMappingEntity configMappingEntity : configGroupEntity
- .getConfigGroupConfigMappingEntities()) {
-
+ for (ConfigGroupConfigMappingEntity configMappingEntity : configGroupEntity.getConfigGroupConfigMappingEntities()) {
Config config = cluster.getConfig(configMappingEntity.getConfigType(),
configMappingEntity.getVersionTag());
if (config != null) {
- configurations.put(config.getType(), config);
+ m_configurations.put(config.getType(), config);
} else {
- LOG.warn("Unable to find config mapping for config group"
- + ", clusterName = " + cluster.getClusterName()
- + ", type = " + configMappingEntity.getConfigType()
- + ", tag = " + configMappingEntity.getVersionTag());
+ LOG.warn("Unable to find config mapping {}/{} for config group in cluster {}",
+ configMappingEntity.getConfigType(), configMappingEntity.getVersionTag(),
+ cluster.getClusterName());
}
}
// Populate Hosts
- for (ConfigGroupHostMappingEntity hostMappingEntity : configGroupEntity
- .getConfigGroupHostMappingEntities()) {
-
+ for (ConfigGroupHostMappingEntity hostMappingEntity : configGroupEntity.getConfigGroupHostMappingEntities()) {
try {
Host host = clusters.getHost(hostMappingEntity.getHostname());
HostEntity hostEntity = hostMappingEntity.getHostEntity();
if (host != null && hostEntity != null) {
- hosts.put(hostEntity.getHostId(), host);
+ m_hosts.put(hostEntity.getHostId(), host);
}
} catch (AmbariException e) {
- String msg = "Host seems to be deleted but Config group mapping still " +
- "exists !";
- LOG.warn(msg);
- LOG.debug(msg, e);
+ LOG.warn("Host seems to be deleted but Config group mapping still exists !");
+ LOG.debug("Host seems to be deleted but Config group mapping still exists !", e);
}
}
-
- isPersisted = true;
}
@Override
public Long getId() {
- return configGroupEntity.getGroupId();
+ return configGroupId;
}
@Override
public String getName() {
- readWriteLock.readLock().lock();
- try {
- return configGroupEntity.getGroupName();
- } finally {
- readWriteLock.readLock().unlock();
- }
+ return configGroupName;
}
@Override
public void setName(String name) {
- readWriteLock.writeLock().lock();
- try {
- configGroupEntity.setGroupName(name);
- } finally {
- readWriteLock.writeLock().unlock();
- }
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+ configGroupEntity.setGroupName(name);
+ configGroupDAO.merge(configGroupEntity);
+ configGroupName = name;
}
@Override
public String getClusterName() {
- return configGroupEntity.getClusterEntity().getClusterName();
+ return cluster.getClusterName();
}
@Override
public String getTag() {
- readWriteLock.readLock().lock();
- try {
- return configGroupEntity.getTag();
- } finally {
- readWriteLock.readLock().unlock();
- }
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+ return configGroupEntity.getTag();
}
@Override
public void setTag(String tag) {
- readWriteLock.writeLock().lock();
- try {
- configGroupEntity.setTag(tag);
- } finally {
- readWriteLock.writeLock().unlock();
- }
-
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+ configGroupEntity.setTag(tag);
+ configGroupDAO.merge(configGroupEntity);
}
@Override
public String getDescription() {
- readWriteLock.readLock().lock();
- try {
- return configGroupEntity.getDescription();
- } finally {
- readWriteLock.readLock().unlock();
- }
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+ return configGroupEntity.getDescription();
}
@Override
public void setDescription(String description) {
- readWriteLock.writeLock().lock();
- try {
- configGroupEntity.setDescription(description);
- } finally {
- readWriteLock.writeLock().unlock();
- }
-
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+ configGroupEntity.setDescription(description);
+ configGroupDAO.merge(configGroupEntity);
}
@Override
public Map<Long, Host> getHosts() {
- readWriteLock.readLock().lock();
- try {
- return Collections.unmodifiableMap(hosts);
- } finally {
- readWriteLock.readLock().unlock();
- }
+ return Collections.unmodifiableMap(m_hosts);
}
@Override
public Map<String, Config> getConfigurations() {
- readWriteLock.readLock().lock();
- try {
- return Collections.unmodifiableMap(configurations);
- } finally {
- readWriteLock.readLock().unlock();
- }
-
+ return Collections.unmodifiableMap(m_configurations);
}
/**
@@ -264,13 +245,14 @@ public class ConfigGroupImpl implements ConfigGroup {
*/
@Override
public void setHosts(Map<Long, Host> hosts) {
- readWriteLock.writeLock().lock();
+ hostLock.writeLock().lock();
try {
- this.hosts = hosts;
+ // persist entities in a transaction first, then update internal state
+ replaceHostMappings(hosts);
+ m_hosts = new ConcurrentHashMap<>(hosts);
} finally {
- readWriteLock.writeLock().unlock();
+ hostLock.writeLock().unlock();
}
-
}
/**
@@ -278,115 +260,140 @@ public class ConfigGroupImpl implements ConfigGroup {
* @param configs
*/
@Override
- public void setConfigurations(Map<String, Config> configs) {
- readWriteLock.writeLock().lock();
- try {
- configurations = configs;
- } finally {
- readWriteLock.writeLock().unlock();
- }
-
+ public void setConfigurations(Map<String, Config> configurations) {
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+ ClusterEntity clusterEntity = configGroupEntity.getClusterEntity();
+
+ // only update the internal state after the configurations have been
+ // persisted
+ persistConfigMapping(clusterEntity, configGroupEntity, configurations);
+ m_configurations = new ConcurrentHashMap<>(configurations);
}
@Override
- @Transactional
public void removeHost(Long hostId) throws AmbariException {
- readWriteLock.writeLock().lock();
+ hostLock.writeLock().lock();
try {
- if (hosts.containsKey(hostId)) {
- String hostName = hosts.get(hostId).getHostName();
- LOG.info("Removing host from config group, hostid = " + hostId + ", hostname = " + hostName);
- hosts.remove(hostId);
- try {
- ConfigGroupHostMappingEntityPK hostMappingEntityPK = new
- ConfigGroupHostMappingEntityPK();
- hostMappingEntityPK.setHostId(hostId);
- hostMappingEntityPK.setConfigGroupId(configGroupEntity.getGroupId());
- configGroupHostMappingDAO.removeByPK(hostMappingEntityPK);
- } catch (Exception e) {
- LOG.error("Failed to delete config group host mapping"
- + ", clusterName = " + getClusterName()
- + ", id = " + getId()
- + ", hostid = " + hostId
- + ", hostname = " + hostName, e);
- throw new AmbariException(e.getMessage());
- }
+ Host host = m_hosts.get(hostId);
+ if (null == host) {
+ return;
}
- } finally {
- readWriteLock.writeLock().unlock();
- }
- }
- @Override
- public void persist() {
- readWriteLock.writeLock().lock();
- try {
- if (!isPersisted) {
- persistEntities();
- refresh();
- cluster.refresh();
- isPersisted = true;
- } else {
- saveIfPersisted();
+ String hostName = host.getHostName();
+ LOG.info("Removing host (id={}, name={}) from config group", host.getHostId(), hostName);
+
+ try {
+ // remove the entities first, then update internal state
+ removeConfigGroupHostEntity(host);
+ m_hosts.remove(hostId);
+ } catch (Exception e) {
+ LOG.error("Failed to delete config group host mapping for cluster {} and host {}",
+ cluster.getClusterName(), hostName, e);
+
+ throw new AmbariException(e.getMessage());
}
} finally {
- readWriteLock.writeLock().unlock();
+ hostLock.writeLock().unlock();
}
}
/**
+ * Removes the {@link ConfigGroupHostMappingEntity} for the specified host
+ * from this configuration group.
+ *
+ * @param host
+ * the host to remove.
+ */
+ @Transactional
+ void removeConfigGroupHostEntity(Host host) {
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+ ConfigGroupHostMappingEntityPK hostMappingEntityPK = new ConfigGroupHostMappingEntityPK();
+ hostMappingEntityPK.setHostId(host.getHostId());
+ hostMappingEntityPK.setConfigGroupId(configGroupId);
+
+ ConfigGroupHostMappingEntity configGroupHostMapping = configGroupHostMappingDAO.findByPK(
+ hostMappingEntityPK);
+
+ configGroupHostMappingDAO.remove(configGroupHostMapping);
+
+ configGroupEntity.getConfigGroupHostMappingEntities().remove(configGroupHostMapping);
+ configGroupEntity = configGroupDAO.merge(getConfigGroupEntity());
+ }
+
+ /**
+ * @param configGroupEntity
+ */
+ private void persist(ConfigGroupEntity configGroupEntity) {
+ persistEntities(configGroupEntity);
+ cluster.refresh();
+ }
+
+ /**
* Persist Config group with host mapping and configurations
*
* @throws Exception
*/
@Transactional
- void persistEntities() {
+ void persistEntities(ConfigGroupEntity configGroupEntity) {
ClusterEntity clusterEntity = clusterDAO.findById(cluster.getClusterId());
configGroupEntity.setClusterEntity(clusterEntity);
configGroupEntity.setTimestamp(System.currentTimeMillis());
configGroupDAO.create(configGroupEntity);
- persistConfigMapping(clusterEntity);
- persistHostMapping();
- }
+ configGroupId = configGroupEntity.getGroupId();
- // TODO: Test rollback scenario
+ persistConfigMapping(clusterEntity, configGroupEntity, m_configurations);
+ replaceHostMappings(m_hosts);
+ }
/**
- * Persist host mapping
+ * Replaces all existing host mappings with the new collection of hosts.
*
+ * @param hosts
+ * the new hosts
* @throws Exception
*/
- @Override
@Transactional
- public void persistHostMapping() {
- if (isPersisted) {
- // Delete existing mappings and create new ones
- configGroupHostMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
- configGroupEntity.setConfigGroupHostMappingEntities(new HashSet<ConfigGroupHostMappingEntity>());
- }
+ void replaceHostMappings(Map<Long, Host> hosts) {
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+
+ // Delete existing mappings and create new ones
+ configGroupHostMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
+ configGroupEntity.setConfigGroupHostMappingEntities(
+ new HashSet<ConfigGroupHostMappingEntity>());
if (hosts != null && !hosts.isEmpty()) {
- for (Host host : hosts.values()) {
- HostEntity hostEntity = hostDAO.findById(host.getHostId());
- if (hostEntity != null) {
- ConfigGroupHostMappingEntity hostMappingEntity = new
- ConfigGroupHostMappingEntity();
- hostMappingEntity.setHostId(hostEntity.getHostId());
- hostMappingEntity.setHostEntity(hostEntity);
- hostMappingEntity.setConfigGroupEntity(configGroupEntity);
- hostMappingEntity.setConfigGroupId(configGroupEntity.getGroupId());
- configGroupEntity.getConfigGroupHostMappingEntities().add
- (hostMappingEntity);
- configGroupHostMappingDAO.create(hostMappingEntity);
- } else {
- LOG.warn("Host seems to be deleted, cannot create host to config " +
- "group mapping, host = " + host.getHostName());
- }
+ configGroupEntity = persistHostMapping(hosts.values(), configGroupEntity);
+ }
+ }
+
+ /**
+ * Adds the collection of hosts to the configuration group.
+ *
+ * @param hosts
+ * @param configGroupEntity
+ */
+ @Transactional
+ ConfigGroupEntity persistHostMapping(Collection<Host> hosts,
+ ConfigGroupEntity configGroupEntity) {
+ for (Host host : hosts) {
+ HostEntity hostEntity = hostDAO.findById(host.getHostId());
+ if (hostEntity != null) {
+ ConfigGroupHostMappingEntity hostMappingEntity = new ConfigGroupHostMappingEntity();
+ hostMappingEntity.setHostId(hostEntity.getHostId());
+ hostMappingEntity.setHostEntity(hostEntity);
+ hostMappingEntity.setConfigGroupEntity(configGroupEntity);
+ hostMappingEntity.setConfigGroupId(configGroupEntity.getGroupId());
+ configGroupEntity.getConfigGroupHostMappingEntities().add(hostMappingEntity);
+ configGroupHostMappingDAO.create(hostMappingEntity);
+ } else {
+ LOG.warn(
+ "The host {} has been removed from the cluster and cannot be added to the configuration group {}",
+ host.getHostName(), configGroupName);
}
}
- // TODO: Make sure this does not throw Nullpointer based on JPA docs
- configGroupEntity = configGroupDAO.merge(configGroupEntity);
+
+ return configGroupDAO.merge(configGroupEntity);
}
/**
@@ -396,11 +403,11 @@ public class ConfigGroupImpl implements ConfigGroup {
* @throws Exception
*/
@Transactional
- void persistConfigMapping(ClusterEntity clusterEntity) {
- if (isPersisted) {
- configGroupConfigMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
- configGroupEntity.setConfigGroupConfigMappingEntities(new HashSet<ConfigGroupConfigMappingEntity>());
- }
+ void persistConfigMapping(ClusterEntity clusterEntity,
+ ConfigGroupEntity configGroupEntity, Map<String, Config> configurations) {
+ configGroupConfigMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
+ configGroupEntity.setConfigGroupConfigMappingEntities(
+ new HashSet<ConfigGroupConfigMappingEntity>());
if (configurations != null && !configurations.isEmpty()) {
for (Entry<String, Config> entry : configurations.entrySet()) {
@@ -437,142 +444,84 @@ public class ConfigGroupImpl implements ConfigGroup {
}
}
- void saveIfPersisted() {
- if (isPersisted) {
- save(clusterDAO.findById(cluster.getClusterId()));
- }
- }
-
- @Transactional
- void save(ClusterEntity clusterEntity) {
- persistHostMapping();
- persistConfigMapping(clusterEntity);
- }
-
@Override
+ @Transactional
public void delete() {
- readWriteLock.writeLock().lock();
- try {
- configGroupConfigMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
- configGroupHostMappingDAO.removeAllByGroup(configGroupEntity.getGroupId());
- configGroupDAO.removeByPK(configGroupEntity.getGroupId());
- cluster.refresh();
- isPersisted = false;
- } finally {
- readWriteLock.writeLock().unlock();
- }
+ configGroupConfigMappingDAO.removeAllByGroup(configGroupId);
+ configGroupHostMappingDAO.removeAllByGroup(configGroupId);
+ configGroupDAO.removeByPK(configGroupId);
+ cluster.refresh();
}
@Override
public void addHost(Host host) throws AmbariException {
- readWriteLock.writeLock().lock();
+ hostLock.writeLock().lock();
try {
- if (hosts != null && !hosts.isEmpty()) {
- for (Host h : hosts.values()) {
- if (h.getHostName().equals(host.getHostName())) {
- throw new DuplicateResourceException("Host " + h.getHostName() +
- "is already associated with Config Group " +
- configGroupEntity.getGroupName());
- }
- }
- HostEntity hostEntity = hostDAO.findByName(host.getHostName());
- if (hostEntity != null) {
- hosts.put(hostEntity.getHostId(), host);
- }
- }
- } finally {
- readWriteLock.writeLock().unlock();
- }
- }
+ if (m_hosts.containsKey(host.getHostId())) {
+ String message = String.format(
+ "Host %s is already associated with the configuration group %s", host.getHostName(),
+ configGroupName);
- @Override
- public void addConfiguration(Config config) throws AmbariException {
- readWriteLock.writeLock().lock();
- try {
- if (configurations != null && !configurations.isEmpty()) {
- for (Config c : configurations.values()) {
- if (c.getType().equals(config.getType()) && c.getTag().equals
- (config.getTag())) {
- throw new DuplicateResourceException("Config " + config.getType() +
- " with tag " + config.getTag() + " is already associated " +
- "with Config Group " + configGroupEntity.getGroupName());
- }
- }
- configurations.put(config.getType(), config);
+ throw new DuplicateResourceException(message);
}
+
+ // ensure that we only update the in-memory structure if the merge was
+ // successful
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+ persistHostMapping(Collections.singletonList(host), configGroupEntity);
+ m_hosts.putIfAbsent(host.getHostId(), host);
} finally {
- readWriteLock.writeLock().unlock();
+ hostLock.writeLock().unlock();
}
}
@Override
public ConfigGroupResponse convertToResponse() throws AmbariException {
- readWriteLock.readLock().lock();
- try {
- Set<Map<String, Object>> hostnames = new HashSet<Map<String, Object>>();
- for (Host host : hosts.values()) {
- Map<String, Object> hostMap = new HashMap<String, Object>();
- hostMap.put("host_name", host.getHostName());
- hostnames.add(hostMap);
- }
+ Set<Map<String, Object>> hostnames = new HashSet<Map<String, Object>>();
+ for (Host host : m_hosts.values()) {
+ Map<String, Object> hostMap = new HashMap<String, Object>();
+ hostMap.put("host_name", host.getHostName());
+ hostnames.add(hostMap);
+ }
- Set<Map<String, Object>> configObjMap = new HashSet<Map<String, Object>>();
+ Set<Map<String, Object>> configObjMap = new HashSet<Map<String, Object>>();
- for (Config config : configurations.values()) {
- Map<String, Object> configMap = new HashMap<String, Object>();
- configMap.put(ConfigurationResourceProvider.CONFIGURATION_CONFIG_TYPE_PROPERTY_ID,
- config.getType());
- configMap.put(ConfigurationResourceProvider.CONFIGURATION_CONFIG_TAG_PROPERTY_ID,
- config.getTag());
- configObjMap.add(configMap);
- }
-
- ConfigGroupResponse configGroupResponse = new ConfigGroupResponse(
- configGroupEntity.getGroupId(), cluster.getClusterName(),
- configGroupEntity.getGroupName(), configGroupEntity.getTag(),
- configGroupEntity.getDescription(), hostnames, configObjMap);
- return configGroupResponse;
- } finally {
- readWriteLock.readLock().unlock();
+ for (Config config : m_configurations.values()) {
+ Map<String, Object> configMap = new HashMap<String, Object>();
+ configMap.put(ConfigurationResourceProvider.CONFIGURATION_CONFIG_TYPE_PROPERTY_ID,
+ config.getType());
+ configMap.put(ConfigurationResourceProvider.CONFIGURATION_CONFIG_TAG_PROPERTY_ID,
+ config.getTag());
+ configObjMap.add(configMap);
}
- }
- @Override
- @Transactional
- public void refresh() {
- readWriteLock.writeLock().lock();
- try {
- if (isPersisted) {
- ConfigGroupEntity groupEntity = configGroupDAO.findById
- (configGroupEntity.getGroupId());
- configGroupDAO.refresh(groupEntity);
- // TODO What other entities should refresh?
- }
- } finally {
- readWriteLock.writeLock().unlock();
- }
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+ ConfigGroupResponse configGroupResponse = new ConfigGroupResponse(
+ configGroupEntity.getGroupId(), cluster.getClusterName(),
+ configGroupEntity.getGroupName(), configGroupEntity.getTag(),
+ configGroupEntity.getDescription(), hostnames, configObjMap);
+ return configGroupResponse;
}
-
@Override
public String getServiceName() {
- readWriteLock.readLock().lock();
- try {
- return configGroupEntity.getServiceName();
- } finally {
- readWriteLock.readLock().unlock();
- }
-
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+ return configGroupEntity.getServiceName();
}
@Override
public void setServiceName(String serviceName) {
- readWriteLock.writeLock().lock();
- try {
- configGroupEntity.setServiceName(serviceName);
- } finally {
- readWriteLock.writeLock().unlock();
- }
+ ConfigGroupEntity configGroupEntity = getConfigGroupEntity();
+ configGroupEntity.setServiceName(serviceName);
+ configGroupDAO.merge(configGroupEntity);
+ }
+ /**
+ * Gets the {@link ConfigGroupEntity} by its ID from the JPA cache.
+ *
+ * @return the entity.
+ */
+ private ConfigGroupEntity getConfigGroupEntity() {
+ return configGroupDAO.findById(configGroupId);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
index ed43ee1..5e887d4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
@@ -71,7 +71,6 @@ import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.DesiredConfig;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.SecurityType;
-import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.configgroup.ConfigGroup;
import org.apache.ambari.server.utils.RetryHelper;
import org.slf4j.Logger;
@@ -558,7 +557,6 @@ public class AmbariContext {
addedHost = true;
if (! group.getHosts().containsKey(host.getHostId())) {
group.addHost(host);
- group.persistHostMapping();
}
} catch (AmbariException e) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 098efa9..8a158bd 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -409,7 +409,6 @@ public class AmbariManagementControllerTest {
ConfigGroup configGroup = configGroupFactory.createNew(cluster, name,
tag, "", configMap, hostMap);
- configGroup.persist();
cluster.addConfigGroup(configGroup);
return configGroup.getId();
@@ -7011,7 +7010,6 @@ public class AmbariManagementControllerTest {
ConfigGroup configGroup = cluster.getConfigGroups().get(groupId);
configGroup.setHosts(new HashMap<Long, Host>() {{ put(3L,
clusters.getHost(host3)); }});
- configGroup.persist();
requestId = startService(cluster1, serviceName2, false, false);
mapredInstall = null;
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigGroupTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigGroupTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigGroupTest.java
index 75853db..f55bf62 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigGroupTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigGroupTest.java
@@ -102,7 +102,6 @@ public class ConfigGroupTest {
ConfigGroup configGroup = configGroupFactory.createNew(cluster, "cg-test",
"HDFS", "New HDFS configs for h1", configs, hosts);
- configGroup.persist();
cluster.addConfigGroup(configGroup);
return configGroup;
}
@@ -155,23 +154,26 @@ public class ConfigGroupTest {
propertiesAttributes.put("final", attributes);
Config config = configFactory.createNew(cluster, "test-site", "version100", properties, propertiesAttributes);
- configGroup.addConfiguration(config);
+ Map<String, Config> newConfigurations = new HashMap<>(configGroup.getConfigurations());
+ newConfigurations.put(config.getType(), config);
+
+ configGroup.setConfigurations(newConfigurations);
Assert.assertEquals(2, configGroup.getConfigurations().values().size());
+ // re-request it and verify that the config was added
+ configGroupEntity = configGroupDAO.findById(configGroup.getId());
+ Assert.assertEquals(2, configGroupEntity.getConfigGroupConfigMappingEntities().size());
+
configGroup.setName("NewName");
configGroup.setDescription("NewDesc");
configGroup.setTag("NewTag");
// Save
- configGroup.persist();
- configGroup.refresh();
configGroupEntity = configGroupDAO.findByName("NewName");
Assert.assertNotNull(configGroupEntity);
- Assert.assertEquals(2, configGroupEntity
- .getConfigGroupHostMappingEntities().size());
- Assert.assertEquals(2, configGroupEntity
- .getConfigGroupConfigMappingEntities().size());
+ Assert.assertEquals(2, configGroupEntity.getConfigGroupHostMappingEntities().size());
+ Assert.assertEquals(2, configGroupEntity.getConfigGroupConfigMappingEntities().size());
Assert.assertEquals("NewTag", configGroupEntity.getTag());
Assert.assertEquals("NewDesc", configGroupEntity.getDescription());
Assert.assertNotNull(cluster.getConfig("test-site", "version100"));
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
index a3a7e11..526e462 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
@@ -252,7 +252,6 @@ public class ConfigHelperTest {
LOG.info("Config group created with tag " + tag);
configGroup.setTag(tag);
- configGroup.persist();
cluster.addConfigGroup(configGroup);
return configGroup.getId();
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
index 69cfc9f..fc3646a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
@@ -1332,8 +1332,6 @@ public class ClusterTest {
configGroupFactory.createNew(c1, "test group", "HDFS", "descr", Collections.singletonMap("hdfs-site", config2),
Collections.<Long, Host>emptyMap());
- configGroup.persist();
-
c1.addConfigGroup(configGroup);
scvResponse = c1.createServiceConfigVersion("HDFS", "admin", "test note", configGroup);
@@ -1349,7 +1347,6 @@ public class ClusterTest {
configGroup.setConfigurations(Collections.singletonMap("hdfs-site", config3));
- configGroup.persist();
scvResponse = c1.createServiceConfigVersion("HDFS", "admin", "test note", configGroup);
assertEquals("SCV 3 should be created", Long.valueOf(3), scvResponse.getVersion());
@@ -1388,7 +1385,6 @@ public class ClusterTest {
new HashMap<>(Collections.singletonMap("hdfs-site", config4)),
Collections.<Long, Host>emptyMap());
- configGroup2.persist();
c1.addConfigGroup(configGroup2);
scvResponse = c1.createServiceConfigVersion("HDFS", "admin", "test note", configGroup2);
@@ -1421,7 +1417,6 @@ public class ClusterTest {
ImmutableMap.of("p1", "v2"), ImmutableMap.<String, Map<String,String>>of());
ConfigGroup configGroup = configGroupFactory.createNew(c1, "configGroup1", "version1", "test description", ImmutableMap.of(hdfsSiteConfigV2.getType(), hdfsSiteConfigV2), ImmutableMap.<Long, Host>of());
- configGroup.persist();
c1.addConfigGroup(configGroup);
ServiceConfigVersionResponse hdfsSiteConfigResponseV2 = c1.createServiceConfigVersion("HDFS", "admin", "test note", configGroup);
@@ -1481,7 +1476,6 @@ public class ClusterTest {
ImmutableMap.of("p1", "v2"), ImmutableMap.<String, Map<String,String>>of());
ConfigGroup configGroup = configGroupFactory.createNew(c1, "configGroup1", "version1", "test description", ImmutableMap.of(hdfsSiteConfigV2.getType(), hdfsSiteConfigV2), ImmutableMap.<Long, Host>of());
- configGroup.persist();
c1.addConfigGroup(configGroup);
ServiceConfigVersionResponse hdfsSiteConfigResponseV2 = c1.createServiceConfigVersion("HDFS", "admin", "test note", configGroup);
@@ -2341,7 +2335,6 @@ public class ClusterTest {
}
}, Collections.<Long, Host> emptyMap());
- configGroup.persist();
cluster.addConfigGroup(configGroup);
clusterEntity = clusterDAO.findByName("c1");
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
index 8db5190..6a0457f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
@@ -561,7 +561,6 @@ public class ServiceComponentHostTest {
final ConfigGroup configGroup = configGroupFactory.createNew(cluster,
"cg1", "t1", "", new HashMap<String, Config>(), new HashMap<Long, Host>());
- configGroup.persist();
cluster.addConfigGroup(configGroup);
Map<String, Map<String,String>> actual =
@@ -822,7 +821,6 @@ public class ServiceComponentHostTest {
ConfigGroup configGroup = configGroupFactory.createNew(cluster, "g1",
"t1", "", new HashMap<String, Config>() {{ put("hdfs-site", c); }},
new HashMap<Long, Host>() {{ put(hostEntity.getHostId(), host); }});
- configGroup.persist();
cluster.addConfigGroup(configGroup);
// HDP-x/HDFS/hdfs-site updated host to changed property
@@ -875,7 +873,6 @@ public class ServiceComponentHostTest {
configGroup = configGroupFactory.createNew(cluster, "g2",
"t2", "", new HashMap<String, Config>() {{ put("core-site", c1); }},
new HashMap<Long, Host>() {{ put(hostEntity.getHostId(), host); }});
- configGroup.persist();
cluster.addConfigGroup(configGroup);
Assert.assertTrue(sch1.convertToResponse(null).isStaleConfig());
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab1c1001/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
index 68a8d4c..fac5185 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java
@@ -445,7 +445,6 @@ public class AmbariContextTest {
expect(configGroup1.getHosts()).andReturn(Collections.singletonMap(2L, host2)).once();
configGroup1.addHost(host1);
- configGroup1.persistHostMapping();
// replay all mocks
replayAll();
[02/14] ambari git commit: AMBARI-18906 - Remove Unnecessary Locks
Inside Of Config Business Object Implementations (jonathanhurley)
Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 0fdaa46..098efa9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -122,7 +122,6 @@ import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostComponentAdminState;
import org.apache.ambari.server.state.HostState;
@@ -1933,10 +1932,8 @@ public class AmbariManagementControllerTest {
Map<String, String> properties = new HashMap<String, String>();
Map<String, Map<String, String>> propertiesAttributes = new HashMap<String, Map<String,String>>();
- Config c1 = new ConfigImpl(cluster, "hdfs-site", properties, propertiesAttributes, injector);
- c1.setTag("v1");
- cluster.addConfig(c1);
- c1.persist();
+ ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+ Config c1 = configFactory.createNew(cluster, "hdfs-site", "v1", properties, propertiesAttributes);
configs.put(c1.getType(), c1);
ServiceRequest r = new ServiceRequest(cluster1, serviceName, State.INSTALLED.toString());
@@ -1976,26 +1973,17 @@ public class AmbariManagementControllerTest {
properties.put("a", "a1");
properties.put("b", "b1");
- Config c1 = new ConfigImpl(cluster, "hdfs-site", properties, propertiesAttributes, injector);
+ ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+ Config c1 = configFactory.createNew(cluster, "hdfs-site", "v1", properties, propertiesAttributes);
properties.put("c", cluster1);
properties.put("d", "d1");
- Config c2 = new ConfigImpl(cluster, "core-site", properties, propertiesAttributes, injector);
- Config c3 = new ConfigImpl(cluster, "foo-site", properties, propertiesAttributes, injector);
+
+ Config c2 = configFactory.createNew(cluster, "core-site", "v1", properties, propertiesAttributes);
+ Config c3 = configFactory.createNew(cluster, "foo-site", "v1", properties, propertiesAttributes);
Map<String, String> mapRequestProps = new HashMap<String, String>();
mapRequestProps.put("context", "Called from a test");
- c1.setTag("v1");
- c2.setTag("v1");
- c3.setTag("v1");
-
- cluster.addConfig(c1);
- cluster.addConfig(c2);
- cluster.addConfig(c3);
- c1.persist();
- c2.persist();
- c3.persist();
-
configs.put(c1.getType(), c1);
configs.put(c2.getType(), c2);
@@ -4203,27 +4191,20 @@ public class AmbariManagementControllerTest {
cluster.setCurrentStackVersion(new StackId("HDP-2.0.6"));
ConfigFactory cf = injector.getInstance(ConfigFactory.class);
- Config config1 = cf.createNew(cluster, "global",
+ Config config1 = cf.createNew(cluster, "global", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String, String>>());
- config1.setTag("version1");
- Config config2 = cf.createNew(cluster, "core-site",
+ Config config2 = cf.createNew(cluster, "core-site", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String,String>>());
- config2.setTag("version1");
- Config config3 = cf.createNew(cluster, "yarn-site",
+ Config config3 = cf.createNew(cluster, "yarn-site", "version1",
new HashMap<String, String>() {{
put("test.password", "supersecret");
}}, new HashMap<String, Map<String,String>>());
- config3.setTag("version1");
-
- cluster.addConfig(config1);
- cluster.addConfig(config2);
- cluster.addConfig(config3);
Service hdfs = cluster.addService("HDFS");
Service mapred = cluster.addService("YARN");
@@ -4376,20 +4357,15 @@ public class AmbariManagementControllerTest {
cluster.setCurrentStackVersion(new StackId("HDP-2.0.7"));
ConfigFactory cf = injector.getInstance(ConfigFactory.class);
- Config config1 = cf.createNew(cluster, "global",
+ Config config1 = cf.createNew(cluster, "global", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String,String>>());
- config1.setTag("version1");
- Config config2 = cf.createNew(cluster, "core-site",
+ Config config2 = cf.createNew(cluster, "core-site", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String,String>>());
- config2.setTag("version1");
-
- cluster.addConfig(config1);
- cluster.addConfig(config2);
Service hdfs = cluster.addService("HDFS");
@@ -4481,19 +4457,15 @@ public class AmbariManagementControllerTest {
cluster.setCurrentStackVersion(new StackId("HDP-2.0.7"));
ConfigFactory cf = injector.getInstance(ConfigFactory.class);
- Config config1 = cf.createNew(cluster, "global",
+ Config config1 = cf.createNew(cluster, "global", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String,String>>());
- config1.setTag("version1");
- Config config2 = cf.createNew(cluster, "core-site",
+ Config config2 = cf.createNew(cluster, "core-site", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String,String>>());
- config2.setTag("version1");
- config1.persist();
- config2.persist();
cluster.addConfig(config1);
cluster.addConfig(config2);
@@ -4769,18 +4741,14 @@ public class AmbariManagementControllerTest {
cluster.setCurrentStackVersion(new StackId("HDP-0.1"));
ConfigFactory cf = injector.getInstance(ConfigFactory.class);
- Config config1 = cf.createNew(cluster, "global",
+ Config config1 = cf.createNew(cluster, "global", "version1",
new HashMap<String, String>(){{ put("key1", "value1"); }}, new HashMap<String, Map<String,String>>());
- config1.setTag("version1");
config1.setPropertiesAttributes(new HashMap<String, Map<String, String>>(){{ put("attr1", new HashMap<String, String>()); }});
- Config config2 = cf.createNew(cluster, "core-site",
+ Config config2 = cf.createNew(cluster, "core-site", "version1",
new HashMap<String, String>(){{ put("key1", "value1"); }}, new HashMap<String, Map<String,String>>());
- config2.setTag("version1");
config2.setPropertiesAttributes(new HashMap<String, Map<String, String>>(){{ put("attr2", new HashMap<String, String>()); }});
- cluster.addConfig(config1);
- cluster.addConfig(config2);
cluster.addDesiredConfig("_test", Collections.singleton(config1));
cluster.addDesiredConfig("_test", Collections.singleton(config2));
@@ -5515,11 +5483,8 @@ public class AmbariManagementControllerTest {
configs3, null);
ConfigFactory cf = injector.getInstance(ConfigFactory.class);
- Config config1 = cf.createNew(cluster, "kerberos-env",
+ Config config1 = cf.createNew(cluster, "kerberos-env", "version1",
new HashMap<String, String>(), new HashMap<String, Map<String,String>>());
- config1.setTag("version1");
-
- cluster.addConfig(config1);
ClusterRequest crReq = new ClusterRequest(cluster.getClusterId(), cluster1, null, null);
crReq.setDesiredConfig(Collections.singletonList(cr1));
@@ -6441,20 +6406,15 @@ public class AmbariManagementControllerTest {
cluster.setCurrentStackVersion(new StackId("HDP-2.0.6"));
ConfigFactory cf = injector.getInstance(ConfigFactory.class);
- Config config1 = cf.createNew(cluster, "global",
+ Config config1 = cf.createNew(cluster, "global", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String,String>>());
- config1.setTag("version1");
- Config config2 = cf.createNew(cluster, "core-site",
+ Config config2 = cf.createNew(cluster, "core-site", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String,String>>());
- config2.setTag("version1");
-
- cluster.addConfig(config1);
- cluster.addConfig(config2);
Service hdfs = cluster.addService("HDFS");
Service mapred = cluster.addService("YARN");
@@ -6547,20 +6507,15 @@ public class AmbariManagementControllerTest {
cluster.setCurrentStackVersion(new StackId("HDP-2.0.6"));
ConfigFactory cf = injector.getInstance(ConfigFactory.class);
- Config config1 = cf.createNew(cluster, "global",
+ Config config1 = cf.createNew(cluster, "global", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String,String>>());
- config1.setTag("version1");
- Config config2 = cf.createNew(cluster, "core-site",
+ Config config2 = cf.createNew(cluster, "core-site", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String,String>>());
- config2.setTag("version1");
-
- cluster.addConfig(config1);
- cluster.addConfig(config2);
Service hdfs = cluster.addService("HDFS");
Service mapred = cluster.addService("YARN");
@@ -6974,13 +6929,13 @@ public class AmbariManagementControllerTest {
String group2 = getUniqueName();
String tag2 = getUniqueName();
+ ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+
// Create Config group for core-site
configs = new HashMap<String, String>();
configs.put("a", "c");
cluster = clusters.getCluster(cluster1);
- final Config config = new ConfigImpl("core-site");
- config.setProperties(configs);
- config.setTag("version122");
+ final Config config = configFactory.createReadOnly("core-site", "version122", configs, null);
Long groupId = createConfigGroup(cluster, group1, tag1,
new ArrayList<String>() {{ add(host1); }},
new ArrayList<Config>() {{ add(config); }});
@@ -6991,9 +6946,7 @@ public class AmbariManagementControllerTest {
configs = new HashMap<String, String>();
configs.put("a", "c");
- final Config config2 = new ConfigImpl("mapred-site");
- config2.setProperties(configs);
- config2.setTag("version122");
+ final Config config2 = configFactory.createReadOnly("mapred-site", "version122", configs, null);
groupId = createConfigGroup(cluster, group2, tag2,
new ArrayList<String>() {{ add(host1); }},
new ArrayList<Config>() {{ add(config2); }});
@@ -7136,9 +7089,8 @@ public class AmbariManagementControllerTest {
String group1 = getUniqueName();
String tag1 = getUniqueName();
- final Config config = new ConfigImpl("hdfs-site");
- config.setProperties(configs);
- config.setTag("version122");
+ ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+ final Config config = configFactory.createReadOnly("hdfs-site", "version122", configs, null);
Long groupId = createConfigGroup(clusters.getCluster(cluster1), group1, tag1,
new ArrayList<String>() {{
add(host1);
@@ -7246,9 +7198,8 @@ public class AmbariManagementControllerTest {
configs = new HashMap<String, String>();
configs.put("a", "c");
- final Config config = new ConfigImpl("hdfs-site");
- config.setProperties(configs);
- config.setTag("version122");
+ ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+ final Config config = configFactory.createReadOnly("hdfs-site", "version122", configs, null);
Long groupId = createConfigGroup(clusters.getCluster(cluster1), group1, tag1,
new ArrayList<String>() {{ add(host1); add(host2); }},
new ArrayList<Config>() {{ add(config); }});
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
index 96810cf..1747b28 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderHDP22Test.java
@@ -66,7 +66,7 @@ import org.apache.ambari.server.security.authorization.RoleAuthorization;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
+import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostState;
import org.apache.ambari.server.state.RepositoryVersionState;
@@ -108,6 +108,7 @@ public class UpgradeResourceProviderHDP22Test {
private AmbariManagementController amc;
private StackDAO stackDAO;
private TopologyManager topologyManager;
+ private ConfigFactory configFactory;
private static final String configTagVersion1 = "version1";
private static final String configTagVersion2 = "version2";
@@ -136,6 +137,7 @@ public class UpgradeResourceProviderHDP22Test {
stackDAO = injector.getInstance(StackDAO.class);
upgradeDao = injector.getInstance(UpgradeDAO.class);
repoVersionDao = injector.getInstance(RepositoryVersionDAO.class);
+ configFactory = injector.getInstance(ConfigFactory.class);
AmbariEventPublisher publisher = createNiceMock(AmbariEventPublisher.class);
replay(publisher);
@@ -233,11 +235,7 @@ public class UpgradeResourceProviderHDP22Test {
}
}
- Config config = new ConfigImpl("hive-site");
- config.setProperties(configTagVersion1Properties);
- config.setTag(configTagVersion1);
-
- cluster.addConfig(config);
+ Config config = configFactory.createNew(cluster, "hive-site", configTagVersion1, configTagVersion1Properties, null);
cluster.addDesiredConfig("admin", Collections.singleton(config));
Map<String, Object> requestProps = new HashMap<String, Object>();
@@ -286,9 +284,7 @@ public class UpgradeResourceProviderHDP22Test {
// Hive service checks have generated the ExecutionCommands by now.
// Change the new desired config tag and verify execution command picks up new tag
assertEquals(configTagVersion1, cluster.getDesiredConfigByType("hive-site").getTag());
- final Config newConfig = new ConfigImpl("hive-site");
- newConfig.setProperties(configTagVersion2Properties);
- newConfig.setTag(configTagVersion2);
+ final Config newConfig = configFactory.createNew(cluster, "hive-site", configTagVersion2, configTagVersion2Properties, null);
Set<Config> desiredConfigs = new HashSet<Config>() {
{
add(newConfig);
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index 14e3d08..ae99ee6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -85,8 +85,8 @@ import org.apache.ambari.server.serveraction.upgrades.AutoSkipFailedSummaryActio
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
import org.apache.ambari.server.state.DesiredConfig;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostState;
@@ -144,6 +144,7 @@ public class UpgradeResourceProviderTest {
private StackDAO stackDAO;
private AmbariMetaInfo ambariMetaInfo;
private TopologyManager topologyManager;
+ private ConfigFactory configFactory;
@Before
public void before() throws Exception {
@@ -174,6 +175,7 @@ public class UpgradeResourceProviderTest {
amc = injector.getInstance(AmbariManagementController.class);
ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
+ configFactory = injector.getInstance(ConfigFactory.class);
Field field = AmbariServer.class.getDeclaredField("clusterController");
field.setAccessible(true);
@@ -1046,16 +1048,9 @@ public class UpgradeResourceProviderTest {
}
- Config config = new ConfigImpl("zoo.cfg");
- config.setProperties(new HashMap<String, String>() {{
- put("a", "b");
- }});
- config.setTag("abcdefg");
-
- cluster.addConfig(config);
+ Config config = configFactory.createNew(cluster, "zoo.cfg", "abcdefg", Collections.singletonMap("a", "b"), null);
cluster.addDesiredConfig("admin", Collections.singleton(config));
-
Map<String, Object> requestProps = new HashMap<String, Object>();
requestProps.put(UpgradeResourceProvider.UPGRADE_CLUSTER_NAME, "c1");
requestProps.put(UpgradeResourceProvider.UPGRADE_VERSION, "2.2.0.0");
@@ -1249,7 +1244,7 @@ public class UpgradeResourceProviderTest {
Map<String, String> map = gson.<Map<String, String>> fromJson(se.getCommandParamsStage(),Map.class);
assertTrue(map.containsKey("upgrade_direction"));
assertEquals("upgrade", map.get("upgrade_direction"));
-
+
if(map.containsKey("upgrade_type")){
assertEquals("rolling_upgrade", map.get("upgrade_type"));
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
index 0163024..80a3bc5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
@@ -49,8 +49,7 @@ import org.apache.ambari.server.orm.entities.HostVersionEntity;
import org.apache.ambari.server.orm.entities.StackEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
+import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.RepositoryInfo;
import org.apache.ambari.server.state.RepositoryVersionState;
@@ -113,6 +112,9 @@ public class ComponentVersionCheckActionTest {
@Inject
private ServiceComponentHostFactory serviceComponentHostFactory;
+ @Inject
+ private ConfigFactory configFactory;
+
@Before
public void setup() throws Exception {
m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
@@ -399,18 +401,11 @@ public class ComponentVersionCheckActionTest {
properties.put("a", "a1");
properties.put("b", "b1");
- Config c1 = new ConfigImpl(cluster, "hdfs-site", properties, propertiesAttributes, m_injector);
+ configFactory.createNew(cluster, "hdfs-site", "version1", properties, propertiesAttributes);
properties.put("c", "c1");
properties.put("d", "d1");
- Config c2 = new ConfigImpl(cluster, "core-site", properties, propertiesAttributes, m_injector);
- Config c3 = new ConfigImpl(cluster, "foo-site", properties, propertiesAttributes, m_injector);
-
- cluster.addConfig(c1);
- cluster.addConfig(c2);
- cluster.addConfig(c3);
- c1.persist();
- c2.persist();
- c3.persist();
+ configFactory.createNew(cluster, "core-site", "version1", properties, propertiesAttributes);
+ configFactory.createNew(cluster, "foo-site", "version1", properties, propertiesAttributes);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
index 7ab2856..92fa084 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
@@ -132,13 +132,10 @@ public class ConfigureActionTest {
c.setCurrentStackVersion(HDP_211_STACK);
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
put("initLimit", "10");
}}, new HashMap<String, Map<String,String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -196,16 +193,13 @@ public class ConfigureActionTest {
// create a config for zoo.cfg with two values; one is a stack value and the
// other is custom
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
{
put("tickTime", "2000");
put("foo", "bar");
}
}, new HashMap<String, Map<String, String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -262,16 +256,13 @@ public class ConfigureActionTest {
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
put("initLimit", "10");
put("copyIt", "10");
put("moveIt", "10");
put("deleteIt", "10");
}}, new HashMap<String, Map<String,String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -402,15 +393,12 @@ public class ConfigureActionTest {
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
{
put("zoo.server.csv", "c6401,c6402, c6403");
}
}, new HashMap<String, Map<String, String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -468,16 +456,13 @@ public class ConfigureActionTest {
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
{
put("key_to_replace", "My New Cat");
put("key_with_no_match", "WxyAndZ");
}
}, new HashMap<String, Map<String, String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -543,16 +528,13 @@ public class ConfigureActionTest {
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
{
put("existing", "This exists!");
put("missing", null);
}
}, new HashMap<String, Map<String, String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -604,16 +586,12 @@ public class ConfigureActionTest {
c.setCurrentStackVersion(HDP_211_STACK);
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
{
put("fooKey", "barValue");
}
}, new HashMap<String, Map<String, String>>());
- config.setTag("version2");
- config.persist();
-
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -671,7 +649,7 @@ public class ConfigureActionTest {
c.setCurrentStackVersion(HDP_211_STACK);
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
{
put("set.key.1", "s1");
put("set.key.2", "s2");
@@ -680,10 +658,6 @@ public class ConfigureActionTest {
}
}, new HashMap<String, Map<String, String>>());
- config.setTag("version2");
- config.persist();
-
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -769,7 +743,7 @@ public class ConfigureActionTest {
c.setCurrentStackVersion(HDP_211_STACK);
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
{
put("set.key.1", "s1");
put("set.key.2", "s2");
@@ -778,10 +752,6 @@ public class ConfigureActionTest {
}
}, new HashMap<String, Map<String, String>>());
- config.setTag("version2");
- config.persist();
-
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -855,7 +825,7 @@ public class ConfigureActionTest {
c.setCurrentStackVersion(HDP_211_STACK);
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
{
put("replace.key.1", "r1");
put("replace.key.2", "r2");
@@ -865,10 +835,6 @@ public class ConfigureActionTest {
}
}, new HashMap<String, Map<String, String>>());
- config.setTag("version2");
- config.persist();
-
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -951,7 +917,7 @@ public class ConfigureActionTest {
c.setCurrentStackVersion(HDP_211_STACK);
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {
{
put("replace.key.1", "r1");
put("replace.key.2", "r2");
@@ -961,10 +927,6 @@ public class ConfigureActionTest {
}
}, new HashMap<String, Map<String, String>>());
- config.setTag("version2");
- config.persist();
-
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1041,15 +1003,12 @@ public class ConfigureActionTest {
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
put("initLimit", "10");
put("copy.key.1", "c1");
put("copy.key.2", "c2");
}}, new HashMap<String, Map<String,String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1157,15 +1116,12 @@ public class ConfigureActionTest {
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
put("initLimit", "10");
put("copy.key.1", "c1");
put("copy.key.2", "c2");
}}, new HashMap<String, Map<String,String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1253,17 +1209,14 @@ public class ConfigureActionTest {
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
put("initLimit", "10");
put("move.key.1", "m1");
put("move.key.2", "m2");
put("move.key.3", "m3");
put("move.key.4", "m4");
}}, new HashMap<String, Map<String,String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1362,17 +1315,15 @@ public class ConfigureActionTest {
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+ Config config = cf.createNew(c, "zoo.cfg", "version2",
+ new HashMap<String, String>() {{
put("initLimit", "10");
put("move.key.1", "m1");
put("move.key.2", "m2");
put("move.key.3", "m3");
put("move.key.4", "m4");
}}, new HashMap<String, Map<String,String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1466,17 +1417,14 @@ public class ConfigureActionTest {
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
put("initLimit", "10");
put("delete.key.1", "d1");
put("delete.key.2", "d2");
put("delete.key.3", "d3");
put("delete.key.4", "d4");
}}, new HashMap<String, Map<String,String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1567,17 +1515,14 @@ public class ConfigureActionTest {
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
c.setDesiredStackVersion(HDP_220_STACK);
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+ Config config = cf.createNew(c, "zoo.cfg", "version2", new HashMap<String, String>() {{
put("initLimit", "10");
put("delete.key.1", "d1");
put("delete.key.2", "d2");
put("delete.key.3", "d3");
put("delete.key.4", "d4");
}}, new HashMap<String, Map<String,String>>());
- config.setTag("version2");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1674,15 +1619,12 @@ public class ConfigureActionTest {
// service properties will not run!
installService(c, "ZOOKEEPER");
- Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {
+ Config config = cf.createNew(c, "zoo.cfg", "version1", new HashMap<String, String>() {
{
put("initLimit", "10");
}
}, new HashMap<String, Map<String, String>>());
- config.setTag("version1");
- config.persist();
- c.addConfig(config);
c.addDesiredConfig("user", Collections.singleton(config));
// add a host component
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsersTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsersTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsersTest.java
index 314e955..d4c90b8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsersTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsersTest.java
@@ -17,7 +17,16 @@
*/
package org.apache.ambari.server.serveraction.upgrades;
-import com.google.inject.Injector;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+
import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
import org.apache.ambari.server.actionmanager.HostRoleCommand;
import org.apache.ambari.server.agent.CommandReport;
@@ -25,18 +34,11 @@ import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
-import java.lang.reflect.Field;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.easymock.EasyMock.*;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import com.google.inject.Injector;
/**
* Tests OozieConfigCalculation logic
@@ -53,52 +55,28 @@ public class FixOozieAdminUsersTest {
clusters = EasyMock.createMock(Clusters.class);
cluster = EasyMock.createMock(Cluster.class);
+ Map<String, String> mockProperties = new HashMap<String, String>() {{
+ put("falcon_user", "falcon");
+ }};
+
+ Config falconEnvConfig = EasyMock.createNiceMock(Config.class);
+ expect(falconEnvConfig.getType()).andReturn("falcon-env").anyTimes();
+ expect(falconEnvConfig.getProperties()).andReturn(mockProperties).anyTimes();
+
+ mockProperties = new HashMap<String, String>() {{
+ put("oozie_admin_users", "oozie, oozie-admin");
+ }};
+
+ Config oozieEnvConfig = EasyMock.createNiceMock(Config.class);
+ expect(oozieEnvConfig.getType()).andReturn("oozie-env").anyTimes();
+ expect(oozieEnvConfig.getProperties()).andReturn(mockProperties).anyTimes();
- Config falconEnvConfig = new ConfigImpl("falcon-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("falcon_user", "falcon");
- }};
-
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
-
- @Override
- public void setProperties(Map<String, String> properties) {
- mockProperties.putAll(properties);
- }
-
- @Override
- public void persist(boolean newConfig) {
- // no-op
- }
- };
- Config oozieEnvConfig = new ConfigImpl("oozie-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("oozie_admin_users", "oozie, oozie-admin");
- }};
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
-
- @Override
- public void setProperties(Map<String, String> properties) {
- mockProperties.putAll(properties);
- }
-
- @Override
- public void persist(boolean newConfig) {
- // no-op
- }
- };
expect(cluster.getDesiredConfigByType("falcon-env")).andReturn(falconEnvConfig).atLeastOnce();
expect(cluster.getDesiredConfigByType("oozie-env")).andReturn(oozieEnvConfig).atLeastOnce();
expect(clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
expect(injector.getInstance(Clusters.class)).andReturn(clusters).atLeastOnce();
- replay(injector, clusters);
+ replay(injector, clusters, falconEnvConfig, oozieEnvConfig);
clustersField = FixOozieAdminUsers.class.getDeclaredField("clusters");
clustersField.setAccessible(true);
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeActionTest.java
index 4c1d7a3..f8a5373 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeActionTest.java
@@ -17,8 +17,18 @@
*/
package org.apache.ambari.server.serveraction.upgrades;
-import com.google.inject.Injector;
-import junit.framework.Assert;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
import org.apache.ambari.server.actionmanager.HostRoleCommand;
import org.apache.ambari.server.agent.CommandReport;
@@ -26,21 +36,13 @@ import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
-import java.lang.reflect.Field;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import com.google.inject.Injector;
-import static org.easymock.EasyMock.*;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import junit.framework.Assert;
/**
* Tests HBaseEnvMaxDirectMemorySizeAction logic
@@ -55,99 +57,86 @@ public class HBaseEnvMaxDirectMemorySizeActionTest {
injector = EasyMock.createMock(Injector.class);
clusters = EasyMock.createMock(Clusters.class);
Cluster cluster = EasyMock.createMock(Cluster.class);
-
- Config hbaseEnv = new ConfigImpl("hbase-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("content","# Set environment variables here.\n" +
- "\n" +
- "# The java implementation to use. Java 1.6 required.\n" +
- "export JAVA_HOME={{java64_home}}\n" +
- "\n" +
- "# HBase Configuration directory\n" +
- "export HBASE_CONF_DIR=${HBASE_CONF_DIR:-{{hbase_conf_dir}}}\n" +
- "\n" +
- "# Extra Java CLASSPATH elements. Optional.\n" +
- "export HBASE_CLASSPATH=${HBASE_CLASSPATH}\n" +
- "\n" +
- "# The maximum amount of heap to use, in MB. Default is 1000.\n" +
- "# export HBASE_HEAPSIZE=1000\n" +
- "\n" +
- "# Extra Java runtime options.\n" +
- "# Below are what we set by default. May only work with SUN JVM.\n" +
- "# For more on why as well as other possible settings,\n" +
- "# see http://wiki.apache.org/hadoop/PerformanceTuning\n" +
- "export SERVER_GC_OPTS=\"-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:{{log_dir}}/gc.log-`date +'%Y%m%d%H%M'`\"\n" +
- "# Uncomment below to enable java garbage collection logging.\n" +
- "# export HBASE_OPTS=\"$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log -Djava.io.tmpdir={{java_io_tmpdir}}\"\n" +
- "\n" +
- "# Uncomment and adjust to enable JMX exporting\n" +
- "# See jmxremote.password and jmxremote.access in $JRE_HOME/lib/management to configure remote password access.\n" +
- "# More details at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html\n" +
- "#\n" +
- "# export HBASE_JMX_BASE=\"-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false\"\n" +
- "# If you want to configure BucketCache, specify '-XX: MaxDirectMemorySize=' with proper direct memory size\n" +
- "# export HBASE_THRIFT_OPTS=\"$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103\"\n" +
- "# export HBASE_ZOOKEEPER_OPTS=\"$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104\"\n" +
- "\n" +
- "# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.\n" +
- "export HBASE_REGIONSERVERS=${HBASE_CONF_DIR}/regionservers\n" +
- "\n" +
- "# Extra ssh options. Empty by default.\n" +
- "# export HBASE_SSH_OPTS=\"-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR\"\n" +
- "\n" +
- "# Where log files are stored. $HBASE_HOME/logs by default.\n" +
- "export HBASE_LOG_DIR={{log_dir}}\n" +
- "\n" +
- "# A string representing this instance of hbase. $USER by default.\n" +
- "# export HBASE_IDENT_STRING=$USER\n" +
- "\n" +
- "# The scheduling priority for daemon processes. See 'man nice'.\n" +
- "# export HBASE_NICENESS=10\n" +
- "\n" +
- "# The directory where pid files are stored. /tmp by default.\n" +
- "export HBASE_PID_DIR={{pid_dir}}\n" +
- "\n" +
- "# Seconds to sleep between slave commands. Unset by default. This\n" +
- "# can be useful in large clusters, where, e.g., slave rsyncs can\n" +
- "# otherwise arrive faster than the master can service them.\n" +
- "# export HBASE_SLAVE_SLEEP=0.1\n" +
- "\n" +
- "# Tell HBase whether it should manage it's own instance of Zookeeper or not.\n" +
- "export HBASE_MANAGES_ZK=false\n" +
- "\n" +
- "{% if security_enabled %}\n" +
- "export HBASE_OPTS=\"$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.security.auth.login.config={{client_jaas_config_file}} -Djava.io.tmpdir={{java_io_tmpdir}}\"\n" +
- "export HBASE_MASTER_OPTS=\"$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}}\"\n" +
- "export HBASE_REGIONSERVER_OPTS=\"$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}}\"\n" +
- "{% else %}\n" +
- "export HBASE_OPTS=\"$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.io.tmpdir={{java_io_tmpdir}}\"\n" +
- "export HBASE_MASTER_OPTS=\"$HBASE_MASTER_OPTS -Xmx{{master_heapsize}}\"\n" +
- "export HBASE_REGIONSERVER_OPTS=\"$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}\"\n" +
- "{% endif %}");
- }};
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
-
- @Override
- public void setProperties(Map<String, String> properties) {
- mockProperties.putAll(properties);
- }
-
- @Override
- public void persist(boolean newConfig) {
- // no-op
- }
- };
-
+ Config hbaseEnv = EasyMock.createNiceMock(Config.class);
+
+ Map<String, String> mockProperties = new HashMap<String, String>() {{
+ put("content","# Set environment variables here.\n" +
+ "\n" +
+ "# The java implementation to use. Java 1.6 required.\n" +
+ "export JAVA_HOME={{java64_home}}\n" +
+ "\n" +
+ "# HBase Configuration directory\n" +
+ "export HBASE_CONF_DIR=${HBASE_CONF_DIR:-{{hbase_conf_dir}}}\n" +
+ "\n" +
+ "# Extra Java CLASSPATH elements. Optional.\n" +
+ "export HBASE_CLASSPATH=${HBASE_CLASSPATH}\n" +
+ "\n" +
+ "# The maximum amount of heap to use, in MB. Default is 1000.\n" +
+ "# export HBASE_HEAPSIZE=1000\n" +
+ "\n" +
+ "# Extra Java runtime options.\n" +
+ "# Below are what we set by default. May only work with SUN JVM.\n" +
+ "# For more on why as well as other possible settings,\n" +
+ "# see http://wiki.apache.org/hadoop/PerformanceTuning\n" +
+ "export SERVER_GC_OPTS=\"-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:{{log_dir}}/gc.log-`date +'%Y%m%d%H%M'`\"\n" +
+ "# Uncomment below to enable java garbage collection logging.\n" +
+ "# export HBASE_OPTS=\"$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log -Djava.io.tmpdir={{java_io_tmpdir}}\"\n" +
+ "\n" +
+ "# Uncomment and adjust to enable JMX exporting\n" +
+ "# See jmxremote.password and jmxremote.access in $JRE_HOME/lib/management to configure remote password access.\n" +
+ "# More details at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html\n" +
+ "#\n" +
+ "# export HBASE_JMX_BASE=\"-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false\"\n" +
+ "# If you want to configure BucketCache, specify '-XX: MaxDirectMemorySize=' with proper direct memory size\n" +
+ "# export HBASE_THRIFT_OPTS=\"$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103\"\n" +
+ "# export HBASE_ZOOKEEPER_OPTS=\"$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104\"\n" +
+ "\n" +
+ "# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.\n" +
+ "export HBASE_REGIONSERVERS=${HBASE_CONF_DIR}/regionservers\n" +
+ "\n" +
+ "# Extra ssh options. Empty by default.\n" +
+ "# export HBASE_SSH_OPTS=\"-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR\"\n" +
+ "\n" +
+ "# Where log files are stored. $HBASE_HOME/logs by default.\n" +
+ "export HBASE_LOG_DIR={{log_dir}}\n" +
+ "\n" +
+ "# A string representing this instance of hbase. $USER by default.\n" +
+ "# export HBASE_IDENT_STRING=$USER\n" +
+ "\n" +
+ "# The scheduling priority for daemon processes. See 'man nice'.\n" +
+ "# export HBASE_NICENESS=10\n" +
+ "\n" +
+ "# The directory where pid files are stored. /tmp by default.\n" +
+ "export HBASE_PID_DIR={{pid_dir}}\n" +
+ "\n" +
+ "# Seconds to sleep between slave commands. Unset by default. This\n" +
+ "# can be useful in large clusters, where, e.g., slave rsyncs can\n" +
+ "# otherwise arrive faster than the master can service them.\n" +
+ "# export HBASE_SLAVE_SLEEP=0.1\n" +
+ "\n" +
+ "# Tell HBase whether it should manage it's own instance of Zookeeper or not.\n" +
+ "export HBASE_MANAGES_ZK=false\n" +
+ "\n" +
+ "{% if security_enabled %}\n" +
+ "export HBASE_OPTS=\"$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.security.auth.login.config={{client_jaas_config_file}} -Djava.io.tmpdir={{java_io_tmpdir}}\"\n" +
+ "export HBASE_MASTER_OPTS=\"$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}}\"\n" +
+ "export HBASE_REGIONSERVER_OPTS=\"$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}}\"\n" +
+ "{% else %}\n" +
+ "export HBASE_OPTS=\"$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.io.tmpdir={{java_io_tmpdir}}\"\n" +
+ "export HBASE_MASTER_OPTS=\"$HBASE_MASTER_OPTS -Xmx{{master_heapsize}}\"\n" +
+ "export HBASE_REGIONSERVER_OPTS=\"$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}\"\n" +
+ "{% endif %}");
+ }};
+
+ expect(hbaseEnv.getType()).andReturn("hbase-env").anyTimes();
+ expect(hbaseEnv.getProperties()).andReturn(mockProperties).anyTimes();
expect(cluster.getDesiredConfigByType("hbase-env")).andReturn(hbaseEnv).atLeastOnce();
expect(clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
expect(injector.getInstance(Clusters.class)).andReturn(clusters).atLeastOnce();
- replay(injector, clusters, cluster);
+ replay(injector, clusters, cluster, hbaseEnv);
m_clusterField = HBaseEnvMaxDirectMemorySizeAction.class.getDeclaredField("clusters");
m_clusterField.setAccessible(true);
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathActionTest.java
index 9bde631..8926203 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathActionTest.java
@@ -17,8 +17,18 @@
*/
package org.apache.ambari.server.serveraction.upgrades;
-import com.google.inject.Injector;
-import junit.framework.Assert;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
import org.apache.ambari.server.actionmanager.HostRoleCommand;
import org.apache.ambari.server.agent.CommandReport;
@@ -26,22 +36,13 @@ import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
-import java.lang.reflect.Field;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import com.google.inject.Injector;
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
+import junit.framework.Assert;
/**
* Tests HiveEnvClasspathAction logic
@@ -57,79 +58,66 @@ public class HiveEnvClasspathActionTest {
m_clusters = EasyMock.createMock(Clusters.class);
Cluster cluster = EasyMock.createMock(Cluster.class);
- Config hiveEnv = new ConfigImpl("hive-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("content", " export HADOOP_USER_CLASSPATH_FIRST=true #this prevents old metrics libs from mapreduce lib from bringing in old jar deps overriding HIVE_LIB\n" +
- " if [ \"$SERVICE\" = \"cli\" ]; then\n" +
- " if [ -z \"$DEBUG\" ]; then\n" +
- " export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseNUMA -XX:+UseParallelGC -XX:-UseGCOverheadLimit\"\n" +
- " else\n" +
- " export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit\"\n" +
- " fi\n" +
- " fi\n" +
- "\n" +
- " # The heap size of the jvm stared by hive shell script can be controlled via:\n" +
- "\n" +
- " if [ \"$SERVICE\" = \"metastore\" ]; then\n" +
- " export HADOOP_HEAPSIZE={{hive_metastore_heapsize}} # Setting for HiveMetastore\n" +
- " else\n" +
- " export HADOOP_HEAPSIZE={{hive_heapsize}} # Setting for HiveServer2 and Client\n" +
- " fi\n" +
- "\n" +
- " export HADOOP_CLIENT_OPTS=\"$HADOOP_CLIENT_OPTS -Xmx${HADOOP_HEAPSIZE}m\"\n" +
- "\n" +
- " # Larger heap size may be required when running queries over large number of files or partitions.\n" +
- " # By default hive shell scripts use a heap size of 256 (MB). Larger heap size would also be\n" +
- " # appropriate for hive server (hwi etc).\n" +
- "\n" +
- "\n" +
- " # Set HADOOP_HOME to point to a specific hadoop install directory\n" +
- " HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n" +
- "\n" +
- " # Hive Configuration Directory can be controlled by:\n" +
- " export HIVE_CONF_DIR=test\n" +
- "\n" +
- " # Folder containing extra libraries required for hive compilation/execution can be controlled by:\n" +
- " if [ \"${HIVE_AUX_JARS_PATH}\" != \"\" ]; then\n" +
- " if [ -f \"${HIVE_AUX_JARS_PATH}\" ]; then\n" +
- " export HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}\n" +
- " elif [ -d \"/usr/hdp/current/hive-webhcat/share/hcatalog\" ]; then\n" +
- " export HIVE_AUX_JARS_PATH=/usr/hdp/current/hive-webhcat/share/hcatalog/hive-hcatalog-core.jar\n" +
- " fi\n" +
- " elif [ -d \"/usr/hdp/current/hive-webhcat/share/hcatalog\" ]; then\n" +
- " export HIVE_AUX_JARS_PATH=/usr/hdp/current/hive-webhcat/share/hcatalog/hive-hcatalog-core.jar\n" +
- " fi\n" +
- "\n" +
- " export METASTORE_PORT={{hive_metastore_port}}\n" +
- "\n" +
- " {% if sqla_db_used or lib_dir_available %}\n" +
- " export LD_LIBRARY_PATH=\"$LD_LIBRARY_PATH:{{jdbc_libs_dir}}\"\n" +
- " export JAVA_LIBRARY_PATH=\"$JAVA_LIBRARY_PATH:{{jdbc_libs_dir}}\"\n" +
- " {% endif %}");
- }};
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
-
- @Override
- public void setProperties(Map<String, String> properties) {
- mockProperties.putAll(properties);
- }
-
- @Override
- public void persist(boolean newConfig) {
- // no-op
- }
- };
-
+ Map<String, String> mockProperties = new HashMap<String, String>() {{
+ put("content", " export HADOOP_USER_CLASSPATH_FIRST=true #this prevents old metrics libs from mapreduce lib from bringing in old jar deps overriding HIVE_LIB\n" +
+ " if [ \"$SERVICE\" = \"cli\" ]; then\n" +
+ " if [ -z \"$DEBUG\" ]; then\n" +
+ " export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseNUMA -XX:+UseParallelGC -XX:-UseGCOverheadLimit\"\n" +
+ " else\n" +
+ " export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit\"\n" +
+ " fi\n" +
+ " fi\n" +
+ "\n" +
+ " # The heap size of the jvm stared by hive shell script can be controlled via:\n" +
+ "\n" +
+ " if [ \"$SERVICE\" = \"metastore\" ]; then\n" +
+ " export HADOOP_HEAPSIZE={{hive_metastore_heapsize}} # Setting for HiveMetastore\n" +
+ " else\n" +
+ " export HADOOP_HEAPSIZE={{hive_heapsize}} # Setting for HiveServer2 and Client\n" +
+ " fi\n" +
+ "\n" +
+ " export HADOOP_CLIENT_OPTS=\"$HADOOP_CLIENT_OPTS -Xmx${HADOOP_HEAPSIZE}m\"\n" +
+ "\n" +
+ " # Larger heap size may be required when running queries over large number of files or partitions.\n" +
+ " # By default hive shell scripts use a heap size of 256 (MB). Larger heap size would also be\n" +
+ " # appropriate for hive server (hwi etc).\n" +
+ "\n" +
+ "\n" +
+ " # Set HADOOP_HOME to point to a specific hadoop install directory\n" +
+ " HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n" +
+ "\n" +
+ " # Hive Configuration Directory can be controlled by:\n" +
+ " export HIVE_CONF_DIR=test\n" +
+ "\n" +
+ " # Folder containing extra libraries required for hive compilation/execution can be controlled by:\n" +
+ " if [ \"${HIVE_AUX_JARS_PATH}\" != \"\" ]; then\n" +
+ " if [ -f \"${HIVE_AUX_JARS_PATH}\" ]; then\n" +
+ " export HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}\n" +
+ " elif [ -d \"/usr/hdp/current/hive-webhcat/share/hcatalog\" ]; then\n" +
+ " export HIVE_AUX_JARS_PATH=/usr/hdp/current/hive-webhcat/share/hcatalog/hive-hcatalog-core.jar\n" +
+ " fi\n" +
+ " elif [ -d \"/usr/hdp/current/hive-webhcat/share/hcatalog\" ]; then\n" +
+ " export HIVE_AUX_JARS_PATH=/usr/hdp/current/hive-webhcat/share/hcatalog/hive-hcatalog-core.jar\n" +
+ " fi\n" +
+ "\n" +
+ " export METASTORE_PORT={{hive_metastore_port}}\n" +
+ "\n" +
+ " {% if sqla_db_used or lib_dir_available %}\n" +
+ " export LD_LIBRARY_PATH=\"$LD_LIBRARY_PATH:{{jdbc_libs_dir}}\"\n" +
+ " export JAVA_LIBRARY_PATH=\"$JAVA_LIBRARY_PATH:{{jdbc_libs_dir}}\"\n" +
+ " {% endif %}");
+ }};
+
+ Config hiveEnv = EasyMock.createNiceMock(Config.class);
+ expect(hiveEnv.getType()).andReturn("hive-env").anyTimes();
+ expect(hiveEnv.getProperties()).andReturn(mockProperties).anyTimes();
expect(cluster.getDesiredConfigByType("hive-env")).andReturn(hiveEnv).atLeastOnce();
expect(m_clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
expect(m_injector.getInstance(Clusters.class)).andReturn(m_clusters).atLeastOnce();
- replay(m_injector, m_clusters, cluster);
+ replay(m_injector, m_clusters, cluster, hiveEnv);
m_clusterField = HiveEnvClasspathAction.class.getDeclaredField("clusters");
m_clusterField.setAccessible(true);
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigActionTest.java
index 907194c..cd5eb9d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigActionTest.java
@@ -91,7 +91,7 @@ public class HiveZKQuorumConfigActionTest {
m_hiveSiteConfig.setProperties(EasyMock.anyObject(Map.class));
EasyMock.expectLastCall().once();
- m_hiveSiteConfig.persist(false);
+ m_hiveSiteConfig.save();
EasyMock.expectLastCall().once();
EasyMock.expect(m_cluster.getDesiredConfigByType(HiveZKQuorumConfigAction.HIVE_SITE_CONFIG_TYPE)).andReturn(m_hiveSiteConfig).atLeastOnce();
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/KerberosKeytabsActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/KerberosKeytabsActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/KerberosKeytabsActionTest.java
index d374d75..d18f727 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/KerberosKeytabsActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/KerberosKeytabsActionTest.java
@@ -36,7 +36,6 @@ import org.apache.ambari.server.controller.KerberosHelper;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
import org.apache.ambari.server.state.SecurityType;
import org.apache.commons.lang.StringUtils;
import org.easymock.EasyMock;
@@ -65,26 +64,13 @@ public class KerberosKeytabsActionTest {
m_clusters = EasyMock.createMock(Clusters.class);
m_kerberosHelper = EasyMock.createMock(KerberosHelper.class);
- m_kerberosConfig = new ConfigImpl("kerberos-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("kerberos-env", "");
- }};
+ Map<String, String> mockProperties = new HashMap<String, String>() {{
+ put("kerberos-env", "");
+ }};
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
-
- @Override
- public void setProperties(Map<String, String> properties) {
- mockProperties.putAll(properties);
- }
-
- @Override
- public void persist(boolean newConfig) {
- // no-op
- }
- };
+ m_kerberosConfig = EasyMock.createNiceMock(Config.class);
+ expect(m_kerberosConfig.getType()).andReturn("kerberos-env").anyTimes();
+ expect(m_kerberosConfig.getProperties()).andReturn(mockProperties).anyTimes();
Cluster cluster = EasyMock.createMock(Cluster.class);
@@ -92,7 +78,7 @@ public class KerberosKeytabsActionTest {
expect(cluster.getSecurityType()).andReturn(SecurityType.KERBEROS).anyTimes();
expect(m_clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
- replay(m_clusters, cluster);
+ replay(m_clusters, cluster, m_kerberosConfig);
m_injector = Guice.createInjector(new AbstractModule() {
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculationTest.java
index e673714..7a6a6c3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculationTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculationTest.java
@@ -35,7 +35,6 @@ import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
@@ -57,54 +56,27 @@ public class RangerConfigCalculationTest {
m_clusters = EasyMock.createMock(Clusters.class);
Cluster cluster = EasyMock.createMock(Cluster.class);
- Config adminConfig = new ConfigImpl("admin-properties") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("DB_FLAVOR", "MYSQL");
- put("db_host", "host1");
- put("db_name", "ranger");
- put("audit_db_name", "ranger_audit");
- }};
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
- };
-
- Config adminSiteConfig = new ConfigImpl("admin-properties") {
- Map<String, String> mockProperties = new HashMap<String, String>();
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
-
- @Override
- public void setProperties(Map<String, String> properties) {
- mockProperties.putAll(properties);
- }
-
- @Override
- public void persist(boolean newConfig) {
- // no-op
- }
- };
-
- Config rangerEnv = new ConfigImpl("ranger-env") {
- Map<String, String> mockProperties = new HashMap<String, String>();
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
-
- @Override
- public void setProperties(Map<String, String> properties) {
- mockProperties.putAll(properties);
- }
-
- @Override
- public void persist(boolean newConfig) {
- // no-op
- }
- };
+ Map<String, String> mockProperties = new HashMap<String, String>() {{
+ put("DB_FLAVOR", "MYSQL");
+ put("db_host", "host1");
+ put("db_name", "ranger");
+ put("audit_db_name", "ranger_audit");
+ }};
+
+ Config adminConfig = EasyMock.createNiceMock(Config.class);
+ expect(adminConfig.getType()).andReturn("admin-properties").anyTimes();
+ expect(adminConfig.getProperties()).andReturn(mockProperties).anyTimes();
+
+ mockProperties = new HashMap<String, String>();
+
+ Config adminSiteConfig = EasyMock.createNiceMock(Config.class);
+ expect(adminSiteConfig.getType()).andReturn("admin-properties").anyTimes();
+ expect(adminSiteConfig.getProperties()).andReturn(mockProperties).anyTimes();
+
+ Config rangerEnv = EasyMock.createNiceMock(Config.class);
+ expect(rangerEnv.getType()).andReturn("ranger-env").anyTimes();
+ expect(rangerEnv.getProperties()).andReturn(mockProperties).anyTimes();
+
expect(cluster.getDesiredConfigByType("admin-properties")).andReturn(adminConfig).atLeastOnce();
expect(cluster.getDesiredConfigByType("ranger-admin-site")).andReturn(adminSiteConfig).atLeastOnce();
@@ -113,7 +85,7 @@ public class RangerConfigCalculationTest {
expect(m_clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
expect(m_injector.getInstance(Clusters.class)).andReturn(m_clusters).atLeastOnce();
- replay(m_injector, m_clusters, cluster);
+ replay(m_injector, m_clusters, cluster, adminConfig, adminSiteConfig, rangerEnv);
m_clusterField = RangerConfigCalculation.class.getDeclaredField("m_clusters");
m_clusterField.setAccessible(true);
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculationTest.java
index 25acb45..06092c3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculationTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculationTest.java
@@ -25,6 +25,7 @@ import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.lang.reflect.Field;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@@ -34,9 +35,8 @@ import org.apache.ambari.server.agent.CommandReport;
import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.SecurityType;
import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
+import org.apache.ambari.server.state.SecurityType;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
@@ -59,124 +59,50 @@ public class RangerKerberosConfigCalculationTest {
m_clusters = EasyMock.createMock(Clusters.class);
Cluster cluster = EasyMock.createMock(Cluster.class);
- Config hadoopConfig = new ConfigImpl("hadoop-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("hdfs_user", "hdfs");
- }};
-
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
- };
-
-
- Config hiveConfig = new ConfigImpl("hive-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("hive_user", "hive");
- }};
-
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
- };
-
- Config yarnConfig = new ConfigImpl("yarn-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("yarn_user", "yarn");
- }};
-
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
- };
-
- Config hbaseConfig = new ConfigImpl("hbase-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("hbase_user", "hbase");
- }};
-
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
- };
-
- Config knoxConfig = new ConfigImpl("knox-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("knox_user", "knox");
- }};
-
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
- };
-
- Config stormConfig = new ConfigImpl("storm-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("storm_user", "storm");
- put("storm_principal_name", "storm-c1@EXAMLE.COM");
- }};
-
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
- };
-
- Config kafkaConfig = new ConfigImpl("kafka-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("kafka_user", "kafka");
- }};
-
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
- };
-
- Config kmsConfig = new ConfigImpl("kms-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("kms_user", "kms");
- }};
-
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
- };
-
- Config hdfsSiteConfig = new ConfigImpl("hdfs-site") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("dfs.web.authentication.kerberos.keytab", "/etc/security/keytabs/spnego.kytab");
- }};
-
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
- };
-
- Config adminSiteConfig = new ConfigImpl("ranger-admin-site") {
- Map<String, String> mockProperties = new HashMap<String, String>();
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
-
- @Override
- public void setProperties(Map<String, String> properties) {
- mockProperties.putAll(properties);
- }
-
- @Override
- public void persist(boolean newConfig) {
- // no-op
- }
- };
+ Config hadoopConfig = EasyMock.createNiceMock(Config.class);
+ expect(hadoopConfig.getType()).andReturn("hadoop-env").anyTimes();
+ expect(hadoopConfig.getProperties()).andReturn(Collections.singletonMap("hdfs_user", "hdfs")).anyTimes();
+
+ Config hiveConfig = EasyMock.createNiceMock(Config.class);
+ expect(hiveConfig.getType()).andReturn("hive-env").anyTimes();
+ expect(hiveConfig.getProperties()).andReturn(Collections.singletonMap("hive_user", "hive")).anyTimes();
+
+ Config yarnConfig = EasyMock.createNiceMock(Config.class);
+ expect(yarnConfig.getType()).andReturn("yarn-env").anyTimes();
+ expect(yarnConfig.getProperties()).andReturn(Collections.singletonMap("yarn_user", "yarn")).anyTimes();
+
+ Config hbaseConfig = EasyMock.createNiceMock(Config.class);
+ expect(hbaseConfig.getType()).andReturn("hbase-env").anyTimes();
+ expect(hbaseConfig.getProperties()).andReturn(Collections.singletonMap("hbase_user", "hbase")).anyTimes();
+
+ Config knoxConfig = EasyMock.createNiceMock(Config.class);
+ expect(knoxConfig.getType()).andReturn("knox-env").anyTimes();
+ expect(knoxConfig.getProperties()).andReturn(Collections.singletonMap("knox_user", "knox")).anyTimes();
+
+ Map<String, String> mockProperties = new HashMap<String, String>() {{
+ put("storm_user", "storm");
+ put("storm_principal_name", "storm-c1@EXAMLE.COM");
+ }};
+
+ Config stormConfig = EasyMock.createNiceMock(Config.class);
+ expect(stormConfig.getType()).andReturn("storm-env").anyTimes();
+ expect(stormConfig.getProperties()).andReturn(mockProperties).anyTimes();
+
+ Config kafkaConfig = EasyMock.createNiceMock(Config.class);
+ expect(kafkaConfig.getType()).andReturn("kafka-env").anyTimes();
+ expect(kafkaConfig.getProperties()).andReturn(Collections.singletonMap("kafka_user", "kafka")).anyTimes();
+
+ Config kmsConfig = EasyMock.createNiceMock(Config.class);
+ expect(kmsConfig.getType()).andReturn("kms-env").anyTimes();
+ expect(kmsConfig.getProperties()).andReturn(Collections.singletonMap("kms_user", "kms")).anyTimes();
+
+ Config hdfsSiteConfig = EasyMock.createNiceMock(Config.class);
+ expect(hdfsSiteConfig.getType()).andReturn("hdfs-site").anyTimes();
+ expect(hdfsSiteConfig.getProperties()).andReturn(Collections.singletonMap("dfs.web.authentication.kerberos.keytab", "/etc/security/keytabs/spnego.kytab")).anyTimes();
+
+ Config adminSiteConfig = EasyMock.createNiceMock(Config.class);
+ expect(adminSiteConfig.getType()).andReturn("ranger-admin-site").anyTimes();
+ expect(adminSiteConfig.getProperties()).andReturn(new HashMap<String,String>()).anyTimes();
expect(cluster.getDesiredConfigByType("hadoop-env")).andReturn(hadoopConfig).atLeastOnce();
expect(cluster.getDesiredConfigByType("hive-env")).andReturn(hiveConfig).atLeastOnce();
@@ -193,7 +119,8 @@ public class RangerKerberosConfigCalculationTest {
expect(m_injector.getInstance(Clusters.class)).andReturn(m_clusters).atLeastOnce();
expect(cluster.getSecurityType()).andReturn(SecurityType.KERBEROS).anyTimes();
- replay(m_injector, m_clusters, cluster);
+ replay(m_injector, m_clusters, cluster, hadoopConfig, hiveConfig, yarnConfig, hbaseConfig,
+ knoxConfig, stormConfig, kafkaConfig, kmsConfig, hdfsSiteConfig, adminSiteConfig);
m_clusterField = RangerKerberosConfigCalculation.class.getDeclaredField("m_clusters");
m_clusterField.setAccessible(true);
@@ -236,7 +163,7 @@ public class RangerKerberosConfigCalculationTest {
assertTrue(map.containsKey("ranger.plugins.storm.serviceuser"));
assertTrue(map.containsKey("ranger.plugins.kafka.serviceuser"));
assertTrue(map.containsKey("ranger.plugins.kms.serviceuser"));
- assertTrue(map.containsKey("ranger.spnego.kerberos.keytab"));
+ assertTrue(map.containsKey("ranger.spnego.kerberos.keytab"));
assertEquals("hdfs", map.get("ranger.plugins.hdfs.serviceuser"));
@@ -254,4 +181,4 @@ public class RangerKerberosConfigCalculationTest {
}
-}
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfigTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfigTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfigTest.java
index e65a824..518ab42 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfigTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfigTest.java
@@ -36,7 +36,6 @@ import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
@@ -58,33 +57,20 @@ public class SparkShufflePropertyConfigTest {
m_clusters = EasyMock.createMock(Clusters.class);
cluster = EasyMock.createMock(Cluster.class);
+ Map<String, String> mockProperties = new HashMap<String, String>() {{
+ put("yarn.nodemanager.aux-services", "some_service");
+ }};
- Config adminConfig = new ConfigImpl("yarn-site") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
- put("yarn.nodemanager.aux-services", "some_service");
- }};
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
+ Config yarnConfig = EasyMock.createNiceMock(Config.class);
+ expect(yarnConfig.getType()).andReturn("yarn-site").anyTimes();
+ expect(yarnConfig.getProperties()).andReturn(mockProperties).anyTimes();
- @Override
- public void setProperties(Map<String, String> properties) {
- mockProperties.putAll(properties);
- }
-
- @Override
- public void persist(boolean newConfig) {
- // no-op
- }
- };
-
- expect(cluster.getDesiredConfigByType("yarn-site")).andReturn(adminConfig).atLeastOnce();
+ expect(cluster.getDesiredConfigByType("yarn-site")).andReturn(yarnConfig).atLeastOnce();
expect(m_clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
expect(m_injector.getInstance(Clusters.class)).andReturn(m_clusters).atLeastOnce();
- replay(m_injector, m_clusters);
+ replay(m_injector, m_clusters, yarnConfig);
clusterField = SparkShufflePropertyConfig.class.getDeclaredField("clusters");
clusterField.setAccessible(true);
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
index 8f9d4f4..262b10a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/UpgradeActionTest.java
@@ -67,7 +67,7 @@ import org.apache.ambari.server.serveraction.ServerAction;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
+import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.RepositoryInfo;
import org.apache.ambari.server.state.RepositoryVersionState;
@@ -153,6 +153,8 @@ public class UpgradeActionTest {
private AmbariMetaInfo ambariMetaInfo;
@Inject
private FinalizeUpgradeAction finalizeUpgradeAction;
+ @Inject
+ private ConfigFactory configFactory;
@Before
public void setup() throws Exception {
@@ -1043,24 +1045,22 @@ public class UpgradeActionTest {
properties.put("a", "a1");
properties.put("b", "b1");
- Config c1 = new ConfigImpl(cluster, "zookeeper-env", properties, propertiesAttributes, m_injector);
+ configFactory.createNew(cluster, "zookeeper-env", "version-" + System.currentTimeMillis(),
+ properties, propertiesAttributes);
+
properties.put("zookeeper_a", "value_1");
properties.put("zookeeper_b", "value_2");
- Config c2 = new ConfigImpl(cluster, "hdfs-site", properties, propertiesAttributes, m_injector);
+ configFactory.createNew(cluster, "hdfs-site", "version-" + System.currentTimeMillis(),
+ properties, propertiesAttributes);
+
properties.put("hdfs_a", "value_3");
properties.put("hdfs_b", "value_4");
- Config c3 = new ConfigImpl(cluster, "core-site", properties, propertiesAttributes, m_injector);
- Config c4 = new ConfigImpl(cluster, "foo-site", properties, propertiesAttributes, m_injector);
-
- cluster.addConfig(c1);
- cluster.addConfig(c2);
- cluster.addConfig(c3);
- cluster.addConfig(c4);
- c1.persist();
- c2.persist();
- c3.persist();
- c4.persist();
+ configFactory.createNew(cluster, "core-site", "version-" + System.currentTimeMillis(),
+ properties, propertiesAttributes);
+
+ configFactory.createNew(cluster, "foo-site", "version-" + System.currentTimeMillis(),
+ properties, propertiesAttributes);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigGroupTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigGroupTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigGroupTest.java
index 80665a5..75853db 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigGroupTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigGroupTest.java
@@ -89,8 +89,7 @@ public class ConfigGroupTest {
Map<String, String> attributes = new HashMap<String, String>();
attributes.put("a", "true");
propertiesAttributes.put("final", attributes);
- Config config = configFactory.createNew(cluster, "hdfs-site", properties, propertiesAttributes);
- config.setTag("testversion");
+ Config config = configFactory.createNew(cluster, "hdfs-site", "testversion", properties, propertiesAttributes);
Host host = clusters.getHost("h1");
@@ -154,11 +153,8 @@ public class ConfigGroupTest {
Map<String, String> attributes = new HashMap<String, String>();
attributes.put("key1", "true");
propertiesAttributes.put("final", attributes);
- Config config = new ConfigImpl("test-site");
- config.setProperties(properties);
- config.setPropertiesAttributes(propertiesAttributes);
- config.setTag("version100");
+ Config config = configFactory.createNew(cluster, "test-site", "version100", properties, propertiesAttributes);
configGroup.addConfiguration(config);
Assert.assertEquals(2, configGroup.getConfigurations().values().size());
[14/14] ambari git commit: Merge branch 'branch-feature-AMBARI-18456'
into trunk
Posted by jo...@apache.org.
Merge branch 'branch-feature-AMBARI-18456' into trunk
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/97e3de68
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/97e3de68
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/97e3de68
Branch: refs/heads/trunk
Commit: 97e3de68f7f2d42a98802dde4bf91d154170f9f3
Parents: 96e69d5 88c2892
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Dec 7 16:49:43 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Dec 7 16:49:43 2016 -0500
----------------------------------------------------------------------
.../AmbariManagementControllerImpl.java | 13 +-
.../internal/ConfigGroupResourceProvider.java | 60 +-
.../serveraction/upgrades/ConfigureAction.java | 16 +-
.../serveraction/upgrades/FixLzoCodecPath.java | 16 +-
.../upgrades/FixOozieAdminUsers.java | 9 +-
.../upgrades/HBaseConfigCalculation.java | 14 +-
.../HBaseEnvMaxDirectMemorySizeAction.java | 13 +-
.../upgrades/HiveEnvClasspathAction.java | 13 +-
.../upgrades/HiveZKQuorumConfigAction.java | 2 +-
.../upgrades/OozieConfigCalculation.java | 13 +-
.../upgrades/RangerConfigCalculation.java | 4 +-
.../RangerKerberosConfigCalculation.java | 20 +-
.../upgrades/RangerKmsProxyConfig.java | 3 +-
.../upgrades/SparkShufflePropertyConfig.java | 3 +-
.../upgrades/YarnConfigCalculation.java | 2 +-
.../org/apache/ambari/server/state/Config.java | 22 +-
.../ambari/server/state/ConfigFactory.java | 20 +-
.../apache/ambari/server/state/ConfigImpl.java | 487 +++++++--------
.../server/state/cluster/ClusterImpl.java | 6 +-
.../server/state/configgroup/ConfigGroup.java | 33 +-
.../state/configgroup/ConfigGroupFactory.java | 34 +-
.../state/configgroup/ConfigGroupImpl.java | 613 +++++++++----------
.../ambari/server/topology/AmbariContext.java | 25 +-
.../ambari/server/update/HostUpdateHelper.java | 10 +-
.../ExecutionCommandWrapperTest.java | 17 +-
.../TestActionSchedulerThreading.java | 19 +-
.../server/agent/HeartbeatTestHelper.java | 6 +-
.../server/agent/TestHeartbeatMonitor.java | 13 +-
.../configuration/RecoveryConfigHelperTest.java | 2 +-
.../AmbariManagementControllerImplTest.java | 22 +-
.../AmbariManagementControllerTest.java | 109 +---
.../UpgradeResourceProviderHDP22Test.java | 14 +-
.../internal/UpgradeResourceProviderTest.java | 13 +-
.../ComponentVersionCheckActionTest.java | 19 +-
.../upgrades/ConfigureActionTest.java | 96 +--
.../upgrades/FixOozieAdminUsersTest.java | 76 +--
.../HBaseEnvMaxDirectMemorySizeActionTest.java | 187 +++---
.../upgrades/HiveEnvClasspathActionTest.java | 148 ++---
.../upgrades/HiveZKQuorumConfigActionTest.java | 2 +-
.../upgrades/KerberosKeytabsActionTest.java | 28 +-
.../upgrades/RangerConfigCalculationTest.java | 72 +--
.../RangerKerberosConfigCalculationTest.java | 173 ++----
.../upgrades/RangerKmsProxyConfigTest.java | 36 +-
.../SparkShufflePropertyConfigTest.java | 30 +-
.../upgrades/UpgradeActionTest.java | 28 +-
.../ambari/server/state/ConfigGroupTest.java | 26 +-
.../ambari/server/state/ConfigHelperTest.java | 49 +-
.../state/alerts/AlertReceivedListenerTest.java | 8 +-
.../state/cluster/ClusterDeadlockTest.java | 17 +-
.../server/state/cluster/ClusterTest.java | 133 +---
.../server/state/cluster/ClustersTest.java | 8 +-
...omponentHostConcurrentWriteDeadlockTest.java | 9 +-
.../ambari/server/state/host/HostTest.java | 6 +-
.../svccomphost/ServiceComponentHostTest.java | 24 +-
.../server/topology/AmbariContextTest.java | 38 +-
.../server/update/HostUpdateHelperTest.java | 40 +-
.../ambari/server/utils/StageUtilsTest.java | 4 +
57 files changed, 1197 insertions(+), 1726 deletions(-)
----------------------------------------------------------------------
[06/14] ambari git commit: Merge branch 'trunk' into
branch-feature-AMBARI-18456
Posted by jo...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-18456
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/936626bd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/936626bd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/936626bd
Branch: refs/heads/trunk
Commit: 936626bdba424d3972704a68f98b34cc542f8a64
Parents: ab1c100 c9a4881
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Nov 21 11:03:42 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Nov 21 11:08:19 2016 -0500
----------------------------------------------------------------------
.../src/main/python/ambari_agent/AmbariAgent.py | 19 +-
.../ambari_agent/StatusCommandsExecutor.py | 4 +-
.../test/python/ambari_agent/TestAmbariAgent.py | 7 +-
.../timeline/AbstractTimelineMetricsSink.java | 5 +-
.../availability/MetricCollectorHAHelper.java | 6 +
.../availability/MetricCollectorHATest.java | 10 +-
.../apache/ambari/server/state/ConfigImpl.java | 13 +-
.../alerts/alert_segment_registration_status.py | 10 +-
.../package/scripts/hive_server_interactive.py | 22 +-
.../0.12.0.2.0/package/scripts/params_linux.py | 1 +
.../MAHOUT/1.0.0.2.3/metainfo.xml | 2 +-
.../OOZIE/4.0.0.2.0/metainfo.xml | 2 +-
.../OOZIE/4.2.0.2.3/metainfo.xml | 2 +-
.../common-services/PIG/0.12.0.2.0/metainfo.xml | 2 +-
.../common-services/TEZ/0.4.0.2.1/metainfo.xml | 2 +-
.../common-services/YARN/2.1.0.2.0/metainfo.xml | 2 +-
.../stacks/HDP/2.5/services/SPARK/kerberos.json | 60 +-
.../stacks/HDP/2.6/services/SPARK/kerberos.json | 62 +-
.../GRUMPY/configuration/grumpy-site.xml | 36 +
.../PERF/1.0/services/GRUMPY/metainfo.xml | 57 +
.../services/GRUMPY/package/scripts/dwarf.py | 38 +
.../GRUMPY/package/scripts/service_check.py | 30 +
.../PERF/1.0/services/GRUMPY/themes/theme.json | 65 +
.../stacks/PERF/1.0/services/HBASE/alerts.json | 127 +
.../services/HBASE/configuration/hbase-env.xml | 292 +
.../HBASE/configuration/hbase-log4j.xml | 146 +
.../configuration/hbase-logsearch-conf.xml | 111 +
.../HBASE/configuration/hbase-policy.xml | 53 +
.../services/HBASE/configuration/hbase-site.xml | 573 ++
.../HBASE/configuration/ranger-hbase-audit.xml | 122 +
.../ranger-hbase-policymgr-ssl.xml | 66 +
.../configuration/ranger-hbase-security.xml | 68 +
.../PERF/1.0/services/HBASE/kerberos.json | 159 +
.../stacks/PERF/1.0/services/HBASE/metainfo.xml | 196 +
.../stacks/PERF/1.0/services/HBASE/metrics.json | 9374 ++++++++++++++++++
.../HBASE/package/scripts/hbase_client.py | 38 +
.../HBASE/package/scripts/hbase_master.py | 41 +
.../HBASE/package/scripts/hbase_regionserver.py | 41 +
.../package/scripts/phoenix_queryserver.py | 38 +
.../HBASE/package/scripts/service_check.py | 30 +
.../services/HBASE/quicklinks/quicklinks.json | 97 +
.../PERF/1.0/services/HBASE/themes/theme.json | 411 +
.../stacks/PERF/1.0/services/HBASE/widgets.json | 510 +
.../stacks/PERF/1.0/services/HDFS/alerts.json | 1786 ++++
.../services/HDFS/configuration/core-site.xml | 225 +
.../services/HDFS/configuration/hadoop-env.xml | 419 +
.../hadoop-metrics2.properties.xml | 125 +
.../HDFS/configuration/hadoop-policy.xml | 130 +
.../services/HDFS/configuration/hdfs-log4j.xml | 225 +
.../HDFS/configuration/hdfs-logsearch-conf.xml | 248 +
.../services/HDFS/configuration/hdfs-site.xml | 633 ++
.../HDFS/configuration/ranger-hdfs-audit.xml | 124 +
.../ranger-hdfs-plugin-properties.xml | 88 +
.../configuration/ranger-hdfs-policymgr-ssl.xml | 67 +
.../HDFS/configuration/ranger-hdfs-security.xml | 65 +
.../services/HDFS/configuration/ssl-client.xml | 70 +
.../services/HDFS/configuration/ssl-server.xml | 80 +
.../stacks/PERF/1.0/services/HDFS/kerberos.json | 246 +
.../stacks/PERF/1.0/services/HDFS/metainfo.xml | 265 +
.../stacks/PERF/1.0/services/HDFS/metrics.json | 7905 +++++++++++++++
.../package/alerts/alert_checkpoint_time.py | 69 +
.../alerts/alert_datanode_unmounted_data_dir.py | 74 +
.../package/alerts/alert_ha_namenode_health.py | 75 +
.../package/alerts/alert_metrics_deviation.py | 85 +
.../package/alerts/alert_upgrade_finalized.py | 74 +
.../services/HDFS/package/scripts/datanode.py | 38 +
.../HDFS/package/scripts/hdfs_client.py | 38 +
.../HDFS/package/scripts/journalnode.py | 38 +
.../services/HDFS/package/scripts/namenode.py | 54 +
.../services/HDFS/package/scripts/nfsgateway.py | 38 +
.../HDFS/package/scripts/service_check.py | 30 +
.../services/HDFS/package/scripts/snamenode.py | 38 +
.../services/HDFS/package/scripts/zkfc_slave.py | 38 +
.../services/HDFS/quicklinks/quicklinks.json | 76 +
.../PERF/1.0/services/HDFS/themes/theme.json | 179 +
.../stacks/PERF/1.0/services/HDFS/widgets.json | 649 ++
.../SLEEPY/configuration/sleepy-site.xml | 36 +
.../PERF/1.0/services/SLEEPY/metainfo.xml | 57 +
.../services/SLEEPY/package/scripts/dwarf.py | 38 +
.../SLEEPY/package/scripts/service_check.py | 30 +
.../PERF/1.0/services/SLEEPY/themes/theme.json | 65 +
.../PERF/1.0/services/YARN/YARN_metrics.json | 3486 +++++++
.../PERF/1.0/services/YARN/YARN_widgets.json | 611 ++
.../stacks/PERF/1.0/services/YARN/alerts.json | 392 +
.../YARN/configuration-mapred/mapred-env.xml | 50 +
.../YARN/configuration-mapred/mapred-site.xml | 134 +
.../YARN/configuration/capacity-scheduler.xml | 69 +
.../YARN/configuration/ranger-yarn-audit.xml | 121 +
.../ranger-yarn-plugin-properties.xml | 82 +
.../configuration/ranger-yarn-policymgr-ssl.xml | 66 +
.../YARN/configuration/ranger-yarn-security.xml | 58 +
.../services/YARN/configuration/yarn-env.xml | 201 +
.../services/YARN/configuration/yarn-log4j.xml | 103 +
.../services/YARN/configuration/yarn-site.xml | 796 ++
.../stacks/PERF/1.0/services/YARN/kerberos.json | 279 +
.../stacks/PERF/1.0/services/YARN/metainfo.xml | 352 +
.../package/alerts/alert_nodemanager_health.py | 67 +
.../alerts/alert_nodemanagers_summary.py | 68 +
.../scripts/application_timeline_server.py | 38 +
.../YARN/package/scripts/historyserver.py | 38 +
.../package/scripts/mapred_service_check.py | 30 +
.../YARN/package/scripts/mapreduce2_client.py | 38 +
.../YARN/package/scripts/nodemanager.py | 38 +
.../YARN/package/scripts/resourcemanager.py | 44 +
.../YARN/package/scripts/service_check.py | 30 +
.../YARN/package/scripts/yarn_client.py | 38 +
.../YARN/quicklinks-mapred/quicklinks.json | 76 +
.../services/YARN/quicklinks/quicklinks.json | 76 +
.../1.0/services/YARN/themes-mapred/theme.json | 132 +
.../PERF/1.0/services/YARN/themes/theme.json | 250 +
.../PERF/1.0/services/ZOOKEEPER/metainfo.xml | 50 +
.../ZOOKEEPER/package/scripts/service_check.py | 30 +
.../package/scripts/zookeeper_client.py | 38 +
.../package/scripts/zookeeper_server.py | 38 +
.../svccomphost/ServiceComponentHostTest.java | 30 +-
.../test_alert_segment_registration_status.py | 6 +-
.../stacks/2.5/HIVE/test_hive_server_int.py | 112 +
.../test/python/stacks/2.5/configs/hsi_ha.json | 1254 +++
.../main/admin/service_auto_start.js | 6 +-
.../controllers/main/service/add_controller.js | 17 +-
ambari-web/app/controllers/wizard.js | 45 +-
.../app/controllers/wizard/step6_controller.js | 41 +
ambari-web/app/mappers/stack_service_mapper.js | 2 +-
.../configs/config_recommendation_parser.js | 20 +-
.../common/configs/config_recommendations.js | 2 +-
.../mixins/common/configs/enhanced_configs.js | 30 +-
ambari-web/app/styles/alerts.less | 55 +-
ambari-web/app/styles/application.less | 2531 +----
ambari-web/app/styles/bootstrap_overrides.less | 294 +
ambari-web/app/styles/common.less | 17 -
ambari-web/app/styles/dashboard.less | 350 +
ambari-web/app/styles/hosts.less | 494 +
ambari-web/app/styles/modal_popups.less | 190 +-
ambari-web/app/styles/timepicker-overrides.less | 59 +
ambari-web/app/styles/top-nav.less | 212 +
ambari-web/app/styles/wizard.less | 611 ++
ambari-web/app/templates/application.hbs | 6 +-
.../templates/common/host_progress_popup.hbs | 1 +
.../common/modal_popups/select_groups_popup.hbs | 6 +-
ambari-web/app/templates/installer.hbs | 2 +-
ambari-web/app/templates/main.hbs | 4 +-
ambari-web/app/templates/main/admin.hbs | 8 +-
.../app/templates/main/admin/kerberos.hbs | 3 +-
.../templates/main/admin/serviceAccounts.hbs | 6 +-
.../templates/main/admin/service_auto_start.hbs | 102 +-
.../stack_upgrade/stack_upgrade_wizard.hbs | 2 +-
.../main/alerts/definition_details.hbs | 2 +-
ambari-web/app/templates/main/dashboard.hbs | 2 +-
.../main/dashboard/edit_widget_popup.hbs | 30 +-
.../edit_widget_popup_single_threshold.hbs | 27 +-
ambari-web/app/templates/main/host/configs.hbs | 4 +-
.../app/templates/main/host/host_alerts.hbs | 10 +-
ambari-web/app/templates/main/service.hbs | 2 +-
.../main/service/all_services_actions.hbs | 18 +-
.../public_option_disabled_window_body.hbs | 4 +-
.../wizard/step9/step9HostTasksLogPopup.hbs | 1 +
ambari-web/app/utils/ajax/ajax.js | 2 +-
ambari-web/app/utils/host_progress_popup.js | 6 +-
.../configs/service_configs_by_category_view.js | 2 +-
ambari-web/app/views/common/controls_view.js | 11 +-
.../views/common/helpers/status_icon_view.js | 2 +-
ambari-web/app/views/common/table_view.js | 6 +-
.../views/main/admin/stack_and_upgrade_view.js | 4 +-
.../views/wizard/step9/hostLogPopupBody_view.js | 2 +-
.../main/service/add_controller_test.js | 81 +
.../test/controllers/wizard/step6_test.js | 85 +
.../wizard/step9/hostLogPopupBody_view_test.js | 2 +-
ambari-web/vendor/styles/bootstrap.css | 34 +-
.../resources/ui/hive-web/app/routes/splash.js | 56 +-
.../ambari/view/zeppelin/ZeppelinServlet.java | 103 +-
.../src/main/resources/WEB-INF/index.jsp | 63 -
docs/pom.xml | 2 +-
172 files changed, 39665 insertions(+), 2803 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/936626bd/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
index 052ee28,28bcd5f..0a861d8
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
@@@ -51,54 -50,35 +51,59 @@@ public class ConfigImpl implements Conf
*/
private final static Logger LOG = LoggerFactory.getLogger(ConfigImpl.class);
++ /**
++ * A label for {@link #hostLock} to use with the {@link LockFactory}.
++ */
++ private static final String PROPERTY_LOCK_LABEL = "configurationPropertyLock";
++
public static final String GENERATED_TAG_PREFIX = "generatedTag_";
- private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
+ private final long configId;
+ private final Cluster cluster;
+ private final StackId stackId;
+ private final String type;
+ private final String tag;
+ private final Long version;
- private Cluster cluster;
- private StackId stackId;
- private String type;
- private volatile String tag;
- private volatile Long version;
- private volatile Map<String, String> properties;
- private volatile Map<String, Map<String, String>> propertiesAttributes;
- private ClusterConfigEntity entity;
- private volatile Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes;
+ /**
+ * The properties of this configuration. This cannot be a
+ * {@link ConcurrentMap} since we allow null values. Therefore, it must be
+ * synchronized externally.
+ */
+ private Map<String, String> properties;
- @Inject
- private ClusterDAO clusterDAO;
+ /**
+ * A lock for reading/writing of {@link #properties} concurrently.
+ *
+ * @see #properties
+ */
+ private final ReadWriteLock propertyLock;
- @Inject
- private Gson gson;
+ /**
+ * The property attributes for this configuration.
+ */
+ private Map<String, Map<String, String>> propertiesAttributes;
+
+ private Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes;
+
+ private final ClusterDAO clusterDAO;
+
+ private final Gson gson;
@Inject
private ServiceConfigDAO serviceConfigDAO;
- @Inject
- private AmbariEventPublisher eventPublisher;
+ private final AmbariEventPublisher eventPublisher;
@AssistedInject
- public ConfigImpl(@Assisted Cluster cluster, @Assisted String type, @Assisted Map<String, String> properties,
- @Assisted Map<String, Map<String, String>> propertiesAttributes, Injector injector) {
+ ConfigImpl(@Assisted Cluster cluster, @Assisted("type") String type,
+ @Assisted("tag") @Nullable String tag,
+ @Assisted Map<String, String> properties,
+ @Assisted @Nullable Map<String, Map<String, String>> propertiesAttributes, ClusterDAO clusterDAO,
+ Gson gson, AmbariEventPublisher eventPublisher, LockFactory lockFactory) {
+
- propertyLock = lockFactory.newReadWriteLock("configurationPropertyLock");
++ propertyLock = lockFactory.newReadWriteLock(PROPERTY_LOCK_LABEL);
+
this.cluster = cluster;
this.type = type;
this.properties = properties;
@@@ -135,22 -87,15 +140,24 @@@
// when creating a brand new config without a backing entity, use the
// cluster's desired stack as the config's stack
stackId = cluster.getDesiredStackVersion();
-
- injector.injectMembers(this);
propertiesTypes = cluster.getConfigPropertiesTypes(type);
- }
+ persist(entity);
+ configId = entity.getConfigId();
+ }
@AssistedInject
- public ConfigImpl(@Assisted Cluster cluster, @Assisted ClusterConfigEntity entity, Injector injector) {
+ ConfigImpl(@Assisted Cluster cluster, @Assisted ClusterConfigEntity entity,
+ ClusterDAO clusterDAO, Gson gson, AmbariEventPublisher eventPublisher,
+ LockFactory lockFactory) {
++ propertyLock = lockFactory.newReadWriteLock(PROPERTY_LOCK_LABEL);
++
this.cluster = cluster;
+ this.clusterDAO = clusterDAO;
+ this.gson = gson;
+ this.eventPublisher = eventPublisher;
+ configId = entity.getConfigId();
+
type = entity.getType();
tag = entity.getTag();
version = entity.getVersion();
@@@ -190,37 -109,10 +197,39 @@@
}
/**
- * Constructor for clients not using factory.
+ * Constructor. This will create an instance suitable only for
+ * representation/serialization as it is incomplete.
+ *
+ * @param type
+ * @param tag
+ * @param properties
+ * @param propertiesAttributes
+ * @param clusterDAO
+ * @param gson
+ * @param eventPublisher
*/
- public ConfigImpl(String type) {
+ @AssistedInject
+ ConfigImpl(@Assisted("type") String type,
+ @Assisted("tag") @Nullable String tag,
+ @Assisted Map<String, String> properties,
+ @Assisted @Nullable Map<String, Map<String, String>> propertiesAttributes, ClusterDAO clusterDAO,
- Gson gson, AmbariEventPublisher eventPublisher) {
++ Gson gson, AmbariEventPublisher eventPublisher, LockFactory lockFactory) {
++
++ propertyLock = lockFactory.newReadWriteLock(PROPERTY_LOCK_LABEL);
+
+ this.tag = tag;
this.type = type;
+ this.properties = new HashMap<>(properties);
+ this.propertiesAttributes = null == propertiesAttributes ? null
+ : new HashMap<>(propertiesAttributes);
+ this.clusterDAO = clusterDAO;
+ this.gson = gson;
+ this.eventPublisher = eventPublisher;
+
+ cluster = null;
+ configId = 0;
+ version = 0L;
+ stackId = null;
}
/**
http://git-wip-us.apache.org/repos/asf/ambari/blob/936626bd/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
----------------------------------------------------------------------
[09/14] ambari git commit: Merge branch 'trunk' into
branch-feature-AMBARI-18456
Posted by jo...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-18456
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/087de8b7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/087de8b7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/087de8b7
Branch: refs/heads/trunk
Commit: 087de8b789d157aa8f4fd12963b0262ea6c40461
Parents: 276d124 6100be6
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Thu Dec 1 10:57:35 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Dec 1 10:57:35 2016 -0500
----------------------------------------------------------------------
.../src/main/python/ambari_agent/ActionQueue.py | 10 +-
.../ambari_agent/CustomServiceOrchestrator.py | 4 +-
.../simulate_perf_cluster_alert_behaviour.py | 108 ++
.../libraries/script/script.py | 15 +-
.../ambari/logfeeder/mapper/MapperDateTest.java | 56 -
.../ambari/logsearch/dao/SolrCollectionDao.java | 64 +-
ambari-server/conf/unix/ambari.properties | 4 +-
ambari-server/conf/windows/ambari.properties | 2 +
ambari-server/docs/configuration/index.md | 33 +-
.../ambari/server/agent/HeartBeatHandler.java | 2 +-
.../ambari/server/agent/HeartbeatProcessor.java | 2 +-
.../server/checks/ServicePresenceCheck.java | 55 +-
.../server/configuration/Configuration.java | 53 +
.../security/AbstractSecurityHeaderFilter.java | 14 +
.../AmbariServerSecurityHeaderFilter.java | 1 +
.../AmbariViewsSecurityHeaderFilter.java | 1 +
.../kerberos/MITKerberosOperationHandler.java | 22 +-
.../ambari/server/state/quicklinks/Link.java | 21 +
.../services/RetryUpgradeActionService.java | 2 +-
.../server/upgrade/UpgradeCatalog250.java | 52 +
.../ambari/server/utils/ShellCommandUtil.java | 11 +-
ambari-server/src/main/python/ambari-server.py | 12 +-
.../main/python/ambari_server/serverUtils.py | 11 +
.../src/main/python/ambari_server/utils.py | 60 +-
.../src/main/python/ambari_server_main.py | 70 +-
.../package/scripts/metadata_server.py | 18 +
.../RANGER/0.4.0/package/scripts/params.py | 14 +
.../0.4.0/package/scripts/setup_ranger_xml.py | 21 +
.../common-services/RANGER/0.6.0/metainfo.xml | 2 +
.../custom_actions/scripts/install_packages.py | 2 +-
.../src/main/resources/scripts/stack_advisor.py | 11 +-
.../HDP/2.0.6/configuration/cluster-env.xml | 10 +
.../stacks/HDP/2.0.6/services/stack_advisor.py | 47 +-
.../stacks/HDP/2.1/services/stack_advisor.py | 20 +-
.../stacks/HDP/2.2/services/stack_advisor.py | 7 +
.../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml | 5 +-
.../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml | 5 +-
.../stacks/HDP/2.3/upgrades/upgrade-2.5.xml | 5 +-
.../stacks/HDP/2.3/upgrades/upgrade-2.6.xml | 5 +-
.../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml | 5 +-
.../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml | 5 +-
.../stacks/HDP/2.4/upgrades/upgrade-2.5.xml | 5 +-
.../stacks/HDP/2.4/upgrades/upgrade-2.6.xml | 5 +-
.../stacks/HDP/2.5/services/KNOX/metainfo.xml | 4 -
.../stacks/HDP/2.5/services/OOZIE/metainfo.xml | 4 +
.../stacks/HDP/2.5/upgrades/config-upgrade.xml | 12 +
.../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml | 4 +
.../stacks/HDP/2.5/upgrades/upgrade-2.6.xml | 3 +
.../configuration/hive-interactive-site.xml | 58 +
.../HIVE/configuration/tez-interactive-site.xml | 78 +
.../stacks/PERF/1.0/services/HAPPY/alerts.json | 20 +
.../HAPPY/configuration/happy-alert-config.xml | 75 +
.../stacks/PERF/1.0/services/HAPPY/metainfo.xml | 5 +
.../HAPPY/package/alerts/alert_happy_process.py | 59 +
.../stacks/PERF/1.0/services/HBASE/alerts.json | 110 +-
.../HBASE/configuration/hbase-alert-config.xml | 75 +
.../stacks/PERF/1.0/services/HBASE/metainfo.xml | 1 +
.../package/alerts/hbase_master_process.py | 59 +
.../alerts/hbase_regionserver_process.py | 59 +
.../stacks/PERF/1.0/services/HDFS/alerts.json | 1728 +-----------------
.../HDFS/configuration/hdfs-alert-config.xml | 75 +
.../stacks/PERF/1.0/services/HDFS/metainfo.xml | 1 +
.../package/alerts/alert_checkpoint_time.py | 38 +-
.../alerts/alert_datanode_unmounted_data_dir.py | 47 +-
.../package/alerts/alert_ha_namenode_health.py | 75 -
.../package/alerts/alert_metrics_deviation.py | 85 -
.../package/alerts/alert_nfs_gateway_process.py | 59 +
.../package/alerts/alert_snamenode_process.py | 59 +
.../package/alerts/alert_upgrade_finalized.py | 49 +-
.../stacks/PERF/1.0/services/SLEEPY/alerts.json | 20 +
.../configuration/sleepy-alert-config.xml | 75 +
.../PERF/1.0/services/SLEEPY/metainfo.xml | 5 +
.../package/alerts/alert_sleepy_process.py | 59 +
.../stacks/PERF/1.0/services/SNOW/alerts.json | 20 +
.../SNOW/configuration/snow-alert-config.xml | 75 +
.../stacks/PERF/1.0/services/SNOW/metainfo.xml | 5 +
.../SNOW/package/alerts/alert_snow_process.py | 59 +
.../stacks/PERF/1.0/services/YARN/alerts.json | 361 +---
.../YARN/configuration/yarn-alert-config.xml | 75 +
.../stacks/PERF/1.0/services/YARN/metainfo.xml | 3 +
.../package/alerts/alert_history_process.py | 59 +
.../package/alerts/alert_nodemanager_health.py | 36 +-
.../alerts/alert_nodemanagers_summary.py | 68 -
.../alerts/alert_resourcemanager_process.py | 59 +
.../package/alerts/alert_timeline_process.py | 59 +
.../PERF/1.0/services/ZOOKEEPER/alerts.json | 20 +
.../ZOOKEEPER/configuration/zk-alert-config.xml | 75 +
.../PERF/1.0/services/ZOOKEEPER/metainfo.xml | 4 +
.../package/alerts/alert_zk_server_process.py | 59 +
.../src/main/resources/stacks/stack_advisor.py | 209 ++-
.../AbstractSecurityHeaderFilterTest.java | 29 +-
.../MITKerberosOperationHandlerTest.java | 23 +
.../QuickLinksConfigurationModuleTest.java | 36 +
.../server/upgrade/UpgradeCatalog250Test.java | 5 +
.../server/utils/TestShellCommandUtil.java | 13 +-
.../src/test/python/TestAmbariServer.py | 6 +-
ambari-server/src/test/python/TestUtils.py | 37 +-
.../stacks/2.0.6/common/test_stack_advisor.py | 16 +-
.../stacks/2.1/common/test_stack_advisor.py | 2 +
.../stacks/2.2/common/test_stack_advisor.py | 17 +-
.../stacks/2.3/ATLAS/test_metadata_server.py | 5 +-
.../test/python/stacks/test_stack_adviser.py | 239 +++
.../child_quicklinks_with_properties.json | 64 +
.../parent_quicklinks_with_properties.json | 65 +
.../admin/stack_upgrade_history_controller.js | 2 +-
.../controllers/main/service/info/configs.js | 2 +-
ambari-web/app/controllers/main/service/item.js | 49 +-
.../app/controllers/wizard/step6_controller.js | 58 +-
.../app/controllers/wizard/step7_controller.js | 2 +-
.../app/mappers/stack_upgrade_history_mapper.js | 2 +-
ambari-web/app/messages.js | 13 +-
ambari-web/app/mixins.js | 1 -
.../app/mixins/main/service/groups_mapping.js | 2 +-
.../stack_version/stack_upgrade_history.js | 2 +-
ambari-web/app/styles/stack_versions.less | 16 +-
.../highAvailability/journalNode/step2.hbs | 2 +
.../templates/main/admin/service_auto_start.hbs | 24 +-
.../info/delete_service_warning_popup.hbs | 28 +
ambari-web/app/templates/wizard/step1.hbs | 2 +-
.../app/utils/configs/config_initializer.js | 28 +-
.../mount_points_based_initializer_mixin.js | 340 ----
ambari-web/app/views/main/admin.js | 3 +-
.../app/views/main/admin/service_auto_start.js | 5 +
.../service_auto_start/component_auto_start.js | 1 +
.../upgrade_history_details_view.js | 2 +-
.../admin/stack_upgrade/upgrade_history_view.js | 120 +-
ambari-web/app/views/wizard/step1_view.js | 7 +
.../test/controllers/main/service/item_test.js | 13 +-
.../test/controllers/wizard/step6_test.js | 6 +-
ambari-web/test/utils/ajax/ajax_test.js | 9 +-
.../utils/configs/config_initializer_test.js | 457 -----
.../src/main/resources/ui/app/app.js | 14 +-
.../src/main/resources/ui/pig-web/app/app.js | 14 +-
.../resources/ui/app/components/job-details.js | 3 +
134 files changed, 3183 insertions(+), 3725 deletions(-)
----------------------------------------------------------------------
[07/14] ambari git commit: Merge branch 'trunk' into
branch-feature-AMBARI-18456
Posted by jo...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-18456
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1f804d13
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1f804d13
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1f804d13
Branch: refs/heads/trunk
Commit: 1f804d1392fc076542b9905766108d658258bcb4
Parents: 936626b ece1de3
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Nov 28 08:37:22 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Nov 28 08:37:22 2016 -0500
----------------------------------------------------------------------
ambari-agent/conf/unix/agent-multiplier.py | 14 +-
.../ambari_agent/CustomServiceOrchestrator.py | 17 +-
.../TestCustomServiceOrchestrator.py | 25 +-
.../ambari-metrics/datasource.js | 6 +-
ambari-server/pom.xml | 2 +
ambari-server/sbin/ambari-server | 6 +-
.../server/actionmanager/ActionScheduler.java | 2 +-
.../checks/DatabaseConsistencyCheckHelper.java | 10 +-
.../server/configuration/Configuration.java | 23 +-
.../ambari/server/events/AmbariEvent.java | 5 +
.../events/ClusterConfigFinishedEvent.java | 53 +
.../AmbariLdapAuthenticationProvider.java | 23 +-
.../AmbariLdapBindAuthenticator.java | 233 +-
.../apache/ambari/server/stack/StackModule.java | 101 +-
.../ambari/server/state/stack/UpgradePack.java | 12 +-
.../ambari/server/topology/TopologyManager.java | 16 +-
.../ambari/server/view/ViewExtractor.java | 2 +-
.../apache/ambari/server/view/ViewRegistry.java | 381 +--
ambari-server/src/main/python/ambari-server.py | 137 +-
.../python/ambari_server/serverConfiguration.py | 9 +-
.../main/python/ambari_server/setupActions.py | 1 +
.../main/python/ambari_server/setupMpacks.py | 167 +-
.../0.1.0/configuration/storm-site.xml | 84 -
.../AMBARI_METRICS/0.1.0/metainfo.xml | 4 -
.../FLUME/1.4.0.2.0/metrics.json | 62 -
.../HIVE/0.12.0.2.0/package/scripts/hive.py | 497 ++--
.../package/scripts/hive_server_interactive.py | 4 +-
.../0.12.0.2.0/package/scripts/params_linux.py | 1 +
.../0.4.0/package/scripts/setup_ranger_xml.py | 18 +
.../0.4.0/package/templates/ranger_admin_pam.j2 | 22 +
.../package/templates/ranger_remote_pam.j2 | 22 +
.../0.5.0/configuration/ranger-admin-site.xml | 2 +-
.../STORM/1.0.1/configuration/storm-site.xml | 54 +
.../src/main/resources/scripts/stack_advisor.py | 11 +-
.../HDP/2.0.6/configuration/cluster-env.xml | 10 -
.../stacks/HDP/2.0.6/services/stack_advisor.py | 50 +-
.../stacks/HDP/2.1/services/stack_advisor.py | 20 +-
.../stacks/HDP/2.2/services/stack_advisor.py | 7 -
.../stacks/HDP/2.2/upgrades/upgrade-2.3.xml | 1042 -------
.../stacks/HDP/2.2/upgrades/upgrade-2.4.xml | 1050 -------
.../stacks/HDP/2.3/upgrades/upgrade-2.5.xml | 4 +-
.../stacks/HDP/2.3/upgrades/upgrade-2.6.xml | 4 +-
.../stacks/HDP/2.4/upgrades/upgrade-2.5.xml | 4 +-
.../stacks/HDP/2.4/upgrades/upgrade-2.6.xml | 4 +-
.../HIVE/configuration/hive-interactive-env.xml | 10 +
.../HIVE/configuration/hivemetastore-site.xml | 8 +
.../hiveserver2-interactive-site.xml | 8 +
.../HIVE/configuration/hiveserver2-site.xml | 8 +
.../stacks/HDP/2.5/services/stack_advisor.py | 52 +-
.../services/ATLAS/themes/theme_version_2.json | 28 +-
.../stacks/HDP/2.6/services/stack_advisor.py | 2 +-
.../PERF/1.0/configuration/cluster-env.xml | 6 +-
.../GRUMPY/configuration/grumpy-site.xml | 2 +-
.../services/HAPPY/configuration/happy-site.xml | 2 +-
.../services/HBASE/configuration/hbase-env.xml | 30 +-
.../HBASE/configuration/hbase-log4j.xml | 2 +-
.../configuration/hbase-logsearch-conf.xml | 6 +-
.../HBASE/configuration/hbase-policy.xml | 6 +-
.../services/HBASE/configuration/hbase-site.xml | 74 +-
.../services/HDFS/configuration/core-site.xml | 32 +-
.../services/HDFS/configuration/hadoop-env.xml | 44 +-
.../hadoop-metrics2.properties.xml | 2 +-
.../HDFS/configuration/hadoop-policy.xml | 22 +-
.../services/HDFS/configuration/hdfs-log4j.xml | 2 +-
.../HDFS/configuration/hdfs-logsearch-conf.xml | 6 +-
.../services/HDFS/configuration/hdfs-site.xml | 108 +-
.../services/HDFS/configuration/ssl-client.xml | 14 +-
.../services/HDFS/configuration/ssl-server.xml | 16 +-
.../SLEEPY/configuration/sleepy-site.xml | 2 +-
.../services/SNOW/configuration/snow-site.xml | 2 +-
.../YARN/configuration-mapred/mapred-env.xml | 2 +-
.../YARN/configuration-mapred/mapred-site.xml | 28 +-
.../services/YARN/configuration/yarn-env.xml | 8 +-
.../services/YARN/configuration/yarn-log4j.xml | 2 +-
.../services/YARN/configuration/yarn-site.xml | 150 +-
.../src/main/resources/stacks/stack_advisor.py | 200 +-
.../src/main/resources/upgrade-pack.xsd | 10 +-
.../StackDefinedPropertyProviderTest.java | 2 +
.../RestMetricsPropertyProviderTest.java | 2 +
.../AmbariLdapBindAuthenticatorTest.java | 226 +-
.../server/state/stack/UpgradePackTest.java | 22 +
.../ClusterDeployWithStartOnlyTest.java | 2 +-
...InstallWithoutStartOnComponentLevelTest.java | 2 +-
.../ClusterInstallWithoutStartTest.java | 2 +-
.../ambari/server/view/ViewRegistryTest.java | 2 +
ambari-server/src/test/python/TestMpacks.py | 223 +-
.../grafana-dashboards/grafana-hdfs-users.json | 270 ++
.../dashboards/service-metrics/STORM.txt | 7 +
.../stacks/2.0.6/HIVE/test_hive_metastore.py | 147 +-
.../stacks/2.0.6/HIVE/test_hive_server.py | 311 +--
.../stacks/2.0.6/common/test_stack_advisor.py | 34 +-
.../stacks/2.1/HIVE/test_hive_metastore.py | 170 +-
.../stacks/2.1/common/test_stack_advisor.py | 5 -
.../stacks/2.2/common/test_stack_advisor.py | 20 +-
.../stacks/2.5/common/test_stack_advisor.py | 79 +-
.../stacks/2.6/common/test_stack_advisor.py | 4 +-
.../test/python/stacks/test_stack_adviser.py | 239 --
.../python/uninstall/common-services/SERVICEA | 1 +
.../python/uninstall/common-services/SERVICEB | 1 +
.../test/python/uninstall/dashboards/SERVICEA | 1 +
.../test/python/uninstall/dashboards/SERVICEB | 1 +
.../uninstall/dashboards/files/README.txt | 17 +
.../python/uninstall/dashboards/files/STORM.txt | 1 +
.../uninstall/dashboards/files/metainfo.xml | 1 +
.../test/python/uninstall/extensions/SERVICEA | 1 +
.../test/python/uninstall/extensions/SERVICEB | 1 +
.../test/python/uninstall/stacks/2.0/SERVICEA | 1 +
.../test/python/uninstall/stacks/2.0/SERVICEB | 1 +
.../uninstall/stacks/2.0/files/README.txt | 17 +
.../uninstall/stacks/2.0/files/metainfo1.xml | 1 +
.../uninstall/stacks/2.0/files/metainfo2.xml | 1 +
.../GANGLIA/upgrades/HDP/rolling-upgrade.xml | 41 +
ambari-web/app/assets/test/tests.js | 9 +-
.../hawq/activateStandby/step2_controller.js | 2 +-
.../hawq/addStandby/step3_controller.js | 3 +-
.../journalNode/step2_controller.js | 3 +-
.../nameNode/step3_controller.js | 3 +-
.../resourceManager/step3_controller.js | 2 +-
.../main/admin/service_auto_start.js | 330 ++-
.../add_alert_definition/step1_controller.js | 41 +-
.../alerts/definition_configs_controller.js | 40 +-
.../app/controllers/main/charts/heatmap.js | 6 +-
ambari-web/app/controllers/main/host/details.js | 2 +-
.../app/controllers/wizard/step7_controller.js | 2 +-
.../app/controllers/wizard/step8_controller.js | 2 +-
ambari-web/app/data/HDP2.2/hive_properties.js | 119 -
ambari-web/app/data/HDP2.2/site_properties.js | 275 --
ambari-web/app/data/HDP2.2/tez_properties.js | 43 -
ambari-web/app/data/HDP2.2/yarn_properties.js | 54 -
.../HDP2.3/hawq_activate_standby_properties.js | 43 -
.../app/data/HDP2.3/hawq_ha_properties.js | 43 -
ambari-web/app/data/HDP2.3/site_properties.js | 370 ---
ambari-web/app/data/HDP2/alert_notification.js | 145 -
.../app/data/HDP2/gluster_fs_properties.js | 94 -
ambari-web/app/data/HDP2/ha_properties.js | 538 ----
.../data/HDP2/kerberos_descriptor_properties.js | 35 -
ambari-web/app/data/HDP2/kerberos_identities.js | 129 -
.../data/HDP2/move_journal_node_properties.js | 42 -
ambari-web/app/data/HDP2/rm_ha_properties.js | 206 --
ambari-web/app/data/HDP2/secure_mapping.js | 1004 -------
ambari-web/app/data/HDP2/site_properties.js | 2579 ------------------
ambari-web/app/data/HDP2/ui_properties.js | 49 -
.../app/data/configs/alert_notification.js | 145 +
.../configs/services/accumulo_properties.js | 116 +
.../configs/services/ambari_infra_properties.js | 167 ++
.../services/ambari_metrics_properties.js | 188 ++
.../data/configs/services/falcon_properties.js | 302 ++
.../data/configs/services/flume_properties.js | 38 +
.../configs/services/glusterfs_properties.js | 93 +
.../data/configs/services/hawq_properties.js | 110 +
.../data/configs/services/hbase_properties.js | 124 +
.../data/configs/services/hdfs_properties.js | 164 ++
.../data/configs/services/hive_properties.js | 590 ++++
.../data/configs/services/kafka_properties.js | 66 +
.../configs/services/kerberos_properties.js | 204 ++
.../data/configs/services/knox_properties.js | 32 +
.../configs/services/logsearch_properties.js | 447 +++
.../configs/services/mapreduce2_properties.js | 45 +
.../data/configs/services/oozie_properties.js | 93 +
.../data/configs/services/ranger_properties.js | 327 +++
.../data/configs/services/storm_properties.js | 296 ++
.../app/data/configs/services/tez_properties.js | 92 +
.../data/configs/services/yarn_properties.js | 300 ++
.../configs/services/zookeeper_properties.js | 67 +
ambari-web/app/data/configs/site_properties.js | 55 +
ambari-web/app/data/configs/ui_properties.js | 49 +
.../app/data/configs/wizards/ha_properties.js | 538 ++++
.../wizards/hawq_activate_standby_properties.js | 43 +
.../data/configs/wizards/hawq_ha_properties.js | 43 +
.../wizards/kerberos_descriptor_properties.js | 35 +
.../data/configs/wizards/kerberos_identities.js | 129 +
.../wizards/move_journal_node_properties.js | 42 +
.../data/configs/wizards/rm_ha_properties.js | 206 ++
.../app/data/configs/wizards/secure_mapping.js | 1004 +++++++
ambari-web/app/data/custom_stack_map.js | 2 -
.../configs/stack_config_properties_mapper.js | 2 +-
ambari-web/app/messages.js | 19 +-
ambari-web/app/mixins.js | 1 +
.../app/mixins/wizard/addSecurityConfigs.js | 2 +-
.../app/models/alerts/alert_definition.js | 54 +
ambari-web/app/models/service/flume.js | 8 +
ambari-web/app/models/stack_service.js | 13 +-
ambari-web/app/styles/alerts.less | 135 +-
ambari-web/app/styles/application.less | 448 +--
.../app/styles/enhanced_service_dashboard.less | 2 +
ambari-web/app/styles/modal_popups.less | 67 +-
ambari-web/app/styles/stack_versions.less | 6 +-
.../app/styles/theme/bootstrap-ambari.css | 25 +-
ambari-web/app/styles/wizard.less | 23 +-
.../common/assign_master_components.hbs | 62 +-
.../templates/common/configs/overrideWindow.hbs | 2 +-
.../common/form/check_db_connection.hbs | 26 +-
.../templates/common/host_progress_popup.hbs | 317 ++-
.../common/modal_popups/alerts_popup.hbs | 80 +-
.../modal_popups/dependent_configs_list.hbs | 2 +-
ambari-web/app/templates/experimental.hbs | 2 +-
.../templates/main/admin/service_auto_start.hbs | 76 +-
.../service_auto_start/component_auto_start.hbs | 2 +-
.../admin/stack_upgrade/edit_repositories.hbs | 52 +-
.../stack_upgrade/stack_upgrade_wizard.hbs | 4 +-
.../stack_upgrade/upgrade_history_details.hbs | 4 +-
.../main/admin/stack_upgrade/upgrade_task.hbs | 4 +-
.../main/alerts/add_alert_definition/step1.hbs | 30 +-
.../alerts/add_definition_to_group_popup.hbs | 2 +-
.../main/charts/heatmap/heatmap_host_detail.hbs | 70 +-
.../main/charts/heatmap/heatmap_rack.hbs | 37 +-
.../templates/main/charts/heatmap_dropdown.hbs | 4 +-
.../main/host/bulk_operation_confirm_popup.hbs | 2 +-
.../main/host/details/deleteComponentPopup.hbs | 20 +-
.../main/host/details/doDeleteHostPopup.hbs | 35 +-
.../details/raiseDeleteComponentErrorPopup.hbs | 21 +-
.../main/service/info/heatmap_dropdown.hbs | 2 +-
.../app/templates/main/service/info/summary.hbs | 2 +-
.../templates/main/service/services/flume.hbs | 196 +-
ambari-web/app/templates/wizard/step1.hbs | 2 +-
ambari-web/app/templates/wizard/step3.hbs | 2 +-
.../wizard/step3/step3_host_warnings_popup.hbs | 18 +-
.../wizard/step9/step9HostTasksLogPopup.hbs | 88 +-
ambari-web/app/utils/config.js | 25 +-
.../app/utils/configs/config_initializer.js | 28 +-
.../mount_points_based_initializer_mixin.js | 340 +++
ambari-web/app/utils/host_progress_popup.js | 5 +-
.../common/assign_master_components_view.js | 6 +-
ambari-web/app/views/common/controls_view.js | 5 +-
.../app/views/main/admin/service_auto_start.js | 55 +-
.../service_auto_start/component_auto_start.js | 8 +-
.../alerts/add_alert_definition/step1_view.js | 3 +
.../views/main/charts/heatmap/heatmap_rack.js | 23 +-
.../views/main/dashboard/widgets/hbase_links.js | 6 +-
ambari-web/app/views/main/host.js | 5 +-
.../views/main/service/reassign/step5_view.js | 3 +-
.../app/views/main/service/services/flume.js | 32 +-
ambari-web/app/views/wizard/step3_view.js | 2 +-
.../views/wizard/step9/hostLogPopupBody_view.js | 2 +
ambari-web/brunch-config.js | 1 +
ambari-web/copy-pluggable-stack-resources.sh | 2 +-
.../main/admin/service_auto_start_test.js | 326 +++
.../step1_controller_test.js | 33 +-
.../test/controllers/main/host/details_test.js | 25 +-
.../test/data/HDP2.2/site_properties_test.js | 76 -
.../test/data/HDP2.3/site_properties_test.js | 77 -
.../test/data/HDP2/secure_mapping_test.js | 36 -
.../test/data/HDP2/site_properties_test.js | 76 -
.../test/data/configs/site_properties_test.js | 76 +
.../data/configs/wizards/secure_mapping_test.js | 36 +
ambari-web/test/models/service/flume_test.js | 6 +
ambari-web/test/utils/ajax/ajax_test.js | 9 +-
ambari-web/test/utils/config_test.js | 46 +-
.../utils/configs/config_initializer_test.js | 458 ++++
.../component_auto_start_test.js | 64 +
.../views/main/admin/service_auto_start_test.js | 83 +
.../views/main/service/services/flume_test.js | 17 +-
ambari-web/vendor/scripts/jquery.typeahead.js | 300 ++
contrib/utils/perf/deploy-gce-perf-cluster.py | 230 +-
contrib/views/ambari-views-package/pom.xml | 123 +
.../src/main/package/deb/control/control | 22 +
contrib/views/capacity-scheduler/pom.xml | 18 +
contrib/views/files/pom.xml | 18 +
contrib/views/hawq/pom.xml | 18 +
contrib/views/hive-next/pom.xml | 18 +
contrib/views/hive/pom.xml | 18 +
contrib/views/hueambarimigration/pom.xml | 18 +
contrib/views/jobs/pom.xml | 23 +
contrib/views/pig/pom.xml | 18 +
contrib/views/pom.xml | 5 +
contrib/views/slider/pom.xml | 18 +
contrib/views/storm/pom.xml | 23 +
contrib/views/tez/pom.xml | 18 +
contrib/views/wfmanager/pom.xml | 18 +
contrib/views/zeppelin/pom.xml | 18 +
docs/pom.xml | 2 +-
docs/src/site/apt/index.apt | 2 +-
docs/src/site/apt/whats-new.apt | 4 +-
docs/src/site/site.xml | 2 +
274 files changed, 12584 insertions(+), 11650 deletions(-)
----------------------------------------------------------------------
[03/14] ambari git commit: AMBARI-18906 - Remove Unnecessary Locks
Inside Of Config Business Object Implementations (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-18906 - Remove Unnecessary Locks Inside Of Config Business Object Implementations (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a6639a7c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a6639a7c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a6639a7c
Branch: refs/heads/trunk
Commit: a6639a7c72043ff7bda03e6ba305913c7503193a
Parents: 5d7824e
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Nov 16 08:35:20 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Nov 17 11:20:55 2016 -0500
----------------------------------------------------------------------
.../AmbariManagementControllerImpl.java | 13 +-
.../internal/ConfigGroupResourceProvider.java | 13 +-
.../serveraction/upgrades/ConfigureAction.java | 16 +-
.../serveraction/upgrades/FixLzoCodecPath.java | 16 +-
.../upgrades/FixOozieAdminUsers.java | 9 +-
.../upgrades/HBaseConfigCalculation.java | 14 +-
.../HBaseEnvMaxDirectMemorySizeAction.java | 13 +-
.../upgrades/HiveEnvClasspathAction.java | 13 +-
.../upgrades/HiveZKQuorumConfigAction.java | 2 +-
.../upgrades/OozieConfigCalculation.java | 13 +-
.../upgrades/RangerConfigCalculation.java | 4 +-
.../RangerKerberosConfigCalculation.java | 20 +-
.../upgrades/SparkShufflePropertyConfig.java | 3 +-
.../upgrades/YarnConfigCalculation.java | 2 +-
.../org/apache/ambari/server/state/Config.java | 22 +-
.../ambari/server/state/ConfigFactory.java | 20 +-
.../apache/ambari/server/state/ConfigImpl.java | 474 +++++++++----------
.../state/configgroup/ConfigGroupImpl.java | 38 +-
.../ambari/server/topology/AmbariContext.java | 23 +-
.../ambari/server/update/HostUpdateHelper.java | 10 +-
.../ExecutionCommandWrapperTest.java | 17 +-
.../TestActionSchedulerThreading.java | 19 +-
.../server/agent/HeartbeatTestHelper.java | 6 +-
.../server/agent/TestHeartbeatMonitor.java | 13 +-
.../configuration/RecoveryConfigHelperTest.java | 2 +-
.../AmbariManagementControllerImplTest.java | 22 +-
.../AmbariManagementControllerTest.java | 107 ++---
.../UpgradeResourceProviderHDP22Test.java | 14 +-
.../internal/UpgradeResourceProviderTest.java | 15 +-
.../ComponentVersionCheckActionTest.java | 19 +-
.../upgrades/ConfigureActionTest.java | 96 +---
.../upgrades/FixOozieAdminUsersTest.java | 76 ++-
.../HBaseEnvMaxDirectMemorySizeActionTest.java | 187 ++++----
.../upgrades/HiveEnvClasspathActionTest.java | 148 +++---
.../upgrades/HiveZKQuorumConfigActionTest.java | 2 +-
.../upgrades/KerberosKeytabsActionTest.java | 28 +-
.../upgrades/RangerConfigCalculationTest.java | 72 +--
.../RangerKerberosConfigCalculationTest.java | 173 ++-----
.../SparkShufflePropertyConfigTest.java | 30 +-
.../upgrades/UpgradeActionTest.java | 28 +-
.../ambari/server/state/ConfigGroupTest.java | 8 +-
.../ambari/server/state/ConfigHelperTest.java | 48 +-
.../state/alerts/AlertReceivedListenerTest.java | 8 +-
.../state/cluster/ClusterDeadlockTest.java | 17 +-
.../server/state/cluster/ClusterTest.java | 126 ++---
.../server/state/cluster/ClustersTest.java | 8 +-
...omponentHostConcurrentWriteDeadlockTest.java | 9 +-
.../ambari/server/state/host/HostTest.java | 6 +-
.../svccomphost/ServiceComponentHostTest.java | 21 +-
.../server/topology/AmbariContextTest.java | 37 +-
.../server/update/HostUpdateHelperTest.java | 40 +-
.../ambari/server/utils/StageUtilsTest.java | 4 +
52 files changed, 846 insertions(+), 1298 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index b04fdd7..7da1034 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -54,7 +54,6 @@ import java.util.EnumMap;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
@@ -79,10 +78,10 @@ import org.apache.ambari.server.ServiceComponentNotFoundException;
import org.apache.ambari.server.ServiceNotFoundException;
import org.apache.ambari.server.StackAccessException;
import org.apache.ambari.server.actionmanager.ActionManager;
+import org.apache.ambari.server.actionmanager.CommandExecutionType;
import org.apache.ambari.server.actionmanager.HostRoleCommand;
import org.apache.ambari.server.actionmanager.RequestFactory;
import org.apache.ambari.server.actionmanager.Stage;
-import org.apache.ambari.server.actionmanager.CommandExecutionType;
import org.apache.ambari.server.actionmanager.StageFactory;
import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.agent.ExecutionCommand.KeyNames;
@@ -935,17 +934,11 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
@Override
public Config createConfig(Cluster cluster, String type, Map<String, String> properties,
String versionTag, Map<String, Map<String, String>> propertiesAttributes) {
- Config config = configFactory.createNew(cluster, type,
- properties, propertiesAttributes);
- if (!StringUtils.isEmpty(versionTag)) {
- config.setTag(versionTag);
- }
-
- config.persist();
+ Config config = configFactory.createNew(cluster, type, versionTag, properties,
+ propertiesAttributes);
cluster.addConfig(config);
-
return config;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
index 96bb8f9..b957f0a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java
@@ -48,6 +48,7 @@ import org.apache.ambari.server.security.authorization.RoleAuthorization;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.ConfigImpl;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.configgroup.ConfigGroup;
@@ -100,6 +101,12 @@ public class ConfigGroupResourceProvider extends
@Inject
private static HostDAO hostDAO;
+
+ /**
+ * Used for creating {@link Config} instances to return in the REST response.
+ */
+ @Inject
+ private static ConfigFactory configFactory;
/**
* Create a new resource provider for the given management controller.
@@ -781,11 +788,7 @@ public class ConfigGroupResourceProvider extends
}
}
- Config config = new ConfigImpl(type);
- config.setTag(tag);
- config.setProperties(configProperties);
- config.setPropertiesAttributes(configAttributes);
-
+ Config config = configFactory.createReadOnly(type, tag, configProperties, configAttributes);
configurations.put(config.getType(), config);
}
} catch (Exception e) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
index 5459ddb..97280ee 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
@@ -451,7 +451,7 @@ public class ConfigureAction extends AbstractServerAction {
// of creating a whole new history record since it was already done
if (!targetStack.equals(currentStack) && targetStack.equals(configStack)) {
config.setProperties(newValues);
- config.persist(false);
+ config.save();
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", outputBuffer.toString(), "");
}
@@ -570,8 +570,9 @@ public class ConfigureAction extends AbstractServerAction {
for(Replace replacement: replacements){
if(isOperationAllowed(cluster, configType, replacement.key,
- replacement.ifKey, replacement.ifType, replacement.ifValue, replacement.ifKeyState))
+ replacement.ifKey, replacement.ifType, replacement.ifValue, replacement.ifKeyState)) {
allowedReplacements.add(replacement);
+ }
}
return allowedReplacements;
@@ -582,8 +583,9 @@ public class ConfigureAction extends AbstractServerAction {
for(ConfigurationKeyValue configurationKeyValue: sets){
if(isOperationAllowed(cluster, configType, configurationKeyValue.key,
- configurationKeyValue.ifKey, configurationKeyValue.ifType, configurationKeyValue.ifValue, configurationKeyValue.ifKeyState))
+ configurationKeyValue.ifKey, configurationKeyValue.ifType, configurationKeyValue.ifValue, configurationKeyValue.ifKeyState)) {
allowedSets.add(configurationKeyValue);
+ }
}
return allowedSets;
@@ -593,14 +595,16 @@ public class ConfigureAction extends AbstractServerAction {
List<Transfer> allowedTransfers = new ArrayList<>();
for (Transfer transfer : transfers) {
String key = "";
- if(transfer.operation == TransferOperation.DELETE)
+ if(transfer.operation == TransferOperation.DELETE) {
key = transfer.deleteKey;
- else
+ } else {
key = transfer.fromKey;
+ }
if(isOperationAllowed(cluster, configType, key,
- transfer.ifKey, transfer.ifType, transfer.ifValue, transfer.ifKeyState))
+ transfer.ifKey, transfer.ifType, transfer.ifValue, transfer.ifKeyState)) {
allowedTransfers.add(transfer);
+ }
}
return allowedTransfers;
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixLzoCodecPath.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixLzoCodecPath.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixLzoCodecPath.java
index ffa21ab..4833729 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixLzoCodecPath.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixLzoCodecPath.java
@@ -18,7 +18,11 @@
package org.apache.ambari.server.serveraction.upgrades;
-import com.google.inject.Inject;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
import org.apache.ambari.server.agent.CommandReport;
@@ -28,13 +32,7 @@ import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.commons.lang.StringUtils;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.ConcurrentMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import com.google.inject.Inject;
/**
* During stack upgrade, update lzo codec path in mapreduce.application.classpath and
@@ -78,7 +76,7 @@ public class FixLzoCodecPath extends AbstractServerAction {
}
}
config.setProperties(properties);
- config.persist(false);
+ config.save();
}
if (modifiedProperties.isEmpty()) {
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsers.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsers.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsers.java
index 3a06476..75588d5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsers.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsers.java
@@ -18,7 +18,9 @@
package org.apache.ambari.server.serveraction.upgrades;
-import com.google.inject.Inject;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
import org.apache.ambari.server.agent.CommandReport;
@@ -28,8 +30,7 @@ import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.commons.lang.StringUtils;
-import java.util.Map;
-import java.util.concurrent.ConcurrentMap;
+import com.google.inject.Inject;
/**
* During stack upgrade, update lzo codec path in mapreduce.application.classpath and
@@ -86,7 +87,7 @@ public class FixOozieAdminUsers extends AbstractServerAction {
oozieProperties.put(OOZIE_ADMIN_USERS_PROP, newOozieAdminUsers);
oozieConfig.setProperties(oozieProperties);
- oozieConfig.persist(false);
+ oozieConfig.save();
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
String.format("Set oozie admin users to %s", newOozieAdminUsers), "");
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseConfigCalculation.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseConfigCalculation.java
index 7f6d4b1..739dd7e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseConfigCalculation.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseConfigCalculation.java
@@ -18,7 +18,10 @@
package org.apache.ambari.server.serveraction.upgrades;
-import com.google.inject.Inject;
+import java.math.BigDecimal;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
import org.apache.ambari.server.agent.CommandReport;
@@ -27,9 +30,7 @@ import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import java.math.BigDecimal;
-import java.util.Map;
-import java.util.concurrent.ConcurrentMap;
+import com.google.inject.Inject;
/**
* Computes HBase properties. This class is only used when moving from
@@ -79,8 +80,9 @@ public class HBaseConfigCalculation extends AbstractServerAction {
"Upper or lower memstore limit setting value is malformed, skipping", "");
}
- if (lowerLimit.scale() < 2) //make sure result will have at least 2 digits after decimal point
+ if (lowerLimit.scale() < 2) {
lowerLimit = lowerLimit.setScale(2, BigDecimal.ROUND_HALF_UP);
+ }
BigDecimal lowerLimitNew = lowerLimit.divide(upperLimit, BigDecimal.ROUND_HALF_UP);
properties.put(NEW_LOWER_LIMIT_PROPERTY_NAME, lowerLimitNew.toString());
@@ -90,7 +92,7 @@ public class HBaseConfigCalculation extends AbstractServerAction {
properties.remove(OLD_LOWER_LIMIT_PROPERTY_NAME);
config.setProperties(properties);
- config.persist(false);
+ config.save();
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
String.format("%s was set to %s", NEW_LOWER_LIMIT_PROPERTY_NAME, lowerLimitNew.toString()), "");
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeAction.java
index b238bca..fb15555 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeAction.java
@@ -18,7 +18,11 @@
package org.apache.ambari.server.serveraction.upgrades;
-import com.google.inject.Inject;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
import org.apache.ambari.server.agent.CommandReport;
@@ -27,10 +31,7 @@ import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import java.util.Map;
-import java.util.concurrent.ConcurrentMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import com.google.inject.Inject;
/**
* Computes HBase Env content property.
@@ -79,7 +80,7 @@ public class HBaseEnvMaxDirectMemorySizeAction extends AbstractServerAction {
properties.put(CONTENT_NAME, appendedContent);
config.setProperties(properties);
- config.persist(false);
+ config.save();
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
String.format("The %s/%s property was appended with %s", SOURCE_CONFIG_TYPE, CONTENT_NAME, APPEND_CONTENT_LINE),"");
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathAction.java
index 0e10160..c5000bf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathAction.java
@@ -18,7 +18,11 @@
package org.apache.ambari.server.serveraction.upgrades;
-import com.google.inject.Inject;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
import org.apache.ambari.server.agent.CommandReport;
@@ -27,10 +31,7 @@ import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import java.util.Map;
-import java.util.concurrent.ConcurrentMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import com.google.inject.Inject;
/**
* Append hive-env config type with HIVE_HOME and HIVE_CONF_DIR variables if they are absent
@@ -103,7 +104,7 @@ public class HiveEnvClasspathAction extends AbstractServerAction {
}
config.setProperties(properties);
- config.persist(false);
+ config.save();
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
String.format("Added %s, %s to content at %s", HIVE_CONF_DIR, HIVE_HOME, TARGET_CONFIG_TYPE), "");
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigAction.java
index 0ade30b..7ebad08 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigAction.java
@@ -85,7 +85,7 @@ public class HiveZKQuorumConfigAction extends AbstractServerAction {
hiveSiteProperties.put(HIVE_SITE_ZK_CONNECT_STRING, zookeeperQuorum);
hiveSite.setProperties(hiveSiteProperties);
- hiveSite.persist(false);
+ hiveSite.save();
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
String.format("Successfully set %s and %s in %s", HIVE_SITE_ZK_QUORUM,
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/OozieConfigCalculation.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/OozieConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/OozieConfigCalculation.java
index 4da67ca..9b8a7dc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/OozieConfigCalculation.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/OozieConfigCalculation.java
@@ -18,7 +18,11 @@
package org.apache.ambari.server.serveraction.upgrades;
-import com.google.inject.Inject;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
import org.apache.ambari.server.agent.CommandReport;
@@ -27,10 +31,7 @@ import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import java.util.Map;
-import java.util.concurrent.ConcurrentMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import com.google.inject.Inject;
/**
* Changes oozie-env during upgrade (adds -Dhdp.version to $HADOOP_OPTS variable)
@@ -67,7 +68,7 @@ public class OozieConfigCalculation extends AbstractServerAction {
}
config.setProperties(properties);
- config.persist(false);
+ config.save();
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
String.format("Added -Dhdp.version to $HADOOP_OPTS variable at %s", TARGET_CONFIG_TYPE), "");
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculation.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculation.java
index ff4a20e..8e0161b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculation.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculation.java
@@ -141,13 +141,13 @@ public class RangerConfigCalculation extends AbstractServerAction {
targetValues.put("ranger.jpa.audit.jdbc.dialect", dialect);
config.setProperties(targetValues);
- config.persist(false);
+ config.save();
config = cluster.getDesiredConfigByType(RANGER_ENV_CONFIG_TYPE);
targetValues = config.getProperties();
targetValues.put("ranger_privelege_user_jdbc_url", userJDBCUrl);
config.setProperties(targetValues);
- config.persist(false);
+ config.save();
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", stdout.toString(), "");
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculation.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculation.java
index ba0da79..c059c9e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculation.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculation.java
@@ -87,7 +87,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction {
if (null != hadoopUser) {
targetValues.put(RANGER_PLUGINS_HDFS_SERVICE_USER, hadoopUser);
rangerAdminconfig.setProperties(targetValues);
- rangerAdminconfig.persist(false);
+ rangerAdminconfig.save();
sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_HDFS_SERVICE_USER);
} else {
errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "hdfs_user", HADOOP_ENV_CONFIG_TYPE);
@@ -104,7 +104,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction {
if (null != hiveUser) {
targetValues.put(RANGER_PLUGINS_HIVE_SERVICE_USER, hiveUser);
rangerAdminconfig.setProperties(targetValues);
- rangerAdminconfig.persist(false);
+ rangerAdminconfig.save();
sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_HIVE_SERVICE_USER);
} else {
errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "hive_user", HIVE_ENV_CONFIG_TYPE);
@@ -121,7 +121,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction {
if (null != yarnUser) {
targetValues.put(RANGER_PLUGINS_YARN_SERVICE_USER, yarnUser);
rangerAdminconfig.setProperties(targetValues);
- rangerAdminconfig.persist(false);
+ rangerAdminconfig.save();
sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_YARN_SERVICE_USER);
} else {
errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "yarn_user", YARN_ENV_CONFIG_TYPE);
@@ -138,7 +138,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction {
if (null != hbaseUser) {
targetValues.put(RANGER_PLUGINS_HBASE_SERVICE_USER, hbaseUser);
rangerAdminconfig.setProperties(targetValues);
- rangerAdminconfig.persist(false);
+ rangerAdminconfig.save();
sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_HBASE_SERVICE_USER);
} else {
errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "hbase_user", HBASE_ENV_CONFIG_TYPE);
@@ -155,7 +155,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction {
if (null != knoxUser) {
targetValues.put(RANGER_PLUGINS_KNOX_SERVICE_USER, knoxUser);
rangerAdminconfig.setProperties(targetValues);
- rangerAdminconfig.persist(false);
+ rangerAdminconfig.save();
sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_KNOX_SERVICE_USER);
} else {
errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "knox_user", KNOX_ENV_CONFIG_TYPE);
@@ -190,7 +190,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction {
}
targetValues.put(RANGER_PLUGINS_STORM_SERVICE_USER, stormValue);
rangerAdminconfig.setProperties(targetValues);
- rangerAdminconfig.persist(false);
+ rangerAdminconfig.save();
sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_STORM_SERVICE_USER);
} else {
errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "storm_user", STORM_ENV_CONFIG_TYPE);
@@ -207,7 +207,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction {
if (null != kafkaUser) {
targetValues.put(RANGER_PLUGINS_KAFKA_SERVICE_USER, kafkaUser);
rangerAdminconfig.setProperties(targetValues);
- rangerAdminconfig.persist(false);
+ rangerAdminconfig.save();
sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_KAFKA_SERVICE_USER);
} else {
errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "kafka_user", KAFKA_ENV_CONFIG_TYPE);
@@ -224,7 +224,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction {
if (null != rangerKmsUser) {
targetValues.put(RANGER_PLUGINS_KMS_SERVICE_USER, rangerKmsUser);
rangerAdminconfig.setProperties(targetValues);
- rangerAdminconfig.persist(false);
+ rangerAdminconfig.save();
sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_KMS_SERVICE_USER);
} else {
errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "kms_user", RANGER_KMS_ENV_CONFIG_TYPE);
@@ -243,10 +243,10 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction {
if (null != spnegoKeytab) {
targetValues.put(RANGER_SPNEGO_KEYTAB, spnegoKeytab);
rangerAdminconfig.setProperties(targetValues);
- rangerAdminconfig.persist(false);
+ rangerAdminconfig.save();
sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_SPNEGO_KEYTAB);
} else {
- errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "dfs.web.authentication.kerberos.keytab", HDFS_SITE_CONFIG_TYPE);
+ errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "dfs.web.authentication.kerberos.keytab", HDFS_SITE_CONFIG_TYPE);
}
} else {
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfig.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfig.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfig.java
index 299a373..b1aa6e1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfig.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfig.java
@@ -25,7 +25,6 @@ import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.ServiceNotFoundException;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
import org.apache.ambari.server.agent.CommandReport;
import org.apache.ambari.server.serveraction.AbstractServerAction;
@@ -89,7 +88,7 @@ public class SparkShufflePropertyConfig extends AbstractServerAction {
yarnSiteProperties.put(YARN_NODEMANAGER_AUX_SERVICES, newAuxServices);
yarnSiteProperties.put(YARN_NODEMANAGER_AUX_SERVICES_SPARK_SHUFFLE_CLASS, YARN_NODEMANAGER_AUX_SERVICES_SPARK_SHUFFLE_CLASS_VALUE);
yarnSiteConfig.setProperties(yarnSiteProperties);
- yarnSiteConfig.persist(false);
+ yarnSiteConfig.save();
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
String.format("%s was set from %s to %s. %s was set to %s",
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/YarnConfigCalculation.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/YarnConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/YarnConfigCalculation.java
index feefcaf..d638858 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/YarnConfigCalculation.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/YarnConfigCalculation.java
@@ -67,7 +67,7 @@ public class YarnConfigCalculation extends AbstractServerAction {
yarnSiteProperties.put(YARN_RM_ZK_ADDRESS_PROPERTY_NAME, zkServersStr);
yarnSiteProperties.put(HADOOP_REGISTRY_ZK_QUORUM_PROPERTY_NAME, zkServersStr);
yarnSiteConfig.setProperties(yarnSiteProperties);
- yarnSiteConfig.persist(false);
+ yarnSiteConfig.save();
return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
String.format("%s was set from %s to %s. %s was set from %s to %s",
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java
index b35aad9..67570f4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java
@@ -30,8 +30,6 @@ public interface Config {
void setPropertiesTypes(Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes);
- void setStackId(StackId stackId);
-
/**
* @return Config Type
*/
@@ -66,18 +64,6 @@ public interface Config {
public Map<String, Map<String, String>> getPropertiesAttributes();
/**
- * Change the version tag
- * @param versionTag
- */
- public void setTag(String versionTag);
-
- /**
- * Set config version
- * @param version
- */
- public void setVersion(Long version);
-
- /**
* Replace properties with new provided set
* @param properties Property Map to replace existing one
*/
@@ -110,11 +96,5 @@ public interface Config {
/**
* Persist the configuration.
*/
- public void persist();
-
- /**
- * Persist the configuration, optionally creating a new config entity.
- */
- public void persist(boolean newConfig);
-
+ public void save();
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java
index eaf68aa..d6cd997 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java
@@ -27,18 +27,20 @@ import com.google.inject.assistedinject.Assisted;
* Factory for creating configuration objects using {@link Assisted} constructor parameters
*/
public interface ConfigFactory {
-
+
/**
* Creates a new {@link Config} object using provided values.
*
* @param cluster
* @param type
+ * @param tag
* @param map
* @param mapAttributes
* @return
*/
- Config createNew(Cluster cluster, String type, Map<String, String> map, Map<String, Map<String, String>> mapAttributes);
-
+ Config createNew(Cluster cluster, @Assisted("type") String type, @Assisted("tag") String tag,
+ Map<String, String> map, Map<String, Map<String, String>> mapAttributes);
+
/**
* Creates a new {@link Config} object using provided entity
*
@@ -48,4 +50,16 @@ public interface ConfigFactory {
*/
Config createExisting(Cluster cluster, ClusterConfigEntity entity);
+ /**
+ * Creates a read-only instance of a {@link Config} suitable for returning in
+ * REST responses.
+ *
+ * @param type
+ * @param tag
+ * @param map
+ * @param mapAttributes
+ * @return
+ */
+ Config createReadOnly(@Assisted("type") String type, @Assisted("tag") String tag,
+ Map<String, String> map, Map<String, Map<String, String>> mapAttributes);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
index 28bcd5f..e68839f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
@@ -18,28 +18,28 @@
package org.apache.ambari.server.state;
-import java.util.Collections;
-import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
+import javax.annotation.Nullable;
+
import org.apache.ambari.server.events.ClusterConfigChangedEvent;
import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
import org.apache.ambari.server.orm.dao.ClusterDAO;
import org.apache.ambari.server.orm.dao.ServiceConfigDAO;
import org.apache.ambari.server.orm.entities.ClusterConfigEntity;
import org.apache.ambari.server.orm.entities.ClusterEntity;
+import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import com.google.inject.Inject;
-import com.google.inject.Injector;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;
import com.google.inject.persist.Transactional;
@@ -52,50 +52,101 @@ public class ConfigImpl implements Config {
public static final String GENERATED_TAG_PREFIX = "generatedTag_";
- private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
+ private final long configId;
+ private final Cluster cluster;
+ private final StackId stackId;
+ private final String type;
+ private final String tag;
+ private final Long version;
- private Cluster cluster;
- private StackId stackId;
- private String type;
- private volatile String tag;
- private volatile Long version;
- private volatile Map<String, String> properties;
- private volatile Map<String, Map<String, String>> propertiesAttributes;
- private ClusterConfigEntity entity;
- private volatile Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes;
+ /**
+ * The properties of this configuration. This cannot be a
+ * {@link ConcurrentMap} since we allow null values. Therefore, it must be
+ * synchronized externally.
+ */
+ private Map<String, String> properties;
- @Inject
- private ClusterDAO clusterDAO;
+ /**
+ * A lock for reading/writing of {@link #properties} concurrently.
+ *
+ * @see #properties
+ */
+ private final ReentrantReadWriteLock propertyLock = new ReentrantReadWriteLock();
- @Inject
- private Gson gson;
+ /**
+ * The property attributes for this configuration.
+ */
+ private Map<String, Map<String, String>> propertiesAttributes;
+
+ private Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes;
+
+ private final ClusterDAO clusterDAO;
+
+ private final Gson gson;
@Inject
private ServiceConfigDAO serviceConfigDAO;
- @Inject
- private AmbariEventPublisher eventPublisher;
+ private final AmbariEventPublisher eventPublisher;
@AssistedInject
- public ConfigImpl(@Assisted Cluster cluster, @Assisted String type, @Assisted Map<String, String> properties,
- @Assisted Map<String, Map<String, String>> propertiesAttributes, Injector injector) {
+ ConfigImpl(@Assisted Cluster cluster, @Assisted("type") String type,
+ @Assisted("tag") @Nullable String tag,
+ @Assisted Map<String, String> properties,
+ @Assisted @Nullable Map<String, Map<String, String>> propertiesAttributes, ClusterDAO clusterDAO,
+ Gson gson, AmbariEventPublisher eventPublisher) {
+
this.cluster = cluster;
this.type = type;
this.properties = properties;
- this.propertiesAttributes = propertiesAttributes;
+
+ // only set this if it's non-null
+ this.propertiesAttributes = null == propertiesAttributes ? null
+ : new HashMap<>(propertiesAttributes);
+
+ this.clusterDAO = clusterDAO;
+ this.gson = gson;
+ this.eventPublisher = eventPublisher;
+ version = cluster.getNextConfigVersion(type);
+
+ // tag is nullable from factory but not in the DB, so ensure we generate something
+ tag = StringUtils.isBlank(tag) ? GENERATED_TAG_PREFIX + version : tag;
+ this.tag = tag;
+
+ ClusterEntity clusterEntity = clusterDAO.findById(cluster.getClusterId());
+
+ ClusterConfigEntity entity = new ClusterConfigEntity();
+ entity.setClusterEntity(clusterEntity);
+ entity.setClusterId(cluster.getClusterId());
+ entity.setType(type);
+ entity.setVersion(version);
+ entity.setTag(this.tag);
+ entity.setTimestamp(System.currentTimeMillis());
+ entity.setStack(clusterEntity.getDesiredStack());
+ entity.setData(gson.toJson(properties));
+
+ if (null != propertiesAttributes) {
+ entity.setAttributes(gson.toJson(propertiesAttributes));
+ }
// when creating a brand new config without a backing entity, use the
// cluster's desired stack as the config's stack
stackId = cluster.getDesiredStackVersion();
-
- injector.injectMembers(this);
propertiesTypes = cluster.getConfigPropertiesTypes(type);
- }
+ persist(entity);
+ configId = entity.getConfigId();
+ }
@AssistedInject
- public ConfigImpl(@Assisted Cluster cluster, @Assisted ClusterConfigEntity entity, Injector injector) {
+ ConfigImpl(@Assisted Cluster cluster, @Assisted ClusterConfigEntity entity,
+ ClusterDAO clusterDAO, Gson gson, AmbariEventPublisher eventPublisher) {
this.cluster = cluster;
+ this.clusterDAO = clusterDAO;
+ this.gson = gson;
+ this.eventPublisher = eventPublisher;
+ configId = entity.getConfigId();
+
type = entity.getType();
tag = entity.getTag();
version = entity.getVersion();
@@ -103,16 +154,69 @@ public class ConfigImpl implements Config {
// when using an existing entity, use the actual value of the entity's stack
stackId = new StackId(entity.getStack());
- this.entity = entity;
- injector.injectMembers(this);
propertiesTypes = cluster.getConfigPropertiesTypes(type);
+
+ // incur the hit on deserialization since this business object is stored locally
+ try {
+ Map<String, String> deserializedProperties = gson.<Map<String, String>> fromJson(
+ entity.getData(), Map.class);
+
+ if (null == deserializedProperties) {
+ deserializedProperties = new HashMap<>();
+ }
+
+ properties = deserializedProperties;
+ } catch (JsonSyntaxException e) {
+ LOG.error("Malformed configuration JSON stored in the database for {}/{}", entity.getType(),
+ entity.getTag());
+ }
+
+ // incur the hit on deserialization since this business object is stored locally
+ try {
+ Map<String, Map<String, String>> deserializedAttributes = gson.<Map<String, Map<String, String>>> fromJson(
+ entity.getAttributes(), Map.class);
+
+ if (null != deserializedAttributes) {
+ propertiesAttributes = new HashMap<>(deserializedAttributes);
+ }
+ } catch (JsonSyntaxException e) {
+ LOG.error("Malformed configuration attribute JSON stored in the database for {}/{}",
+ entity.getType(), entity.getTag());
+ }
}
/**
- * Constructor for clients not using factory.
+ * Constructor. This will create an instance suitable only for
+ * representation/serialization as it is incomplete.
+ *
+ * @param type
+ * @param tag
+ * @param properties
+ * @param propertiesAttributes
+ * @param clusterDAO
+ * @param gson
+ * @param eventPublisher
*/
- public ConfigImpl(String type) {
+ @AssistedInject
+ ConfigImpl(@Assisted("type") String type,
+ @Assisted("tag") @Nullable String tag,
+ @Assisted Map<String, String> properties,
+ @Assisted @Nullable Map<String, Map<String, String>> propertiesAttributes, ClusterDAO clusterDAO,
+ Gson gson, AmbariEventPublisher eventPublisher) {
+
+ this.tag = tag;
this.type = type;
+ this.properties = new HashMap<>(properties);
+ this.propertiesAttributes = null == propertiesAttributes ? null
+ : new HashMap<>(propertiesAttributes);
+ this.clusterDAO = clusterDAO;
+ this.gson = gson;
+ this.eventPublisher = eventPublisher;
+
+ cluster = null;
+ configId = 0;
+ version = 0L;
+ stackId = null;
}
/**
@@ -120,240 +224,124 @@ public class ConfigImpl implements Config {
*/
@Override
public StackId getStackId() {
- readWriteLock.readLock().lock();
- try {
- return stackId;
- } finally {
- readWriteLock.readLock().unlock();
- }
-
+ return stackId;
}
@Override
public Map<PropertyInfo.PropertyType, Set<String>> getPropertiesTypes() {
- readWriteLock.readLock().lock();
- try {
- return propertiesTypes;
- } finally {
- readWriteLock.readLock().unlock();
- }
+ return propertiesTypes;
}
@Override
public void setPropertiesTypes(Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes) {
- readWriteLock.writeLock().lock();
- try {
- this.propertiesTypes = propertiesTypes;
- } finally {
- readWriteLock.writeLock().unlock();
- }
- }
-
- @Override
- public void setStackId(StackId stackId) {
- readWriteLock.writeLock().lock();
- try {
- this.stackId = stackId;
- } finally {
- readWriteLock.writeLock().unlock();
- }
-
+ this.propertiesTypes = propertiesTypes;
}
@Override
public String getType() {
- readWriteLock.readLock().lock();
- try {
- return type;
- } finally {
- readWriteLock.readLock().unlock();
- }
-
+ return type;
}
@Override
public String getTag() {
- if (tag == null) {
- readWriteLock.writeLock().lock();
- try {
- if (tag == null) {
- tag = GENERATED_TAG_PREFIX + getVersion();
- }
- } finally {
- readWriteLock.writeLock().unlock();
- }
- }
-
- readWriteLock.readLock().lock();
- try {
-
- return tag;
- } finally {
- readWriteLock.readLock().unlock();
- }
-
+ return tag;
}
@Override
public Long getVersion() {
- if (version == null && cluster != null) {
- readWriteLock.writeLock().lock();
- try {
- if (version == null) {
- version = cluster.getNextConfigVersion(type); //pure DB calculation call, no cluster locking required
- }
- } finally {
- readWriteLock.writeLock().unlock();
- }
- }
-
- readWriteLock.readLock().lock();
- try {
- return version;
- } finally {
- readWriteLock.readLock().unlock();
- }
-
+ return version;
}
@Override
public Map<String, String> getProperties() {
- if (null != entity && null == properties) {
- readWriteLock.writeLock().lock();
- try {
- if (properties == null) {
- try {
- properties = gson.<Map<String, String>>fromJson(entity.getData(), Map.class);
- } catch (JsonSyntaxException e){
- String msg = String.format(
- "Malformed JSON stored in the database for %s configuration record with config_id %d",
- entity.getType(), entity.getConfigId());
- LOG.error(msg);
- throw new JsonSyntaxException(msg, e);
- }
- }
- } finally {
- readWriteLock.writeLock().unlock();
- }
- }
-
- readWriteLock.readLock().lock();
+ propertyLock.readLock().lock();
try {
- return null == properties ? new HashMap<String, String>()
- : new HashMap<String, String>(properties);
+ return properties == null ? new HashMap<String, String>() : new HashMap<>(properties);
} finally {
- readWriteLock.readLock().unlock();
+ propertyLock.readLock().unlock();
}
-
}
@Override
public Map<String, Map<String, String>> getPropertiesAttributes() {
- if (null != entity && null == propertiesAttributes) {
- readWriteLock.writeLock().lock();
- try {
- if (propertiesAttributes == null) {
- propertiesAttributes = gson.<Map<String, Map<String, String>>>fromJson(entity.getAttributes(), Map.class);
- }
- } finally {
- readWriteLock.writeLock().unlock();
- }
- }
-
- readWriteLock.readLock().lock();
- try {
- return null == propertiesAttributes ? null : new HashMap<String, Map<String, String>>(propertiesAttributes);
- } finally {
- readWriteLock.readLock().unlock();
- }
-
- }
-
- @Override
- public void setTag(String tag) {
- readWriteLock.writeLock().lock();
- try {
- this.tag = tag;
- } finally {
- readWriteLock.writeLock().unlock();
- }
-
- }
-
- @Override
- public void setVersion(Long version) {
- readWriteLock.writeLock().lock();
- try {
- this.version = version;
- } finally {
- readWriteLock.writeLock().unlock();
- }
-
+ return null == propertiesAttributes ? null
+ : new HashMap<String, Map<String, String>>(propertiesAttributes);
}
@Override
public void setProperties(Map<String, String> properties) {
- readWriteLock.writeLock().lock();
+ propertyLock.writeLock().lock();
try {
this.properties = properties;
} finally {
- readWriteLock.writeLock().unlock();
+ propertyLock.writeLock().unlock();
}
-
}
@Override
public void setPropertiesAttributes(Map<String, Map<String, String>> propertiesAttributes) {
- readWriteLock.writeLock().lock();
- try {
- this.propertiesAttributes = propertiesAttributes;
- } finally {
- readWriteLock.writeLock().unlock();
- }
-
+ this.propertiesAttributes = propertiesAttributes;
}
@Override
- public void updateProperties(Map<String, String> properties) {
- readWriteLock.writeLock().lock();
+ public void updateProperties(Map<String, String> propertiesToUpdate) {
+ propertyLock.writeLock().lock();
try {
- this.properties.putAll(properties);
+ properties.putAll(propertiesToUpdate);
} finally {
- readWriteLock.writeLock().unlock();
+ propertyLock.writeLock().unlock();
}
-
}
@Override
public List<Long> getServiceConfigVersions() {
- readWriteLock.readLock().lock();
- try {
- if (cluster == null || type == null || version == null) {
- return Collections.emptyList();
- }
- return serviceConfigDAO.getServiceConfigVersionsByConfig(cluster.getClusterId(), type, version);
- } finally {
- readWriteLock.readLock().unlock();
- }
-
+ return serviceConfigDAO.getServiceConfigVersionsByConfig(cluster.getClusterId(), type, version);
}
@Override
- public void deleteProperties(List<String> properties) {
- readWriteLock.writeLock().lock();
+ public void deleteProperties(List<String> propertyKeysToRemove) {
+ propertyLock.writeLock().lock();
try {
- for (String key : properties) {
- this.properties.remove(key);
- }
+ Set<String> keySet = properties.keySet();
+ keySet.removeAll(propertyKeysToRemove);
} finally {
- readWriteLock.writeLock().unlock();
+ propertyLock.writeLock().unlock();
}
+ }
+ /**
+ * Persist the entity and update the internal state relationships once the
+ * transaction has been committed.
+ */
+ private void persist(ClusterConfigEntity entity) {
+ persistEntitiesInTransaction(entity);
+
+ // ensure that the in-memory state of the cluster is kept consistent
+ cluster.addConfig(this);
+
+ // re-load the entity associations for the cluster
+ cluster.refresh();
+
+ // broadcast the change event for the configuration
+ ClusterConfigChangedEvent event = new ClusterConfigChangedEvent(cluster.getClusterName(),
+ getType(), getTag(), getVersion());
+
+ eventPublisher.publish(event);
}
- @Override
- public void persist() {
- persist(true);
+ /**
+ * Persist the cluster and configuration entities in their own transaction.
+ */
+ @Transactional
+ private void persistEntitiesInTransaction(ClusterConfigEntity entity) {
+ ClusterEntity clusterEntity = entity.getClusterEntity();
+
+ clusterDAO.createConfig(entity);
+ clusterEntity.getClusterConfigEntities().add(entity);
+
+ // save the entity, forcing a flush to ensure the refresh picks up the
+ // newest data
+ clusterDAO.merge(clusterEntity, true);
}
/**
@@ -361,69 +349,29 @@ public class ConfigImpl implements Config {
*/
@Override
@Transactional
- public void persist(boolean newConfig) {
- readWriteLock.writeLock().lock();
- try {
- ClusterEntity clusterEntity = clusterDAO.findById(cluster.getClusterId());
-
- if (newConfig) {
- ClusterConfigEntity entity = new ClusterConfigEntity();
- entity.setClusterEntity(clusterEntity);
- entity.setClusterId(cluster.getClusterId());
- entity.setType(getType());
- entity.setVersion(getVersion());
- entity.setTag(getTag());
- entity.setTimestamp(new Date().getTime());
- entity.setStack(clusterEntity.getDesiredStack());
- entity.setData(gson.toJson(getProperties()));
-
- if (null != getPropertiesAttributes()) {
- entity.setAttributes(gson.toJson(getPropertiesAttributes()));
- }
-
- clusterDAO.createConfig(entity);
- clusterEntity.getClusterConfigEntities().add(entity);
-
- // save the entity, forcing a flush to ensure the refresh picks up the
- // newest data
- clusterDAO.merge(clusterEntity, true);
- } else {
- // only supporting changes to the properties
- ClusterConfigEntity entity = null;
-
- // find the existing configuration to update
- for (ClusterConfigEntity cfe : clusterEntity.getClusterConfigEntities()) {
- if (getTag().equals(cfe.getTag()) && getType().equals(cfe.getType())
- && getVersion().equals(cfe.getVersion())) {
- entity = cfe;
- break;
- }
- }
-
- // if the configuration was found, then update it
- if (null != entity) {
- LOG.debug(
- "Updating {} version {} with new configurations; a new version will not be created",
- getType(), getVersion());
-
- entity.setData(gson.toJson(getProperties()));
-
- // save the entity, forcing a flush to ensure the refresh picks up the
- // newest data
- clusterDAO.merge(clusterEntity, true);
- }
- }
- } finally {
- readWriteLock.writeLock().unlock();
- }
+ public void save() {
+ ClusterConfigEntity entity = clusterDAO.findConfig(configId);
+ ClusterEntity clusterEntity = clusterDAO.findById(entity.getClusterId());
- // re-load the entity associations for the cluster
- cluster.refresh();
+ // if the configuration was found, then update it
+ if (null != entity) {
+ LOG.debug("Updating {} version {} with new configurations; a new version will not be created",
+ getType(), getVersion());
- // broadcast the change event for the configuration
- ClusterConfigChangedEvent event = new ClusterConfigChangedEvent(cluster.getClusterName(),
- getType(), getTag(), getVersion());
+ entity.setData(gson.toJson(getProperties()));
+
+ // save the entity, forcing a flush to ensure the refresh picks up the
+ // newest data
+ clusterDAO.merge(clusterEntity, true);
+
+ // re-load the entity associations for the cluster
+ cluster.refresh();
+
+ // broadcast the change event for the configuration
+ ClusterConfigChangedEvent event = new ClusterConfigChangedEvent(cluster.getClusterName(),
+ getType(), getTag(), getVersion());
eventPublisher.publish(event);
+ }
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
index 9917720..9a2fc88 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
@@ -21,6 +21,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
+import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
@@ -44,6 +45,7 @@ import org.apache.ambari.server.orm.entities.HostEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.Host;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -78,7 +80,10 @@ public class ConfigGroupImpl implements ConfigGroup {
@Inject
private ClusterDAO clusterDAO;
@Inject
- Clusters clusters;
+ private Clusters clusters;
+
+ @Inject
+ private ConfigFactory configFactory;
@AssistedInject
public ConfigGroupImpl(@Assisted("cluster") Cluster cluster,
@@ -398,35 +403,24 @@ public class ConfigGroupImpl implements ConfigGroup {
}
if (configurations != null && !configurations.isEmpty()) {
- for (Config config : configurations.values()) {
+ for (Entry<String, Config> entry : configurations.entrySet()) {
+ Config config = entry.getValue();
ClusterConfigEntity clusterConfigEntity = clusterDAO.findConfig
(cluster.getClusterId(), config.getType(), config.getTag());
if (clusterConfigEntity == null) {
- config.setVersion(cluster.getNextConfigVersion(config.getType()));
- config.setStackId(cluster.getDesiredStackVersion());
- // Create configuration
- clusterConfigEntity = new ClusterConfigEntity();
- clusterConfigEntity.setClusterId(clusterEntity.getClusterId());
- clusterConfigEntity.setClusterEntity(clusterEntity);
- clusterConfigEntity.setStack(clusterEntity.getDesiredStack());
- clusterConfigEntity.setType(config.getType());
- clusterConfigEntity.setVersion(config.getVersion());
- clusterConfigEntity.setTag(config.getTag());
- clusterConfigEntity.setData(gson.toJson(config.getProperties()));
- if (null != config.getPropertiesAttributes()) {
- clusterConfigEntity.setAttributes(gson.toJson(config.getPropertiesAttributes()));
- }
- clusterConfigEntity.setTimestamp(System.currentTimeMillis());
- clusterDAO.createConfig(clusterConfigEntity);
- clusterEntity.getClusterConfigEntities().add(clusterConfigEntity);
- cluster.addConfig(config);
- clusterDAO.merge(clusterEntity);
- cluster.refresh();
+ config = configFactory.createNew(cluster, config.getType(), config.getTag(),
+ config.getProperties(), config.getPropertiesAttributes());
+
+ entry.setValue(config);
+
+ clusterConfigEntity = clusterDAO.findConfig(cluster.getClusterId(), config.getType(),
+ config.getTag());
}
ConfigGroupConfigMappingEntity configMappingEntity =
new ConfigGroupConfigMappingEntity();
+
configMappingEntity.setTimestamp(System.currentTimeMillis());
configMappingEntity.setClusterId(clusterEntity.getClusterId());
configMappingEntity.setClusterConfigEntity(clusterConfigEntity);
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
index 83f8470..bb6be30 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
@@ -67,7 +67,7 @@ import org.apache.ambari.server.security.authorization.AuthorizationException;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
+import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.DesiredConfig;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.SecurityType;
@@ -91,8 +91,13 @@ public class AmbariContext {
@Inject
private PersistedState persistedState;
+ /**
+ * Used for creating read-only instances of existing {@link Config} in order
+ * to send them to the {@link ConfigGroupResourceProvider} to create
+ * {@link ConfigGroup}s.
+ */
@Inject
- private org.apache.ambari.server.configuration.Configuration configs;
+ ConfigFactory configFactory;
private static AmbariManagementController controller;
private static ClusterController clusterController;
@@ -474,11 +479,13 @@ public class AmbariContext {
SortedSet<DesiredConfig> desiredConfigsOrderedByVersion = new TreeSet<>(new Comparator<DesiredConfig>() {
@Override
public int compare(DesiredConfig o1, DesiredConfig o2) {
- if (o1.getVersion() < o2.getVersion())
+ if (o1.getVersion() < o2.getVersion()) {
return -1;
+ }
- if (o1.getVersion() > o2.getVersion())
+ if (o1.getVersion() > o2.getVersion()) {
return 1;
+ }
return 0;
}
@@ -489,9 +496,9 @@ public class AmbariContext {
int tagMatchState = 0; // 0 -> INITIAL -> tagMatchState = 1 -> TOPLOGY_RESOLVED -> tagMatchState = 2
for (DesiredConfig config: desiredConfigsOrderedByVersion) {
- if (config.getTag().equals(TopologyManager.INITIAL_CONFIG_TAG) && tagMatchState == 0)
+ if (config.getTag().equals(TopologyManager.INITIAL_CONFIG_TAG) && tagMatchState == 0) {
tagMatchState = 1;
- else if (config.getTag().equals(TopologyManager.TOPOLOGY_RESOLVED_TAG) && tagMatchState == 1) {
+ } else if (config.getTag().equals(TopologyManager.TOPOLOGY_RESOLVED_TAG) && tagMatchState == 1) {
tagMatchState = 2;
break;
}
@@ -605,9 +612,7 @@ public class AmbariContext {
for (Map.Entry<String, Map<String, String>> entry : userProvidedGroupProperties.entrySet()) {
String type = entry.getKey();
String service = stack.getServiceForConfigType(type);
- Config config = new ConfigImpl(type);
- config.setTag(groupName);
- config.setProperties(entry.getValue());
+ Config config = configFactory.createReadOnly(type, groupName, entry.getValue(), null);
//todo: attributes
Map<String, Config> serviceConfigs = groupConfigs.get(service);
if (serviceConfigs == null) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/main/java/org/apache/ambari/server/update/HostUpdateHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/update/HostUpdateHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/update/HostUpdateHelper.java
index 6a8057c..4c1ef5a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/update/HostUpdateHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/update/HostUpdateHelper.java
@@ -53,8 +53,8 @@ import org.apache.ambari.server.orm.entities.TopologyRequestEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.utils.EventBusSynchronizer;
import org.apache.commons.lang.StringUtils;
@@ -234,12 +234,12 @@ public class HostUpdateHelper {
boolean configUpdated;
// going through all cluster configs and update property values
+ ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
for (ClusterConfigEntity clusterConfigEntity : clusterConfigEntities) {
- ConfigImpl config = new ConfigImpl(cluster, clusterConfigEntity, injector);
+ Config config = configFactory.createExisting(cluster, clusterConfigEntity);
configUpdated = false;
for (Map.Entry<String,String> property : config.getProperties().entrySet()) {
-
updatedPropertyValue = replaceHosts(property.getValue(), currentHostNames, hostMapping);
if (updatedPropertyValue != null) {
@@ -249,8 +249,9 @@ public class HostUpdateHelper {
configUpdated = true;
}
}
+
if (configUpdated) {
- config.persist(false);
+ config.save();
}
}
}
@@ -317,6 +318,7 @@ public class HostUpdateHelper {
* */
public class StringComparator implements Comparator<String> {
+ @Override
public int compare(String s1, String s2) {
return s2.length() - s1.length();
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
index ffca51d..62ce93b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
@@ -36,7 +36,6 @@ import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.StackId;
@@ -128,24 +127,16 @@ public class ExecutionCommandWrapperTest {
CONFIG_ATTRIBUTES = new HashMap<String, Map<String,String>>();
//Cluster level global config
- Config globalConfig = configFactory.createNew(cluster1, GLOBAL_CONFIG, GLOBAL_CLUSTER, CONFIG_ATTRIBUTES);
- globalConfig.setTag(CLUSTER_VERSION_TAG);
- cluster1.addConfig(globalConfig);
+ configFactory.createNew(cluster1, GLOBAL_CONFIG, CLUSTER_VERSION_TAG, GLOBAL_CLUSTER, CONFIG_ATTRIBUTES);
//Cluster level service config
- Config serviceSiteConfigCluster = configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, SERVICE_SITE_CLUSTER, CONFIG_ATTRIBUTES);
- serviceSiteConfigCluster.setTag(CLUSTER_VERSION_TAG);
- cluster1.addConfig(serviceSiteConfigCluster);
+ configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, CLUSTER_VERSION_TAG, SERVICE_SITE_CLUSTER, CONFIG_ATTRIBUTES);
//Service level service config
- Config serviceSiteConfigService = configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, SERVICE_SITE_SERVICE, CONFIG_ATTRIBUTES);
- serviceSiteConfigService.setTag(SERVICE_VERSION_TAG);
- cluster1.addConfig(serviceSiteConfigService);
+ configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, SERVICE_VERSION_TAG, SERVICE_SITE_SERVICE, CONFIG_ATTRIBUTES);
//Host level service config
- Config serviceSiteConfigHost = configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, SERVICE_SITE_HOST, CONFIG_ATTRIBUTES);
- serviceSiteConfigHost.setTag(HOST_VERSION_TAG);
- cluster1.addConfig(serviceSiteConfigHost);
+ configFactory.createNew(cluster1, SERVICE_SITE_CONFIG, HOST_VERSION_TAG, SERVICE_SITE_HOST, CONFIG_ATTRIBUTES);
ActionDBAccessor db = injector.getInstance(ActionDBAccessorImpl.class);
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionSchedulerThreading.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionSchedulerThreading.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionSchedulerThreading.java
index 90a4421..246c8b3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionSchedulerThreading.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionSchedulerThreading.java
@@ -34,8 +34,8 @@ import org.apache.ambari.server.orm.OrmTestHelper;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
import org.apache.ambari.server.state.DesiredConfig;
import org.apache.ambari.server.state.StackId;
import org.junit.After;
@@ -103,15 +103,11 @@ public class TestActionSchedulerThreading {
Map<String, String> properties = new HashMap<String, String>();
Map<String, Map<String, String>> propertiesAttributes = new HashMap<String, Map<String, String>>();
+ ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
+
// foo-type for v1 on current stack
properties.put("foo-property-1", "foo-value-1");
- Config c1 = new ConfigImpl(cluster, "foo-type", properties, propertiesAttributes, injector);
- c1.setTag("version-1");
- c1.setStackId(stackId);
- c1.setVersion(1L);
-
- cluster.addConfig(c1);
- c1.persist();
+ Config c1 = configFactory.createNew(cluster, "foo-type", "version-1", properties, propertiesAttributes);
// make v1 "current"
cluster.addDesiredConfig("admin", Sets.newHashSet(c1), "note-1");
@@ -122,12 +118,7 @@ public class TestActionSchedulerThreading {
// save v2
// foo-type for v2 on new stack
properties.put("foo-property-2", "foo-value-2");
- Config c2 = new ConfigImpl(cluster, "foo-type", properties, propertiesAttributes, injector);
- c2.setTag("version-2");
- c2.setStackId(newStackId);
- c2.setVersion(2L);
- cluster.addConfig(c2);
- c2.persist();
+ Config c2 = configFactory.createNew(cluster, "foo-type", "version-2", properties, propertiesAttributes);
// make v2 "current"
cluster.addDesiredConfig("admin", Sets.newHashSet(c2), "note-2");
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
index 43503fa..fc2bca5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/HeartbeatTestHelper.java
@@ -193,11 +193,7 @@ public class HeartbeatTestHelper {
cluster.setCurrentStackVersion(stackId);
ConfigFactory cf = injector.getInstance(ConfigFactory.class);
- Config config = cf.createNew(cluster, "cluster-env", configProperties, new HashMap<String, Map<String, String>>());
- config.setTag("version1");
- config.persist();
-
- cluster.addConfig(config);
+ Config config = cf.createNew(cluster, "cluster-env", "version1", configProperties, new HashMap<String, Map<String, String>>());
cluster.addDesiredConfig("user", Collections.singleton(config));
helper.getOrCreateRepositoryVersion(stackId, stackId.getStackVersion());
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
index 76ab45c..68e9993 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatMonitor.java
@@ -159,10 +159,8 @@ public class TestHeartbeatMonitor {
}};
ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
- Config config = configFactory.createNew(cluster, "hadoop-env",
+ Config config = configFactory.createNew(cluster, "hadoop-env", "version1",
new HashMap<String,String>() {{ put("a", "b"); }}, new HashMap<String, Map<String,String>>());
- config.setTag("version1");
- cluster.addConfig(config);
cluster.addDesiredConfig("_test", Collections.singleton(config));
@@ -243,18 +241,15 @@ public class TestHeartbeatMonitor {
}};
ConfigFactory configFactory = injector.getInstance(ConfigFactory.class);
- Config hadoopEnvConfig = configFactory.createNew(cluster, "hadoop-env",
+ Config hadoopEnvConfig = configFactory.createNew(cluster, "hadoop-env", "version1",
new HashMap<String, String>() {{
put("a", "b");
}}, new HashMap<String, Map<String,String>>());
- Config hbaseEnvConfig = configFactory.createNew(cluster, "hbase-env",
+ Config hbaseEnvConfig = configFactory.createNew(cluster, "hbase-env", "version1",
new HashMap<String, String>() {{
put("a", "b");
}}, new HashMap<String, Map<String,String>>());
- hadoopEnvConfig.setTag("version1");
- cluster.addConfig(hadoopEnvConfig);
- hbaseEnvConfig.setTag("version1");
- cluster.addConfig(hbaseEnvConfig);
+
cluster.addDesiredConfig("_test", Collections.singleton(hadoopEnvConfig));
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
index 6533e1c..6640837 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/configuration/RecoveryConfigHelperTest.java
@@ -218,7 +218,7 @@ public class RecoveryConfigHelperTest {
config.updateProperties(new HashMap<String, String>() {{
put(RecoveryConfigHelper.RECOVERY_ENABLED_KEY, "false");
}});
- config.persist(false);
+ config.save();
// Recovery config should be stale because of the above change.
boolean isConfigStale = recoveryConfigHelper.isConfigStale(cluster.getClusterName(), DummyHostname1,
http://git-wip-us.apache.org/repos/asf/ambari/blob/a6639a7c/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
index e54a117..2507a46 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
@@ -87,8 +87,8 @@ import org.apache.ambari.server.security.ldap.LdapBatchDto;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.ConfigImpl;
import org.apache.ambari.server.state.DesiredConfig;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.MaintenanceState;
@@ -610,6 +610,7 @@ public class AmbariManagementControllerImplTest {
Cluster cluster = createNiceMock(Cluster.class);
ActionManager actionManager = createNiceMock(ActionManager.class);
ClusterRequest clusterRequest = createNiceMock(ClusterRequest.class);
+ Config config = createNiceMock(Config.class);
// requests
Set<ClusterRequest> setRequests = Collections.singleton(clusterRequest);
@@ -632,18 +633,11 @@ public class AmbariManagementControllerImplTest {
expect(clusters.getClusterById(1L)).andReturn(cluster).anyTimes();
expect(cluster.getClusterName()).andReturn("clusterOld").anyTimes();
expect(cluster.getConfigPropertiesTypes(anyObject(String.class))).andReturn(Maps.<PropertyInfo.PropertyType, Set<String>>newHashMap()).anyTimes();
- expect(cluster.getDesiredConfigByType(anyObject(String.class))).andReturn(new ConfigImpl("config-type") {
- @Override
- public Map<String, Map<String, String>> getPropertiesAttributes() {
- return Maps.newHashMap();
- }
-
- @Override
- public Map<String, String> getProperties() {
- return configReqProps;
- }
- }).anyTimes();
+ expect(config.getType()).andReturn("config-type").anyTimes();
+ expect(config.getProperties()).andReturn(configReqProps).anyTimes();
+ expect(config.getPropertiesAttributes()).andReturn(new HashMap<String,Map<String,String>>()).anyTimes();
+ expect(cluster.getDesiredConfigByType(anyObject(String.class))).andReturn(config).anyTimes();
cluster.addSessionAttributes(anyObject(Map.class));
expectLastCall().once();
@@ -652,7 +646,7 @@ public class AmbariManagementControllerImplTest {
expectLastCall();
// replay mocks
- replay(actionManager, cluster, clusters, injector, clusterRequest, sessionManager);
+ replay(actionManager, cluster, clusters, config, injector, clusterRequest, sessionManager);
// test
AmbariManagementController controller = new AmbariManagementControllerImpl(actionManager, clusters, injector);
@@ -660,7 +654,7 @@ public class AmbariManagementControllerImplTest {
// assert and verify
assertSame(controller, controllerCapture.getValue());
- verify(actionManager, cluster, clusters, injector, clusterRequest, sessionManager);
+ verify(actionManager, cluster, clusters, config, injector, clusterRequest, sessionManager);
}
/**
[10/14] ambari git commit: Merge branch 'trunk' into
branch-feature-AMBARI-18456
Posted by jo...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-18456
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a58c39c9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a58c39c9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a58c39c9
Branch: refs/heads/trunk
Commit: a58c39c9c177da52c32dd6f004cefa9658e9019d
Parents: 087de8b 0c837a6
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Dec 2 16:19:40 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Dec 2 16:19:40 2016 -0500
----------------------------------------------------------------------
.../admin-web/app/scripts/services/Cluster.js | 51 --
.../app/scripts/services/RoleDetailsModal.js | 31 +-
.../app/views/modals/RoleDetailsModal.html | 6 +-
.../src/main/python/ambari_agent/ActionQueue.py | 11 +
.../ambari_agent/CustomServiceOrchestrator.py | 7 +-
.../src/main/python/ambari_agent/FileCache.py | 12 +-
.../test/python/ambari_agent/TestActionQueue.py | 48 ++
.../TestCustomServiceOrchestrator.py | 30 +-
.../test/python/ambari_agent/TestFileCache.py | 10 +-
.../ambari_commons/ambari_metrics_helper.py | 45 +-
.../timeline/AbstractTimelineMetricsSink.java | 6 +-
.../AbstractTimelineMetricSinkTest.java | 10 +-
.../conf/unix/ambari-metrics-grafana | 6 +-
.../timeline/HadoopTimelineMetricsSink.java | 4 +-
.../timeline/HadoopTimelineMetricsSinkTest.java | 6 +-
.../src/main/python/core/config_reader.py | 9 +-
.../src/test/python/core/TestEmitter.py | 2 +-
.../ambari/server/agent/ExecutionCommand.java | 1 +
.../AmbariManagementControllerImpl.java | 9 +
.../controller/internal/CalculatedStatus.java | 52 +-
.../internal/RequestResourceProvider.java | 34 +-
.../internal/StageResourceProvider.java | 2 +-
.../internal/UpgradeResourceProvider.java | 24 +-
.../orm/AmbariJpaLocalTxnInterceptor.java | 9 +-
.../ambari/server/state/ComponentInfo.java | 14 +
.../stack/upgrade/ConfigurationCondition.java | 72 +-
.../server/upgrade/UpgradeCatalog211.java | 24 +-
.../server/upgrade/UpgradeCatalog250.java | 6 +
.../ambari/server/utils/RequestUtils.java | 10 +
.../python/ambari_server/serverConfiguration.py | 14 +-
.../src/main/python/ambari_server/utils.py | 23 +-
.../main/resources/Ambari-DDL-MySQL-CREATE.sql | 11 +-
.../1.6.1.2.2.0/package/scripts/params.py | 2 +-
.../AMBARI_METRICS/0.1.0/metainfo.xml | 1 +
.../0.1.0/package/scripts/params.py | 2 +-
.../0.1.0/package/scripts/service_check.py | 2 +-
.../FLUME/1.4.0.2.0/package/scripts/params.py | 2 +-
.../0.96.0.2.0/package/scripts/params_linux.py | 2 +-
.../package/alerts/alert_metrics_deviation.py | 2 +-
.../KAFKA/0.8.1/package/scripts/params.py | 2 +-
.../STORM/0.9.1/package/scripts/params_linux.py | 2 +-
.../2.1.0.2.0/package/scripts/service_check.py | 66 +-
.../ZOOKEEPER/3.4.6/metainfo.xml | 2 +-
.../2.0.6/hooks/before-START/scripts/params.py | 2 +-
.../HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml | 27 +-
.../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml | 27 +-
.../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml | 27 +-
.../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml | 27 +-
.../services/HDFS/configuration/hadoop-env.xml | 176 ++++
.../HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml | 27 +-
.../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml | 27 +-
.../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml | 27 +-
.../HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml | 27 +-
.../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml | 27 +-
.../HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml | 27 +-
.../2.1/hooks/before-START/scripts/params.py | 4 +-
.../src/main/resources/upgrade-pack.xsd | 8 +-
.../AmbariManagementControllerTest.java | 9 +
.../internal/CalculatedStatusTest.java | 31 +
.../internal/RequestResourceProviderTest.java | 12 +-
.../internal/UpgradeResourceProviderTest.java | 2 +-
.../orm/AmbariJpaLocalTxnInterceptorTest.java | 155 ++++
.../server/orm/InMemoryDefaultTestModule.java | 5 +
.../server/upgrade/UpgradeCatalog250Test.java | 17 +-
.../ambari/server/utils/RequestUtilsTest.java | 18 +
ambari-server/src/test/python/TestMpacks.py | 102 ++-
.../2.0.6/YARN/test_yarn_service_check.py | 111 +--
.../src/test/resources/dashboards/README.txt | 18 +
.../stacks/HDP/0.1/services/HDFS/metainfo.xml | 1 +
.../controllers/main/service/info/summary.js | 37 +-
.../main/service/reassign/step4_controller.js | 19 +-
.../mixins/main/dashboard/widgets/editable.js | 47 +-
.../main/dashboard/edit_widget_popup.hbs | 4 +-
.../edit_widget_popup_single_threshold.hbs | 2 +-
.../main/dashboard/plus_button_filter.hbs | 2 +-
.../app/templates/main/dashboard/widgets.hbs | 8 +-
.../main/dashboard/widgets/pie_chart.hbs | 5 +-
.../main/service/info/service_alert_popup.hbs | 15 +-
.../app/views/common/not-scrollable-textarea.js | 2 +-
ambari-web/app/views/main/dashboard/widget.js | 159 ++--
ambari-web/app/views/main/dashboard/widgets.js | 824 +++++++++----------
.../main/dashboard/widgets/datanode_live.js | 7 -
.../main/dashboard/widgets/flume_agent_live.js | 7 -
.../main/dashboard/widgets/hawqsegment_live.js | 7 -
.../dashboard/widgets/hbase_average_load.js | 11 +-
.../views/main/dashboard/widgets/hbase_links.js | 4 -
.../main/dashboard/widgets/hbase_master_heap.js | 4 -
.../dashboard/widgets/hbase_master_uptime.js | 5 -
.../widgets/hbase_regions_in_transition.js | 10 +-
.../main/dashboard/widgets/hdfs_capacity.js | 4 -
.../views/main/dashboard/widgets/hdfs_links.js | 4 -
.../views/main/dashboard/widgets/metrics_cpu.js | 3 -
.../main/dashboard/widgets/metrics_load.js | 3 -
.../main/dashboard/widgets/metrics_memory.js | 3 -
.../main/dashboard/widgets/metrics_network.js | 3 -
.../main/dashboard/widgets/namenode_cpu.js | 4 -
.../main/dashboard/widgets/namenode_heap.js | 4 -
.../main/dashboard/widgets/namenode_rpc.js | 10 +-
.../main/dashboard/widgets/namenode_uptime.js | 5 -
.../dashboard/widgets/node_managers_live.js | 7 -
.../main/dashboard/widgets/pie_chart_widget.js | 16 +-
.../views/main/dashboard/widgets/pxf_live.js | 6 -
.../dashboard/widgets/resource_manager_heap.js | 4 -
.../widgets/resource_manager_uptime.js | 5 -
.../main/dashboard/widgets/supervisor_live.js | 7 -
.../views/main/dashboard/widgets/text_widget.js | 4 +-
.../widgets/text_widget_single_threshold.js | 6 +-
.../dashboard/widgets/uptime_text_widget.js | 2 -
.../views/main/dashboard/widgets/yarn_links.js | 4 -
.../views/main/dashboard/widgets/yarn_memory.js | 4 -
.../service/reassign/step4_controller_test.js | 2 +-
.../test/views/main/dashboard/widget_test.js | 200 ++---
.../widgets/hbase_average_load_test.js | 4 +-
.../widgets/hbase_regions_in_transition_test.js | 6 +-
.../main/dashboard/widgets/namenode_rpc_test.js | 13 +-
.../text_widget_single_threshold_test.js | 10 +-
.../main/dashboard/widgets/text_widget_test.js | 10 +-
.../widgets/uptime_text_widget_test.js | 2 +-
.../test/views/main/dashboard/widgets_test.js | 669 ++++++---------
contrib/views/pom.xml | 1 -
contrib/views/zeppelin/pom.xml | 190 -----
.../view/zeppelin/ZeppelinServiceCheck.java | 55 --
.../ambari/view/zeppelin/ZeppelinServlet.java | 113 ---
.../zeppelin/src/main/resources/WEB-INF/web.xml | 40 -
.../src/main/resources/view.log4j.properties | 27 -
.../views/zeppelin/src/main/resources/view.xml | 48 --
126 files changed, 2017 insertions(+), 2285 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a58c39c9/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a58c39c9/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a58c39c9/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
[13/14] ambari git commit: Merge branch 'trunk' into
branch-feature-AMBARI-18456
Posted by jo...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-18456
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/88c28925
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/88c28925
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/88c28925
Branch: refs/heads/trunk
Commit: 88c2892527b0a6ee97a3e237ebc76a896ce865e9
Parents: 803f44b d1293e0
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Dec 7 16:16:53 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Dec 7 16:16:53 2016 -0500
----------------------------------------------------------------------
ambari-agent/conf/unix/ambari-agent.ini | 1 +
.../src/main/python/ambari_agent/Hardware.py | 52 +-
.../src/main/python/ambari_agent/HostInfo.py | 12 +-
.../TestCustomServiceOrchestrator.py | 4 +-
.../test/python/ambari_agent/TestHardware.py | 70 ++
.../ambari_commons/ambari_metrics_helper.py | 26 +-
.../main/python/ambari_commons/logging_utils.py | 5 +-
.../libraries/functions/copy_tarball.py | 2 +-
.../dynamic_variable_interpretation.py | 2 +-
.../timeline/AbstractTimelineMetricsSink.java | 40 +-
.../availability/MetricCollectorHAHelper.java | 1 -
.../availability/MetricCollectorHATest.java | 1 -
.../src/main/python/core/config_reader.py | 2 +-
.../timeline/HBaseTimelineMetricStore.java | 18 +-
.../timeline/TimelineMetricConfiguration.java | 14 +
.../aggregators/AbstractTimelineAggregator.java | 6 +-
.../MetricCollectorHAController.java | 4 +-
.../TestApplicationHistoryServer.java | 14 +-
.../MetricCollectorHAControllerTest.java | 4 +-
ambari-project/pom.xml | 41 +
ambari-server/checkstyle.xml | 17 +
ambari-server/pom.xml | 4 +
.../ambari/server/agent/AgentRequests.java | 2 +-
.../ambari/server/agent/HeartbeatProcessor.java | 15 +-
.../ambari/server/checks/CheckDescription.java | 4 +-
.../internal/AlertTargetResourceProvider.java | 2 +-
.../ClusterStackVersionResourceProvider.java | 4 +-
.../internal/UpgradeResourceProvider.java | 2 +-
.../logging/LogSearchDataRetrievalService.java | 7 +-
.../apache/ambari/server/orm/dao/AlertsDAO.java | 6 +-
.../ambari/server/orm/entities/GroupEntity.java | 5 +-
.../server/security/authorization/Users.java | 10 +-
.../apache/ambari/server/state/ServiceImpl.java | 2 +-
.../server/state/cluster/ClustersImpl.java | 2 +-
.../ambari/server/state/host/HostImpl.java | 2 +-
.../services/RetryUpgradeActionService.java | 2 +-
.../ambari/server/state/stack/UpgradePack.java | 19 +-
.../svccomphost/ServiceComponentHostImpl.java | 2 +-
.../server/upgrade/AbstractUpgradeCatalog.java | 25 +-
.../server/upgrade/UpgradeCatalog240.java | 4 +-
.../main/python/ambari_server/serverUpgrade.py | 38 +-
.../AMBARI_INFRA/0.1.0/metainfo.xml | 1 +
.../0.1.0/package/scripts/setup_infra_solr.py | 3 +-
.../0.1.0/configuration/ams-env.xml | 2 +-
.../0.1.0/configuration/ams-site.xml | 12 +
.../AMBARI_METRICS/0.1.0/package/scripts/ams.py | 1 +
.../0.1.0/package/scripts/metrics_collector.py | 2 +
.../package/scripts/metrics_grafana_util.py | 7 +-
.../0.1.0/package/scripts/params.py | 22 +-
.../0.1.0/package/scripts/service_check.py | 2 +-
.../LOGSEARCH/0.5.0/metainfo.xml | 1 +
.../0.5.0/package/scripts/setup_logsearch.py | 3 +-
.../stacks/HDP/2.3/services/stack_advisor.py | 4 +-
.../HDP/3.0/configuration/cluster-env.xml | 293 +++++++
.../HDP/3.0/hooks/after-INSTALL/scripts/hook.py | 37 +
.../3.0/hooks/after-INSTALL/scripts/params.py | 97 +++
.../scripts/shared_initialization.py | 111 +++
.../hooks/before-ANY/files/changeToSecureUid.sh | 53 ++
.../HDP/3.0/hooks/before-ANY/scripts/hook.py | 36 +
.../HDP/3.0/hooks/before-ANY/scripts/params.py | 231 ++++++
.../before-ANY/scripts/shared_initialization.py | 226 +++++
.../3.0/hooks/before-INSTALL/scripts/hook.py | 37 +
.../3.0/hooks/before-INSTALL/scripts/params.py | 113 +++
.../scripts/repo_initialization.py | 68 ++
.../scripts/shared_initialization.py | 37 +
.../3.0/hooks/before-RESTART/scripts/hook.py | 29 +
.../hooks/before-START/files/checkForFormat.sh | 65 ++
.../before-START/files/fast-hdfs-resource.jar | Bin 0 -> 19285850 bytes
.../before-START/files/task-log4j.properties | 134 +++
.../hooks/before-START/files/topology_script.py | 66 ++
.../HDP/3.0/hooks/before-START/scripts/hook.py | 39 +
.../3.0/hooks/before-START/scripts/params.py | 326 ++++++++
.../before-START/scripts/rack_awareness.py | 47 ++
.../scripts/shared_initialization.py | 191 +++++
.../templates/commons-logging.properties.j2 | 43 +
.../templates/exclude_hosts_list.j2 | 21 +
.../templates/hadoop-metrics2.properties.j2 | 105 +++
.../before-START/templates/health_check.j2 | 81 ++
.../templates/include_hosts_list.j2 | 21 +
.../templates/topology_mappings.data.j2 | 24 +
.../main/resources/stacks/HDP/3.0/kerberos.json | 78 ++
.../main/resources/stacks/HDP/3.0/metainfo.xml | 24 +
.../HDP/3.0/properties/stack_features.json | 323 ++++++++
.../stacks/HDP/3.0/properties/stack_tools.json | 4 +
.../resources/stacks/HDP/3.0/repos/repoinfo.xml | 132 +++
.../services/HDFS/configuration/core-site.xml | 56 ++
.../services/HDFS/configuration/hadoop-env.xml | 200 +++++
.../services/HDFS/configuration/hdfs-log4j.xml | 226 +++++
.../services/HDFS/configuration/hdfs-site.xml | 153 ++++
.../HDFS/configuration/ranger-hdfs-audit.xml | 217 +++++
.../ranger-hdfs-plugin-properties.xml | 98 +++
.../configuration/ranger-hdfs-policymgr-ssl.xml | 67 ++
.../HDFS/configuration/ranger-hdfs-security.xml | 65 ++
.../services/HDFS/configuration/widgets.json | 649 +++++++++++++++
.../stacks/HDP/3.0/services/HDFS/kerberos.json | 246 ++++++
.../stacks/HDP/3.0/services/HDFS/metainfo.xml | 190 +++++
.../services/HDFS/quicklinks/quicklinks.json | 80 ++
.../HDP/3.0/services/HDFS/themes/theme.json | 179 ++++
.../HDP/3.0/services/YARN/YARN_widgets.json | 670 +++++++++++++++
.../YARN/configuration-mapred/mapred-env.xml | 51 ++
.../YARN/configuration-mapred/mapred-site.xml | 134 +++
.../YARN/configuration/capacity-scheduler.xml | 71 ++
.../YARN/configuration/ranger-yarn-audit.xml | 177 ++++
.../ranger-yarn-plugin-properties.xml | 82 ++
.../configuration/ranger-yarn-policymgr-ssl.xml | 66 ++
.../YARN/configuration/ranger-yarn-security.xml | 58 ++
.../services/YARN/configuration/yarn-env.xml | 200 +++++
.../services/YARN/configuration/yarn-log4j.xml | 103 +++
.../services/YARN/configuration/yarn-site.xml | 814 +++++++++++++++++++
.../stacks/HDP/3.0/services/YARN/kerberos.json | 278 +++++++
.../stacks/HDP/3.0/services/YARN/metainfo.xml | 173 ++++
.../YARN/quicklinks-mapred/quicklinks.json | 80 ++
.../services/YARN/quicklinks/quicklinks.json | 80 ++
.../3.0/services/YARN/themes-mapred/theme.json | 132 +++
.../HDP/3.0/services/YARN/themes/theme.json | 250 ++++++
.../HDP/3.0/services/ZOOKEEPER/metainfo.xml | 54 ++
.../main/resources/stacks/HDP/3.0/widgets.json | 95 +++
.../2.1/hooks/before-START/scripts/params.py | 2 +-
.../LogSearchDataRetrievalServiceTest.java | 142 ++--
.../ldap/AmbariLdapDataPopulatorTest.java | 3 +-
.../ambari/server/state/UpgradeHelperTest.java | 39 +-
.../server/upgrade/UpgradeCatalog210Test.java | 2 +
.../stacks/2.3/common/test_stack_advisor.py | 29 +-
.../stacks/2.4/AMBARI_INFRA/test_infra_solr.py | 2 +-
.../stacks/2.4/LOGSEARCH/test_logsearch.py | 2 +-
.../app/controllers/global/update_controller.js | 5 +-
ambari-web/app/controllers/installer.js | 3 +-
.../journalNode/step4_controller.js | 6 +-
.../journalNode/wizard_controller.js | 28 +-
.../main/admin/stack_and_upgrade_controller.js | 13 +-
.../app/controllers/wizard/step1_controller.js | 21 +-
ambari-web/app/messages.js | 2 -
.../common/widgets/export_metrics_mixin.js | 28 +-
.../main/host/details/actions/check_host.js | 3 +-
.../mixins/wizard/assign_master_components.js | 1 +
.../app/routes/manage_journalnode_routes.js | 4 +-
.../app/styles/theme/bootstrap-ambari.css | 29 +-
ambari-web/app/styles/wizard.less | 13 +-
.../templates/common/host_progress_popup.hbs | 8 +-
.../highAvailability/journalNode/wizard.hbs | 13 +-
.../admin/stack_upgrade/upgrade_options.hbs | 58 +-
.../app/templates/main/host/host_alerts.hbs | 8 +-
ambari-web/app/templates/wizard/step8.hbs | 2 +-
ambari-web/app/views/common/controls_view.js | 3 +-
.../highAvailability/journalNode/step2_view.js | 5 +-
.../admin/stack_upgrade/upgrade_wizard_view.js | 6 +-
.../app/views/main/host/host_alerts_view.js | 41 +-
ambari-web/app/views/main/host/menu.js | 4 +-
ambari-web/test/controllers/installer_test.js | 13 +-
.../test/views/common/controls_view_test.js | 74 +-
.../stack_upgrade/upgrade_wizard_view_test.js | 2 +-
.../views/main/host/host_alerts_view_test.js | 3 +-
.../ui/app/templates/components/job-details.hbs | 2 +-
pom.xml | 1 +
utility/pom.xml | 33 +-
...AvoidTransactionalOnPrivateMethodsCheck.java | 55 ++
.../src/main/resources/checkstyle_packages.xml | 15 +
...dTransactionalOnPrivateMethodsCheckTest.java | 49 ++
.../InputTransactionalOnPrivateMethods.java | 46 ++
159 files changed, 10080 insertions(+), 377 deletions(-)
----------------------------------------------------------------------
[08/14] ambari git commit: Merge branch 'trunk' into
branch-feature-AMBARI-18456
Posted by jo...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-18456
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/276d1244
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/276d1244
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/276d1244
Branch: refs/heads/trunk
Commit: 276d1244e1b1d8200cf644b7cccfaa0a757f8146
Parents: 1f804d1 85c9104
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Nov 29 16:51:58 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Nov 29 17:00:59 2016 -0500
----------------------------------------------------------------------
.../views/stackVersions/stackVersionPage.html | 2 +-
.../org/apache/ambari/logsearch/LogSearch.java | 15 +-
.../logsearch/common/ExternalServerClient.java | 12 +-
.../logsearch/common/LogSearchConstants.java | 1 +
.../AbstractOperationHolderConverter.java | 9 +
...actServiceLogRequestFacetQueryConverter.java | 44 ++
.../BaseServiceLogRequestQueryConverter.java | 1 +
...ServiceLogAnyGraphRequestQueryConverter.java | 13 +
...eLogComponentLevelRequestQueryConverter.java | 15 +-
...eLogComponentRequestFacetQueryConverter.java | 15 +-
...rviceLogLevelCountRequestQueryConverter.java | 16 +
...eLogLevelDateRangeRequestQueryConverter.java | 16 +-
...erviceLogTreeRequestFacetQueryConverter.java | 17 +-
.../ambari/logsearch/doc/DocConstants.java | 1 +
.../logsearch/graph/GraphDataGenerator.java | 26 +-
.../logsearch/manager/ServiceLogsManager.java | 2 +-
.../request/ServiceLogParamDefinition.java | 6 +
.../request/impl/BaseServiceLogRequest.java | 13 +
.../LogsearchKRBAuthenticationFilter.java | 4 +-
.../src/main/resources/logsearch.properties | 2 +-
.../dashboard/BubbleGraphTableLayoutView.js | 4 +-
.../views/dashboard/ComponentListView.js | 2 +-
.../scripts/views/dashboard/HostListView.js | 14 +-
.../main/webapp/scripts/views/tabs/TreeView.js | 10 +-
.../webapp/templates/tabs/TreeView_tmpl.html | 115 ++--
...ComponentLevelRequestQueryConverterTest.java | 7 +-
...ComponentRequestFacetQueryConverterTest.java | 4 +-
.../conf/unix/ambari-metrics-grafana | 34 +-
.../ambari/server/checks/CheckDescription.java | 13 +
.../server/checks/ServicePresenceCheck.java | 177 ++++++
.../internal/ClusterControllerImpl.java | 2 +-
.../ServiceConfigVersionResourceProvider.java | 3 +-
.../logging/LoggingSearchPropertyProvider.java | 11 +-
.../ambari/server/orm/dao/ClusterDAO.java | 24 +
.../ambari/server/orm/dao/ServiceConfigDAO.java | 13 +-
.../orm/entities/ClusterConfigEntity.java | 7 +-
.../entities/ClusterConfigMappingEntity.java | 6 +
.../orm/entities/ServiceConfigEntity.java | 5 +-
.../server/orm/helpers/dbms/MySqlHelper.java | 18 +
.../upgrades/RangerKmsProxyConfig.java | 94 +++
.../server/state/cluster/ClusterImpl.java | 42 +-
.../server/upgrade/SchemaUpgradeHelper.java | 35 +-
.../main/resources/Ambari-DDL-MySQL-CREATE.sql | 1 +
.../FLUME/1.4.0.2.0/package/scripts/flume.py | 1 +
.../package/files/hbaseSmokeVerify.sh | 2 +-
.../HDFS/2.1.0.2.0/kerberos.json | 2 +-
.../HIVE/0.12.0.2.0/configuration/hive-env.xml | 19 +
.../HIVE/0.12.0.2.0/package/scripts/webhcat.py | 10 -
.../configuration/logfeeder-ambari-config.xml | 37 ++
.../configuration/logfeeder-output-config.xml | 37 ++
.../configuration/logsearch-properties.xml | 40 ++
.../LOGSEARCH/0.5.0/metainfo.xml | 5 +
.../LOGSEARCH/0.5.0/package/scripts/params.py | 17 +-
.../0.5.0/package/scripts/setup_logfeeder.py | 10 +
.../templates/input.config-ambari.json.j2 | 602 -------------------
.../package/templates/output.config.json.j2 | 61 --
.../properties/input.config-ambari.json.j2 | 602 +++++++++++++++++++
.../0.5.0/properties/output.config.json.j2 | 61 ++
.../OOZIE/4.0.0.2.0/configuration/oozie-env.xml | 19 +
.../0.6.0.2.5/package/scripts/master.py | 15 +-
.../0.6.0.2.5/package/scripts/params.py | 28 +-
.../ZEPPELIN/0.6.0.2.5/role_command_order.json | 2 +-
.../HDP/2.0.6/configuration/cluster-env.xml | 6 +
.../HDP/2.2/services/HIVE/themes/theme.json | 107 +++-
.../services/HIVE/configuration/hive-env.xml | 35 ++
.../services/OOZIE/configuration/oozie-env.xml | 33 +
.../stacks/HDP/2.3/services/OOZIE/metainfo.xml | 6 +
.../HDP/2.3/services/OOZIE/themes/theme.json | 116 ++++
.../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml | 7 +
.../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml | 7 +
.../stacks/HDP/2.3/upgrades/upgrade-2.5.xml | 11 +
.../stacks/HDP/2.3/upgrades/upgrade-2.6.xml | 11 +
.../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml | 7 +
.../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml | 7 +
.../stacks/HDP/2.4/upgrades/upgrade-2.5.xml | 11 +
.../stacks/HDP/2.4/upgrades/upgrade-2.6.xml | 11 +
.../stacks/HDP/2.5/services/HDFS/kerberos.json | 2 +-
.../stacks/HDP/2.5/services/OOZIE/metainfo.xml | 6 +
.../HDP/2.5/services/OOZIE/themes/theme.json | 116 ++++
.../HDP/2.5/upgrades/host-ordered-upgrade.xml | 11 +-
.../server/checks/ServicePresenceCheckTest.java | 217 +++++++
.../upgrades/RangerKmsProxyConfigTest.java | 125 ++++
.../stacks/2.0.6/HIVE/test_webhcat_server.py | 4 -
.../stacks/2.4/LOGSEARCH/test_logfeeder.py | 14 +-
.../test/python/stacks/2.4/configs/default.json | 6 +
ambari-web/app/app.js | 4 -
.../main/admin/service_auto_start.js | 2 +-
.../controllers/main/service/add_controller.js | 11 +-
.../data/configs/services/hive_properties.js | 4 -
.../configs/services/logsearch_properties.js | 51 +-
.../data/configs/services/oozie_properties.js | 4 -
ambari-web/app/messages.js | 3 +
ambari-web/app/styles/application.less | 10 +-
ambari-web/app/styles/bootstrap_overrides.less | 9 +
.../app/styles/enhanced_service_dashboard.less | 343 ++++-------
ambari-web/app/styles/hosts.less | 3 -
.../app/styles/theme/bootstrap-ambari.css | 5 +-
.../modal_popups/widget_browser_popup.hbs | 108 ++--
.../templates/main/admin/service_auto_start.hbs | 2 +-
.../main/service/widgets/create/expression.hbs | 36 +-
.../main/service/widgets/create/step1.hbs | 18 +-
.../main/service/widgets/create/step2.hbs | 22 +-
.../service/widgets/create/step2_add_metric.hbs | 12 +-
.../main/service/widgets/create/step2_graph.hbs | 4 +-
.../service/widgets/create/step2_number.hbs | 4 +-
.../service/widgets/create/step2_template.hbs | 4 +-
.../main/service/widgets/create/step3.hbs | 42 +-
.../create/widget_property_threshold.hbs | 18 +-
ambari-web/app/views/common/chosen_plugin.js | 21 +-
ambari-web/app/views/common/controls_view.js | 2 +-
ambari-web/app/views/main/dashboard/widgets.js | 17 +-
.../main/service/widgets/create/step2_view.js | 2 +
.../main/service/add_controller_test.js | 29 +-
.../test/views/main/dashboard/widgets_test.js | 11 +-
contrib/utils/perf/deploy-gce-perf-cluster.py | 243 +++++---
docs/pom.xml | 4 +-
116 files changed, 2957 insertions(+), 1408 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/276d1244/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java
index 0000000,bb88f55..25387cc
mode 000000,100644..100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java
@@@ -1,0 -1,95 +1,94 @@@
+ /**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ package org.apache.ambari.server.serveraction.upgrades;
+
+ import java.text.MessageFormat;
+ import java.util.Map;
+ import java.util.concurrent.ConcurrentMap;
+
+ import org.apache.ambari.server.AmbariException;
+ import org.apache.ambari.server.actionmanager.HostRoleStatus;
+ import org.apache.ambari.server.agent.CommandReport;
+ import org.apache.ambari.server.serveraction.AbstractServerAction;
+ import org.apache.ambari.server.state.Cluster;
+ import org.apache.ambari.server.state.Clusters;
+ import org.apache.ambari.server.state.Config;
+ import org.apache.ambari.server.state.SecurityType;
-import org.apache.commons.lang.StringUtils;
+
+ import com.google.inject.Inject;
+
+ /**
+ * Computes Ranger KMS Proxy properties in kms-site
+ */
+
+ public class RangerKmsProxyConfig extends AbstractServerAction {
+ private static final String RANGER_ENV_CONFIG_TYPE = "ranger-env";
+ private static final String RANGER_KMS_SITE_CONFIG_TYPE = "kms-site";
+
+ @Inject
+ private Clusters m_clusters;
+
+ @Override
+ public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext)
+ throws AmbariException, InterruptedException {
+
+ String clusterName = getExecutionCommand().getClusterName();
+ Cluster cluster = m_clusters.getCluster(clusterName);
+ String outputMsg = "";
+
+ Config rangerEnv = cluster.getDesiredConfigByType(RANGER_ENV_CONFIG_TYPE);
+
+ if (null == rangerEnv) {
+ return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
+ MessageFormat.format("Config source type {0} not found, skipping adding properties to {1}.", RANGER_ENV_CONFIG_TYPE, RANGER_KMS_SITE_CONFIG_TYPE), "");
+ }
+
+ String rangerUserProp = "ranger_user";
+ String rangerUser = rangerEnv.getProperties().get(rangerUserProp);
+
+ if (null == rangerUser) {
+ return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
+ MessageFormat.format("Required user service user value from {0}/{1} not found, skipping adding properties to {2}.", RANGER_ENV_CONFIG_TYPE, rangerUserProp, RANGER_KMS_SITE_CONFIG_TYPE), "");
+ }
+
+ Config kmsSite = cluster.getDesiredConfigByType(RANGER_KMS_SITE_CONFIG_TYPE);
+
+ if (null == kmsSite) {
+ return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
+ MessageFormat.format("Config type {0} not found, skipping adding properties to it.", RANGER_KMS_SITE_CONFIG_TYPE), "");
+ }
+
+ Map<String, String> targetValues = kmsSite.getProperties();
+ if (cluster.getSecurityType() == SecurityType.KERBEROS) {
+ String userProp = "hadoop.kms.proxyuser." + rangerUser + ".users";
+ String groupProp = "hadoop.kms.proxyuser." + rangerUser + ".groups";
+ String hostProp = "hadoop.kms.proxyuser." + rangerUser + ".hosts";
+ targetValues.put(userProp, "*");
+ targetValues.put(groupProp, "*");
+ targetValues.put(hostProp, "*");
+ kmsSite.setProperties(targetValues);
- kmsSite.persist(false);
++ kmsSite.save();
+ outputMsg = outputMsg + MessageFormat.format("Successfully added properties to {0}", RANGER_KMS_SITE_CONFIG_TYPE);
+ } else {
+ outputMsg = outputMsg + MessageFormat.format("Kerberos not enable, not setting proxy properties to {0}", RANGER_KMS_SITE_CONFIG_TYPE);
+ }
+
+ return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", outputMsg, "");
+
+ }
+ }
http://git-wip-us.apache.org/repos/asf/ambari/blob/276d1244/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/276d1244/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfigTest.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfigTest.java
index 0000000,e000c65..7a0d66f
mode 000000,100644..100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfigTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfigTest.java
@@@ -1,0 -1,141 +1,125 @@@
+ /**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ package org.apache.ambari.server.serveraction.upgrades;
+
+ import static org.easymock.EasyMock.anyObject;
+ import static org.easymock.EasyMock.expect;
+ import static org.easymock.EasyMock.replay;
+ import static org.junit.Assert.assertEquals;
+ import static org.junit.Assert.assertNotNull;
+ import static org.junit.Assert.assertTrue;
+
+ import java.lang.reflect.Field;
+ import java.util.HashMap;
+ import java.util.Map;
+
+ import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+ import org.apache.ambari.server.actionmanager.HostRoleCommand;
+ import org.apache.ambari.server.agent.CommandReport;
+ import org.apache.ambari.server.agent.ExecutionCommand;
+ import org.apache.ambari.server.state.Cluster;
+ import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.SecurityType;
+ import org.apache.ambari.server.state.Config;
-import org.apache.ambari.server.state.ConfigImpl;
++import org.apache.ambari.server.state.SecurityType;
+ import org.easymock.EasyMock;
+ import org.junit.Before;
+ import org.junit.Test;
+
+ import com.google.inject.Injector;
+
+
+ public class RangerKmsProxyConfigTest {
+ private Injector m_injector;
+ private Clusters m_clusters;
+ private Field m_clusterField;
+
+ @Before
+ public void setup() throws Exception {
+ m_injector = EasyMock.createMock(Injector.class);
+ m_clusters = EasyMock.createMock(Clusters.class);
+ Cluster cluster = EasyMock.createMock(Cluster.class);
+
- Config rangerEnv = new ConfigImpl("ranger-env") {
- Map<String, String> mockProperties = new HashMap<String, String>() {{
++ Map<String, String> mockProperties = new HashMap<String, String>() {
++ {
+ put("ranger_user", "ranger");
- }};
-
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
+ }
+ };
+
- Config kmsSite = new ConfigImpl("kms-site") {
- Map<String, String> mockProperties = new HashMap<String, String>();
- @Override
- public Map<String, String> getProperties() {
- return mockProperties;
- }
-
- @Override
- public void setProperties(Map<String, String> properties) {
- mockProperties.putAll(properties);
- }
++ Config rangerEnv = EasyMock.createNiceMock(Config.class);
++ expect(rangerEnv.getType()).andReturn("ranger-env").anyTimes();
++ expect(rangerEnv.getProperties()).andReturn(mockProperties).anyTimes();
+
- @Override
- public void persist(boolean newConfig) {
- // no-op
- }
- };
++ Config kmsSite = EasyMock.createNiceMock(Config.class);
++ expect(kmsSite.getType()).andReturn("kms-site").anyTimes();
++ expect(kmsSite.getProperties()).andReturn(mockProperties).anyTimes();
+
+ expect(cluster.getDesiredConfigByType("ranger-env")).andReturn(rangerEnv).atLeastOnce();
+ expect(cluster.getDesiredConfigByType("kms-site")).andReturn(kmsSite).atLeastOnce();
+ expect(m_clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
+ expect(m_injector.getInstance(Clusters.class)).andReturn(m_clusters).atLeastOnce();
+ expect(cluster.getSecurityType()).andReturn(SecurityType.KERBEROS).anyTimes();
+
- replay(m_injector, m_clusters, cluster);
++ replay(m_injector, m_clusters, cluster, rangerEnv, kmsSite);
+
+ m_clusterField = RangerKmsProxyConfig.class.getDeclaredField("m_clusters");
+ m_clusterField.setAccessible(true);
+ }
+
+ @Test
+ public void testAction() throws Exception {
+
+ Map<String, String> commandParams = new HashMap<String, String>();
+ commandParams.put("clusterName", "c1");
+
+ ExecutionCommand executionCommand = new ExecutionCommand();
+ executionCommand.setCommandParams(commandParams);
+ executionCommand.setClusterName("c1");
+
+ HostRoleCommand hrc = EasyMock.createMock(HostRoleCommand.class);
+ expect(hrc.getRequestId()).andReturn(1L).anyTimes();
+ expect(hrc.getStageId()).andReturn(2L).anyTimes();
+ expect(hrc.getExecutionCommandWrapper()).andReturn(new ExecutionCommandWrapper(executionCommand)).anyTimes();
+ replay(hrc);
+
+ RangerKmsProxyConfig action = new RangerKmsProxyConfig();
+ m_clusterField.set(action, m_clusters);
+
+ action.setExecutionCommand(executionCommand);
+ action.setHostRoleCommand(hrc);
+
+ CommandReport report = action.execute(null);
+ assertNotNull(report);
+
+ Cluster c = m_clusters.getCluster("c1");
+ Config config = c.getDesiredConfigByType("kms-site");
+ Map<String, String> map = config.getProperties();
+
+ assertTrue(map.containsKey("hadoop.kms.proxyuser.ranger.users"));
+ assertTrue(map.containsKey("hadoop.kms.proxyuser.ranger.groups"));
+ assertTrue(map.containsKey("hadoop.kms.proxyuser.ranger.hosts"));
+
+
+ assertEquals("*", map.get("hadoop.kms.proxyuser.ranger.users"));
+ assertEquals("*", map.get("hadoop.kms.proxyuser.ranger.groups"));
+ assertEquals("*", map.get("hadoop.kms.proxyuser.ranger.hosts"));
+
+ report = action.execute(null);
+ assertNotNull(report);
+
+ }
+ }