You are viewing a plain text version of this content; the canonical link to the original message was a hyperlink in the HTML version and is not reproduced in this plain-text rendering.
Posted to commits@ambari.apache.org by dm...@apache.org on 2015/12/06 14:09:30 UTC
[1/2] ambari git commit: AMBARI-14233. Ranger LDAP configs messed up
after Ambari upgrade from 2.1.2 to 2.1.3 (dlysnichenko)
Repository: ambari
Updated Branches:
refs/heads/branch-2.2 c5d7ef2c6 -> e50b5b874
refs/heads/trunk db63c2b4f -> ec2ba540e
AMBARI-14233. Ranger LDAP configs messed up after Ambari upgrade from 2.1.2 to 2.1.3 (dlysnichenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e50b5b87
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e50b5b87
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e50b5b87
Branch: refs/heads/branch-2.2
Commit: e50b5b8745643ae2d85d8b04d5e23a172582581c
Parents: c5d7ef2
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Sun Dec 6 15:08:00 2015 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Sun Dec 6 15:08:00 2015 +0200
----------------------------------------------------------------------
.../server/upgrade/UpgradeCatalog213.java | 27 +++++++++++
.../server/upgrade/UpgradeCatalog213Test.java | 51 ++++++++++++++++++++
2 files changed, 78 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/e50b5b87/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
index 7df99e3..4126228 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
@@ -102,6 +102,7 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
private static final String HIVE_SITE_CONFIG = "hive-site";
private static final String HIVE_ENV_CONFIG = "hive-env";
private static final String RANGER_ENV_CONFIG = "ranger-env";
+ private static final String RANGER_UGSYNC_SITE_CONFIG = "ranger-ugsync-site";
private static final String ZOOKEEPER_LOG4J_CONFIG = "zookeeper-log4j";
private static final String HADOOP_ENV_CONFIG = "hadoop-env";
private static final String NIMBS_MONITOR_FREQ_SECS_PROPERTY = "nimbus.monitor.freq.secs";
@@ -136,6 +137,8 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
private static final String RANGER_YARN_PLUGIN_ENABLED_PROPERTY = "ranger-yarn-plugin-enabled";
private static final String RANGER_KAFKA_PLUGIN_ENABLED_PROPERTY = "ranger-kafka-plugin-enabled";
+ private static final String RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY = "ranger.usersync.source.impl.class";
+
private static final String BLUEPRINT_TABLE = "blueprint";
private static final String SECURITY_TYPE_COLUMN = "security_type";
private static final String SECURITY_DESCRIPTOR_REF_COLUMN = "security_descriptor_reference";
@@ -304,6 +307,7 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
updateAlertDefinitions();
updateKafkaConfigs();
updateRangerEnvConfig();
+ updateRangerUgsyncSiteConfig();
updateZookeeperLog4j();
updateHiveConfig();
updateAccumuloConfigs();
@@ -1231,6 +1235,29 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
}
}
+ protected void updateRangerUgsyncSiteConfig() throws AmbariException {
+ AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+
+ for (final Cluster cluster : getCheckedClusterMap(ambariManagementController.getClusters()).values()) {
+ Config rangerUgsyncSiteProperties = cluster.getDesiredConfigByType(RANGER_UGSYNC_SITE_CONFIG);
+ if (rangerUgsyncSiteProperties != null && rangerUgsyncSiteProperties.getProperties().containsKey(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY)) {
+ if (rangerUgsyncSiteProperties.getProperties().get(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY).equals("ldap")) {
+ Map<String, String> updates = Collections.singletonMap(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY,
+ "org.apache.ranger.ldapusersync.process.LdapUserGroupBuilder");
+ updateConfigurationPropertiesForCluster(cluster, RANGER_UGSYNC_SITE_CONFIG, updates, true, false);
+ } else if (rangerUgsyncSiteProperties.getProperties().get(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY).equals("unix")) {
+ Map<String, String> updates = Collections.singletonMap(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY,
+ "org.apache.ranger.unixusersync.process.UnixUserGroupBuilder");
+ updateConfigurationPropertiesForCluster(cluster, RANGER_UGSYNC_SITE_CONFIG, updates, true, false);
+ } else if (rangerUgsyncSiteProperties.getProperties().get(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY).equals("file")) {
+ Map<String, String> updates = Collections.singletonMap(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY,
+ "org.apache.ranger.unixusersync.process.FileSourceUserGroupBuilder");
+ updateConfigurationPropertiesForCluster(cluster, RANGER_UGSYNC_SITE_CONFIG, updates, true, false);
+ }
+ }
+ }
+ }
+
protected String updateHiveEnvContent(String hiveEnvContent) {
if(hiveEnvContent == null) {
return null;
http://git-wip-us.apache.org/repos/asf/ambari/blob/e50b5b87/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
index 8100a84..d83f0a8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
@@ -253,6 +253,7 @@ public class UpgradeCatalog213Test {
Method updateZookeeperLog4j = UpgradeCatalog213.class.getDeclaredMethod("updateZookeeperLog4j");
Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
Method updateRangerEnvConfig = UpgradeCatalog213.class.getDeclaredMethod("updateRangerEnvConfig");
+ Method updateRangerUgsyncSiteConfig = UpgradeCatalog213.class.getDeclaredMethod("updateRangerUgsyncSiteConfig");
Method updateHiveConfig = UpgradeCatalog213.class.getDeclaredMethod("updateHiveConfig");
Method updateAccumuloConfigs = UpgradeCatalog213.class.getDeclaredMethod("updateAccumuloConfigs");
Method updateKerberosDescriptorArtifacts = AbstractUpgradeCatalog.class.getDeclaredMethod("updateKerberosDescriptorArtifacts");
@@ -271,6 +272,7 @@ public class UpgradeCatalog213Test {
.addMockedMethod(updateHadoopEnv)
.addMockedMethod(updateZookeeperLog4j)
.addMockedMethod(updateRangerEnvConfig)
+ .addMockedMethod(updateRangerUgsyncSiteConfig)
.addMockedMethod(updateHiveConfig)
.addMockedMethod(updateAccumuloConfigs)
.addMockedMethod(updateKerberosDescriptorArtifacts)
@@ -299,6 +301,8 @@ public class UpgradeCatalog213Test {
expectLastCall().once();
upgradeCatalog213.updateRangerEnvConfig();
expectLastCall().once();
+ upgradeCatalog213.updateRangerUgsyncSiteConfig();
+ expectLastCall().once();
upgradeCatalog213.updateHiveConfig();
expectLastCall().once();
upgradeCatalog213.updateAccumuloConfigs();
@@ -1100,6 +1104,53 @@ public class UpgradeCatalog213Test {
}
@Test
+ public void testUpdateRangerUgsyncSiteConfig() throws Exception {
+ EasyMockSupport easyMockSupport = new EasyMockSupport();
+ final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+ final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
+ final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+ final Map<String, String> propertiesRangerUgsyncSite = new HashMap<String, String>() {{
+ put("ranger.usersync.source.impl.class", "ldap");
+ }};
+
+ final Config mockRangerUgsyncSite = easyMockSupport.createNiceMock(Config.class);
+ final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+ @Override
+ protected void configure() {
+ bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+ bind(Clusters.class).toInstance(mockClusters);
+ bind(EntityManager.class).toInstance(entityManager);
+
+ bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+ bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+ }
+ });
+
+ expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
+ expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+ put("normal", mockClusterExpected);
+ }}).atLeastOnce();
+ expect(mockClusterExpected.getDesiredConfigByType("ranger-ugsync-site")).andReturn(mockRangerUgsyncSite).atLeastOnce();
+
+ expect(mockRangerUgsyncSite.getProperties()).andReturn(propertiesRangerUgsyncSite).atLeastOnce();
+
+ Map<String, String> updates = Collections.singletonMap("ranger.usersync.source.impl.class", "org.apache.ranger.ldapusersync.process.LdapUserGroupBuilder");
+ UpgradeCatalog213 upgradeCatalog213 = createMockBuilder(UpgradeCatalog213.class)
+ .withConstructor(Injector.class)
+ .withArgs(mockInjector)
+ .addMockedMethod("updateConfigurationPropertiesForCluster", Cluster.class, String.class,
+ Map.class, boolean.class, boolean.class)
+ .createMock();
+ upgradeCatalog213.updateConfigurationPropertiesForCluster(mockClusterExpected,
+ "ranger-ugsync-site", updates, true, false);
+ expectLastCall().once();
+
+ easyMockSupport.replayAll();
+ mockInjector.getInstance(UpgradeCatalog213.class).updateRangerUgsyncSiteConfig();
+ easyMockSupport.verifyAll();
+ }
+
+ @Test
public void testGetSourceVersion() {
final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
UpgradeCatalog upgradeCatalog = getUpgradeCatalog(dbAccessor);
[2/2] ambari git commit: AMBARI-14233. Ranger LDAP configs messed up
after Ambari upgrade from 2.1.2 to 2.1.3 (dlysnichenko)
Posted by dm...@apache.org.
AMBARI-14233. Ranger LDAP configs messed up after Ambari upgrade from 2.1.2 to 2.1.3 (dlysnichenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ec2ba540
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ec2ba540
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ec2ba540
Branch: refs/heads/trunk
Commit: ec2ba540eb88765878912e78bf3315606b6966b2
Parents: db63c2b
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Sun Dec 6 15:09:01 2015 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Sun Dec 6 15:09:01 2015 +0200
----------------------------------------------------------------------
.../server/upgrade/UpgradeCatalog213.java | 27 +++++++++++
.../server/upgrade/UpgradeCatalog213Test.java | 51 ++++++++++++++++++++
2 files changed, 78 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ec2ba540/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
index c979a49..956f4cb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
@@ -102,6 +102,7 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
private static final String HIVE_SITE_CONFIG = "hive-site";
private static final String HIVE_ENV_CONFIG = "hive-env";
private static final String RANGER_ENV_CONFIG = "ranger-env";
+ private static final String RANGER_UGSYNC_SITE_CONFIG = "ranger-ugsync-site";
private static final String ZOOKEEPER_LOG4J_CONFIG = "zookeeper-log4j";
private static final String NIMBS_MONITOR_FREQ_SECS_PROPERTY = "nimbus.monitor.freq.secs";
private static final String HIVE_SERVER2_OPERATION_LOG_LOCATION_PROPERTY = "hive.server2.logging.operation.log.location";
@@ -136,6 +137,8 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
private static final String RANGER_YARN_PLUGIN_ENABLED_PROPERTY = "ranger-yarn-plugin-enabled";
private static final String RANGER_KAFKA_PLUGIN_ENABLED_PROPERTY = "ranger-kafka-plugin-enabled";
+ private static final String RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY = "ranger.usersync.source.impl.class";
+
private static final String BLUEPRINT_TABLE = "blueprint";
private static final String SECURITY_TYPE_COLUMN = "security_type";
private static final String SECURITY_DESCRIPTOR_REF_COLUMN = "security_descriptor_reference";
@@ -304,6 +307,7 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
updateHadoopEnv();
updateKafkaConfigs();
updateRangerEnvConfig();
+ updateRangerUgsyncSiteConfig();
updateZookeeperLog4j();
updateHiveConfig();
updateAccumuloConfigs();
@@ -1234,6 +1238,29 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
}
}
+ protected void updateRangerUgsyncSiteConfig() throws AmbariException {
+ AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+
+ for (final Cluster cluster : getCheckedClusterMap(ambariManagementController.getClusters()).values()) {
+ Config rangerUgsyncSiteProperties = cluster.getDesiredConfigByType(RANGER_UGSYNC_SITE_CONFIG);
+ if (rangerUgsyncSiteProperties != null && rangerUgsyncSiteProperties.getProperties().containsKey(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY)) {
+ if (rangerUgsyncSiteProperties.getProperties().get(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY).equals("ldap")) {
+ Map<String, String> updates = Collections.singletonMap(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY,
+ "org.apache.ranger.ldapusersync.process.LdapUserGroupBuilder");
+ updateConfigurationPropertiesForCluster(cluster, RANGER_UGSYNC_SITE_CONFIG, updates, true, false);
+ } else if (rangerUgsyncSiteProperties.getProperties().get(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY).equals("unix")) {
+ Map<String, String> updates = Collections.singletonMap(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY,
+ "org.apache.ranger.unixusersync.process.UnixUserGroupBuilder");
+ updateConfigurationPropertiesForCluster(cluster, RANGER_UGSYNC_SITE_CONFIG, updates, true, false);
+ } else if (rangerUgsyncSiteProperties.getProperties().get(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY).equals("file")) {
+ Map<String, String> updates = Collections.singletonMap(RANGER_USERSYNC_SOURCE_IMPL_CLASS_PROPERTY,
+ "org.apache.ranger.unixusersync.process.FileSourceUserGroupBuilder");
+ updateConfigurationPropertiesForCluster(cluster, RANGER_UGSYNC_SITE_CONFIG, updates, true, false);
+ }
+ }
+ }
+ }
+
protected String updateHiveEnvContent(String hiveEnvContent) {
if(hiveEnvContent == null) {
return null;
http://git-wip-us.apache.org/repos/asf/ambari/blob/ec2ba540/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
index ca9bfa5..35f5816 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
@@ -244,6 +244,7 @@ public class UpgradeCatalog213Test {
Method updateHadoopEnvConfig = UpgradeCatalog213.class.getDeclaredMethod("updateHadoopEnv");
Method updateAlertDefinitions = UpgradeCatalog213.class.getDeclaredMethod("updateAlertDefinitions");
Method updateRangerEnvConfig = UpgradeCatalog213.class.getDeclaredMethod("updateRangerEnvConfig");
+ Method updateRangerUgsyncSiteConfig = UpgradeCatalog213.class.getDeclaredMethod("updateRangerUgsyncSiteConfig");
Method updateHiveConfig = UpgradeCatalog213.class.getDeclaredMethod("updateHiveConfig");
Method updateAccumuloConfigs = UpgradeCatalog213.class.getDeclaredMethod("updateAccumuloConfigs");
Method updateKerberosDescriptorArtifacts = AbstractUpgradeCatalog.class.getDeclaredMethod("updateKerberosDescriptorArtifacts");
@@ -261,6 +262,7 @@ public class UpgradeCatalog213Test {
.addMockedMethod(updateZookeeperLog4j)
.addMockedMethod(updateHadoopEnvConfig)
.addMockedMethod(updateRangerEnvConfig)
+ .addMockedMethod(updateRangerUgsyncSiteConfig)
.addMockedMethod(updateHiveConfig)
.addMockedMethod(updateAccumuloConfigs)
.addMockedMethod(updateKerberosDescriptorArtifacts)
@@ -288,6 +290,8 @@ public class UpgradeCatalog213Test {
expectLastCall().once();
upgradeCatalog213.updateRangerEnvConfig();
expectLastCall().once();
+ upgradeCatalog213.updateRangerUgsyncSiteConfig();
+ expectLastCall().once();
upgradeCatalog213.updateHiveConfig();
expectLastCall().once();
upgradeCatalog213.updateAccumuloConfigs();
@@ -1137,6 +1141,53 @@ public class UpgradeCatalog213Test {
}
@Test
+ public void testUpdateRangerUgsyncSiteConfig() throws Exception {
+ EasyMockSupport easyMockSupport = new EasyMockSupport();
+ final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+ final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
+ final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+ final Map<String, String> propertiesRangerUgsyncSite = new HashMap<String, String>() {{
+ put("ranger.usersync.source.impl.class", "ldap");
+ }};
+
+ final Config mockRangerUgsyncSite = easyMockSupport.createNiceMock(Config.class);
+ final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+ @Override
+ protected void configure() {
+ bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+ bind(Clusters.class).toInstance(mockClusters);
+ bind(EntityManager.class).toInstance(entityManager);
+
+ bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+ bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+ }
+ });
+
+ expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
+ expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+ put("normal", mockClusterExpected);
+ }}).atLeastOnce();
+ expect(mockClusterExpected.getDesiredConfigByType("ranger-ugsync-site")).andReturn(mockRangerUgsyncSite).atLeastOnce();
+
+ expect(mockRangerUgsyncSite.getProperties()).andReturn(propertiesRangerUgsyncSite).atLeastOnce();
+
+ Map<String, String> updates = Collections.singletonMap("ranger.usersync.source.impl.class", "org.apache.ranger.ldapusersync.process.LdapUserGroupBuilder");
+ UpgradeCatalog213 upgradeCatalog213 = createMockBuilder(UpgradeCatalog213.class)
+ .withConstructor(Injector.class)
+ .withArgs(mockInjector)
+ .addMockedMethod("updateConfigurationPropertiesForCluster", Cluster.class, String.class,
+ Map.class, boolean.class, boolean.class)
+ .createMock();
+ upgradeCatalog213.updateConfigurationPropertiesForCluster(mockClusterExpected,
+ "ranger-ugsync-site", updates, true, false);
+ expectLastCall().once();
+
+ easyMockSupport.replayAll();
+ mockInjector.getInstance(UpgradeCatalog213.class).updateRangerUgsyncSiteConfig();
+ easyMockSupport.verifyAll();
+ }
+
+ @Test
public void testShouldDDLsBeExecutedOnUpgrade() throws Exception {
// GIVEN
Injector mockedInjector = mocksControl.createMock(Injector.class);