You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by mr...@apache.org on 2017/11/27 23:29:14 UTC
[02/30] ambari git commit: Merge trunk with feature branch and fix
some UT compilation issues (mradhakrishnan)
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
index f12f3af..8b7f5c5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
@@ -17,6 +17,10 @@
*/
package org.apache.ambari.server.upgrade;
+import static org.apache.ambari.server.upgrade.UpgradeCatalog300.AMBARI_CONFIGURATION_CATEGORY_NAME_COLUMN;
+import static org.apache.ambari.server.upgrade.UpgradeCatalog300.AMBARI_CONFIGURATION_PROPERTY_NAME_COLUMN;
+import static org.apache.ambari.server.upgrade.UpgradeCatalog300.AMBARI_CONFIGURATION_PROPERTY_VALUE_COLUMN;
+import static org.apache.ambari.server.upgrade.UpgradeCatalog300.AMBARI_CONFIGURATION_TABLE;
import static org.apache.ambari.server.upgrade.UpgradeCatalog300.COMPONENT_DESIRED_STATE_TABLE;
import static org.apache.ambari.server.upgrade.UpgradeCatalog300.COMPONENT_STATE_TABLE;
import static org.apache.ambari.server.upgrade.UpgradeCatalog300.SECURITY_STATE_COLUMN;
@@ -25,6 +29,7 @@ import static org.easymock.EasyMock.anyLong;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createMockBuilder;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.eq;
@@ -36,19 +41,24 @@ import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertTrue;
+import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import javax.persistence.EntityManager;
+import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.ActionManager;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
import org.apache.ambari.server.controller.MaintenanceStateHelper;
+import org.apache.ambari.server.controller.ServiceConfigVersionResponse;
import org.apache.ambari.server.orm.DBAccessor;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
@@ -133,13 +143,15 @@ public class UpgradeCatalog300Test {
Method showHcatDeletedUserMessage = UpgradeCatalog300.class.getDeclaredMethod("showHcatDeletedUserMessage");
Method setStatusOfStagesAndRequests = UpgradeCatalog300.class.getDeclaredMethod("setStatusOfStagesAndRequests");
Method updateLogSearchConfigs = UpgradeCatalog300.class.getDeclaredMethod("updateLogSearchConfigs");
-
- UpgradeCatalog300 upgradeCatalog300 = createMockBuilder(UpgradeCatalog300.class)
- .addMockedMethod(showHcatDeletedUserMessage)
- .addMockedMethod(addNewConfigurationsFromXml)
- .addMockedMethod(setStatusOfStagesAndRequests)
- .addMockedMethod(updateLogSearchConfigs)
- .createMock();
+ Method updateKerberosConfigurations = UpgradeCatalog300.class.getDeclaredMethod("updateKerberosConfigurations");
+
+ UpgradeCatalog300 upgradeCatalog300 = createMockBuilder(UpgradeCatalog300.class)
+ .addMockedMethod(showHcatDeletedUserMessage)
+ .addMockedMethod(addNewConfigurationsFromXml)
+ .addMockedMethod(setStatusOfStagesAndRequests)
+ .addMockedMethod(updateLogSearchConfigs)
+ .addMockedMethod(updateKerberosConfigurations)
+ .createMock();
upgradeCatalog300.addNewConfigurationsFromXml();
@@ -149,6 +161,9 @@ public class UpgradeCatalog300Test {
upgradeCatalog300.updateLogSearchConfigs();
expectLastCall().once();
+ upgradeCatalog300.updateKerberosConfigurations();
+ expectLastCall().once();
+
replay(upgradeCatalog300);
upgradeCatalog300.executeDMLUpdates();
@@ -171,9 +186,21 @@ public class UpgradeCatalog300Test {
Capture<DBAccessor.DBColumnInfo> hrcOpsDisplayNameColumn = newCapture();
dbAccessor.addColumn(eq(UpgradeCatalog300.HOST_ROLE_COMMAND_TABLE), capture(hrcOpsDisplayNameColumn));
- dbAccessor.dropColumn(COMPONENT_DESIRED_STATE_TABLE, SECURITY_STATE_COLUMN); expectLastCall().once();
- dbAccessor.dropColumn(COMPONENT_STATE_TABLE, SECURITY_STATE_COLUMN); expectLastCall().once();
- dbAccessor.dropColumn(SERVICE_DESIRED_STATE_TABLE, SECURITY_STATE_COLUMN); expectLastCall().once();
+ dbAccessor.dropColumn(COMPONENT_DESIRED_STATE_TABLE, SECURITY_STATE_COLUMN);
+ expectLastCall().once();
+ dbAccessor.dropColumn(COMPONENT_STATE_TABLE, SECURITY_STATE_COLUMN);
+ expectLastCall().once();
+ dbAccessor.dropColumn(SERVICE_DESIRED_STATE_TABLE, SECURITY_STATE_COLUMN);
+ expectLastCall().once();
+
+ // Ambari configuration table addition...
+ Capture<List<DBAccessor.DBColumnInfo>> ambariConfigurationTableColumns = newCapture();
+
+ dbAccessor.createTable(eq(AMBARI_CONFIGURATION_TABLE), capture(ambariConfigurationTableColumns));
+ expectLastCall().once();
+ dbAccessor.addPKConstraint(AMBARI_CONFIGURATION_TABLE, "PK_ambari_configuration", AMBARI_CONFIGURATION_CATEGORY_NAME_COLUMN, AMBARI_CONFIGURATION_PROPERTY_NAME_COLUMN);
+ expectLastCall().once();
+ // Ambari configuration table addition...
replay(dbAccessor, configuration);
@@ -186,6 +213,35 @@ public class UpgradeCatalog300Test {
Assert.assertEquals(null, capturedOpsDisplayNameColumn.getDefaultValue());
Assert.assertEquals(String.class, capturedOpsDisplayNameColumn.getType());
+ // Ambari configuration table addition...
+ Assert.assertTrue(ambariConfigurationTableColumns.hasCaptured());
+ List<DBAccessor.DBColumnInfo> columns = ambariConfigurationTableColumns.getValue();
+ Assert.assertEquals(3, columns.size());
+
+ for (DBAccessor.DBColumnInfo column : columns) {
+ String columnName = column.getName();
+
+ if (AMBARI_CONFIGURATION_CATEGORY_NAME_COLUMN.equals(columnName)) {
+ Assert.assertEquals(String.class, column.getType());
+ Assert.assertEquals(Integer.valueOf(100), column.getLength());
+ Assert.assertEquals(null, column.getDefaultValue());
+ Assert.assertFalse(column.isNullable());
+ } else if (AMBARI_CONFIGURATION_PROPERTY_NAME_COLUMN.equals(columnName)) {
+ Assert.assertEquals(String.class, column.getType());
+ Assert.assertEquals(Integer.valueOf(100), column.getLength());
+ Assert.assertEquals(null, column.getDefaultValue());
+ Assert.assertFalse(column.isNullable());
+ } else if (AMBARI_CONFIGURATION_PROPERTY_VALUE_COLUMN.equals(columnName)) {
+ Assert.assertEquals(String.class, column.getType());
+ Assert.assertEquals(Integer.valueOf(255), column.getLength());
+ Assert.assertEquals(null, column.getDefaultValue());
+ Assert.assertTrue(column.isNullable());
+ } else {
+ Assert.fail("Unexpected column name: " + columnName);
+ }
+ }
+ // Ambari configuration table addition...
+
verify(dbAccessor);
}
@@ -227,9 +283,9 @@ public class UpgradeCatalog300Test {
Collection<Config> configs = Arrays.asList(confSomethingElse1, confLogSearchConf1, confSomethingElse2, confLogSearchConf2);
expect(cluster.getAllConfigs()).andReturn(configs).atLeastOnce();
- configHelper.removeConfigsByType(cluster,"service-1-logsearch-conf");
+ configHelper.removeConfigsByType(cluster, "service-1-logsearch-conf");
expectLastCall().once();
- configHelper.removeConfigsByType(cluster,"service-2-logsearch-conf");
+ configHelper.removeConfigsByType(cluster, "service-2-logsearch-conf");
expectLastCall().once();
configHelper.createConfigType(anyObject(Cluster.class), anyObject(StackId.class), eq(controller),
eq("logsearch-common-properties"), eq(Collections.emptyMap()), eq("ambari-upgrade"),
@@ -313,23 +369,23 @@ public class UpgradeCatalog300Test {
Map<String, String> oldLogFeederOutputConf = ImmutableMap.of(
"content",
" \"zk_connect_string\":\"{{logsearch_solr_zk_quorum}}{{logsearch_solr_zk_znode}}\",\n" +
- " \"collection\":\"{{logsearch_solr_collection_service_logs}}\",\n" +
- " \"number_of_shards\": \"{{logsearch_collection_service_logs_numshards}}\",\n" +
- " \"splits_interval_mins\": \"{{logsearch_service_logs_split_interval_mins}}\",\n" +
- "\n" +
- " \"zk_connect_string\":\"{{logsearch_solr_zk_quorum}}{{logsearch_solr_zk_znode}}\",\n" +
- " \"collection\":\"{{logsearch_solr_collection_audit_logs}}\",\n" +
- " \"number_of_shards\": \"{{logsearch_collection_audit_logs_numshards}}\",\n" +
- " \"splits_interval_mins\": \"{{logsearch_audit_logs_split_interval_mins}}\",\n"
+ " \"collection\":\"{{logsearch_solr_collection_service_logs}}\",\n" +
+ " \"number_of_shards\": \"{{logsearch_collection_service_logs_numshards}}\",\n" +
+ " \"splits_interval_mins\": \"{{logsearch_service_logs_split_interval_mins}}\",\n" +
+ "\n" +
+ " \"zk_connect_string\":\"{{logsearch_solr_zk_quorum}}{{logsearch_solr_zk_znode}}\",\n" +
+ " \"collection\":\"{{logsearch_solr_collection_audit_logs}}\",\n" +
+ " \"number_of_shards\": \"{{logsearch_collection_audit_logs_numshards}}\",\n" +
+ " \"splits_interval_mins\": \"{{logsearch_audit_logs_split_interval_mins}}\",\n"
);
Map<String, String> expectedLogFeederOutputConf = ImmutableMap.of(
"content",
" \"zk_connect_string\":\"{{logsearch_solr_zk_quorum}}{{logsearch_solr_zk_znode}}\",\n" +
- " \"type\": \"service\",\n" +
- "\n" +
- " \"zk_connect_string\":\"{{logsearch_solr_zk_quorum}}{{logsearch_solr_zk_znode}}\",\n" +
- " \"type\": \"audit\",\n"
+ " \"type\": \"service\",\n" +
+ "\n" +
+ " \"zk_connect_string\":\"{{logsearch_solr_zk_quorum}}{{logsearch_solr_zk_znode}}\",\n" +
+ " \"type\": \"audit\",\n"
);
Config logFeederOutputConf = easyMockSupport.createNiceMock(Config.class);
@@ -362,10 +418,10 @@ public class UpgradeCatalog300Test {
upgradeCatalog300.updateLogSearchConfigs();
easyMockSupport.verifyAll();
- Map<String,String> newLogFeederProperties = logFeederPropertiesCapture.getValue();
+ Map<String, String> newLogFeederProperties = logFeederPropertiesCapture.getValue();
assertTrue(Maps.difference(expectedLogFeederProperties, newLogFeederProperties).areEqual());
- Map<String,String> newLogSearchProperties = logSearchPropertiesCapture.getValue();
+ Map<String, String> newLogSearchProperties = logSearchPropertiesCapture.getValue();
assertTrue(Maps.difference(Collections.emptyMap(), newLogSearchProperties).areEqual());
Map<String, String> updatedLogFeederLog4j = logFeederLog4jCapture.getValue();
@@ -383,4 +439,90 @@ public class UpgradeCatalog300Test {
Map<String, String> updatedLogFeederOutputConf = logFeederOutputConfCapture.getValue();
assertTrue(Maps.difference(expectedLogFeederOutputConf, updatedLogFeederOutputConf).areEqual());
}
+
+ @Test
+ public void testUpdateKerberosConfigurations() throws AmbariException, NoSuchFieldException, IllegalAccessException {
+ StackId stackId = new StackId("HDP", "2.6.0.0");
+
+ Map<String, Cluster> clusterMap = new HashMap<>();
+
+ Map<String, String> propertiesWithGroup = new HashMap<>();
+ propertiesWithGroup.put("group", "ambari_managed_identities");
+ propertiesWithGroup.put("kdc_host", "host1.example.com");
+
+ Config newConfig = createMock(Config.class);
+ expect(newConfig.getTag()).andReturn("version2").atLeastOnce();
+ expect(newConfig.getType()).andReturn("kerberos-env").atLeastOnce();
+
+ ServiceConfigVersionResponse response = createMock(ServiceConfigVersionResponse.class);
+
+ Config configWithGroup = createMock(Config.class);
+ expect(configWithGroup.getProperties()).andReturn(propertiesWithGroup).atLeastOnce();
+ expect(configWithGroup.getPropertiesAttributes()).andReturn(Collections.emptyMap()).atLeastOnce();
+ expect(configWithGroup.getTag()).andReturn("version1").atLeastOnce();
+
+ Cluster cluster1 = createMock(Cluster.class);
+ expect(cluster1.getDesiredConfigByType("kerberos-env")).andReturn(configWithGroup).atLeastOnce();
+ expect(cluster1.getConfigsByType("kerberos-env")).andReturn(Collections.singletonMap("v1", configWithGroup)).atLeastOnce();
+ expect(cluster1.getServiceByConfigType("kerberos-env").getName()).andReturn("KERBEROS").atLeastOnce();
+ expect(cluster1.getClusterName()).andReturn("c1").atLeastOnce();
+ expect(cluster1.getDesiredStackVersion()).andReturn(stackId).atLeastOnce();
+ expect(cluster1.getConfig(eq("kerberos-env"), anyString())).andReturn(newConfig).atLeastOnce();
+ expect(cluster1.addDesiredConfig("ambari-upgrade", Collections.singleton(newConfig), "Updated kerberos-env during Ambari Upgrade from 2.6.0 to 3.0.0.")).andReturn(response).once();
+
+ Map<String, String> propertiesWithoutGroup = new HashMap<>();
+ propertiesWithoutGroup.put("kdc_host", "host2.example.com");
+
+ Config configWithoutGroup = createMock(Config.class);
+ expect(configWithoutGroup.getProperties()).andReturn(propertiesWithoutGroup).atLeastOnce();
+
+ Cluster cluster2 = createMock(Cluster.class);
+ expect(cluster2.getDesiredConfigByType("kerberos-env")).andReturn(configWithoutGroup).atLeastOnce();
+
+ Cluster cluster3 = createMock(Cluster.class);
+ expect(cluster3.getDesiredConfigByType("kerberos-env")).andReturn(null).atLeastOnce();
+
+ clusterMap.put("c1", cluster1);
+ clusterMap.put("c2", cluster2);
+ clusterMap.put("c3", cluster3);
+
+ Clusters clusters = createMock(Clusters.class);
+ expect(clusters.getClusters()).andReturn(clusterMap).anyTimes();
+
+ Capture<Map<String, String>> capturedProperties = newCapture();
+
+ AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
+ .addMockedMethod("createConfiguration")
+ .addMockedMethod("getClusters", new Class[]{})
+ .addMockedMethod("createConfig")
+ .createMock();
+ expect(controller.getClusters()).andReturn(clusters).anyTimes();
+ expect(controller.createConfig(eq(cluster1), eq(stackId), eq("kerberos-env"), capture(capturedProperties), anyString(), anyObject(Map.class), 1L)).andReturn(newConfig).once();
+
+
+ Injector injector = createNiceMock(Injector.class);
+ expect(injector.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+
+ replay(controller, clusters, cluster1, cluster2, configWithGroup, configWithoutGroup, newConfig, response, injector);
+
+ Field field = AbstractUpgradeCatalog.class.getDeclaredField("configuration");
+
+ UpgradeCatalog300 upgradeCatalog300 = new UpgradeCatalog300(injector);
+ field.set(upgradeCatalog300, configuration);
+ upgradeCatalog300.updateKerberosConfigurations();
+
+ verify(controller, clusters, cluster1, cluster2, configWithGroup, configWithoutGroup, newConfig, response, injector);
+
+
+ Assert.assertEquals(1, capturedProperties.getValues().size());
+
+ Map<String, String> properties = capturedProperties.getValue();
+ Assert.assertEquals(2, properties.size());
+ Assert.assertEquals("ambari_managed_identities", properties.get("ipa_user_group"));
+ Assert.assertEquals("host1.example.com", properties.get("kdc_host"));
+
+ Assert.assertEquals(2, propertiesWithGroup.size());
+ Assert.assertEquals("ambari_managed_identities", propertiesWithGroup.get("group"));
+ Assert.assertEquals("host1.example.com", propertiesWithGroup.get("kdc_host"));
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/utils/TestVersionUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestVersionUtils.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/TestVersionUtils.java
index 9d20a01..42d321a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestVersionUtils.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/TestVersionUtils.java
@@ -48,6 +48,9 @@ public class TestVersionUtils {
Assert.assertEquals(0, VersionUtils.compareVersions("2.2", "2.2.VER"));
Assert.assertEquals(0, VersionUtils.compareVersions("2.2.VAR", "2.2.VER"));
Assert.assertEquals(0, VersionUtils.compareVersions("2.2.3", "2.2.3.VER1.V"));
+
+ Assert.assertEquals(0, VersionUtils.compareVersions("2.2.0.1-200", "2.2.0.1-100"));
+ Assert.assertEquals(1, VersionUtils.compareVersionsWithBuild("2.2.0.1-200", "2.2.0.1-100", 4));
}
@Test
@@ -118,6 +121,7 @@ public class TestVersionUtils {
//Assert.assertEquals(-1, VersionUtils.compareVersions("1.2.3_MYAMBARI_000000", "1.2.4_MYAMBARI_000000"));
Assert.assertEquals(1, VersionUtils.compareVersions("1.2.4_MYAMBARI_000000", "1.2.3_MYAMBARI_000000"));
Assert.assertEquals(0, VersionUtils.compareVersions("1.2.3_MYAMBARI_000000", "1.2.3_MYAMBARI_000000"));
+ Assert.assertEquals(0, VersionUtils.compareVersions("2.99.99.0", "2.99.99"));
Assert.assertEquals(-1, VersionUtils.compareVersions("1.2.3_MYAMBARI_000000", "1.2.4_MYAMBARI_000000", 3));
Assert.assertEquals(1, VersionUtils.compareVersions("1.2.4_MYAMBARI_000000", "1.2.3_MYAMBARI_000000", 3));
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index 66df2d8..ad4e371 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -3718,7 +3718,8 @@ class TestAmbariServer(TestCase):
@patch("ambari_server.serverSetup.service_setup")
@patch("ambari_server.serverSetup.read_ambari_user")
@patch("ambari_server.serverSetup.expand_jce_zip_file")
- def test_setup_linux(self, expand_jce_zip_file_mock, read_ambari_user_mock,
+ @patch("ambari_server.serverSetup.write_gpl_license_accepted")
+ def test_setup_linux(self, write_gpl_license_accepted_mock, expand_jce_zip_file_mock, read_ambari_user_mock,
service_setup_mock, adjust_dirs_mock, extract_views_mock, proceedJDBCProperties_mock, is_root_mock,
disable_security_enhancements_mock, check_jdbc_drivers_mock, check_ambari_user_mock,
download_jdk_mock, configure_os_settings_mock, get_ambari_properties_mock,
@@ -3831,6 +3832,7 @@ class TestAmbariServer(TestCase):
check_jdbc_drivers_mock.return_value = 0
download_jdk_mock.return_value = 0
configure_os_settings_mock.return_value = 0
+ write_gpl_license_accepted_mock.return_value = 0
result = setup(args)
@@ -4970,7 +4972,7 @@ class TestAmbariServer(TestCase):
ensure_can_start_under_current_user_mock, get_jdbc_mock,
ensure_jdbc_driver_is_installed_mock):
java_exe_path_mock.return_value = "/usr/lib/java/bin/java"
- run_os_command_mock.return_value = (0, None, None)
+ run_os_command_mock.return_value = (0, '{"lzo_enabled":"false"}', None)
get_conf_dir_mock.return_value = '/etc/conf'
command = '/usr/lib/java/bin/java -cp /etc/conf' + os.pathsep + 'test' + os.pathsep + 'path12' + \
os.pathsep +'/path/to/jdbc.jar ' \
@@ -6834,7 +6836,6 @@ class TestAmbariServer(TestCase):
read_ambari_user_method, read_master_key_method,
get_is_persisted_method, get_is_secure_method, exists_mock,
save_passwd_for_alias_method):
-
is_root_method.return_value = True
p = Properties()
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
index 1baed03..bacfe3c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
@@ -62,8 +62,7 @@ class TestHiveMetastore(RMFTestCase):
self.assert_configure_default()
self.assert_init_schema()
self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive',
+ environment = { 'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user = 'hive',
@@ -123,8 +122,7 @@ class TestHiveMetastore(RMFTestCase):
self.assert_configure_secured()
self.assert_init_schema()
self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': '/usr/hdp/2.1.0.0-1234/hadoop',
- 'HIVE_BIN': 'hive',
+ environment = { 'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user = 'hive',
@@ -552,7 +550,7 @@ class TestHiveMetastore(RMFTestCase):
user = 'hive')
self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': '/usr/hdp/2.3.0.0-1234/hadoop', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': '/usr/hdp/current/hive-server2/bin/hive'},
+ environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive'},
not_if = None,
user = 'hive',
path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/2.3.0.0-1234/hadoop/bin'])
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index c1fb5a2..87467ac 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -93,8 +93,7 @@ class TestHiveServer(RMFTestCase):
user='hive'
)
self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': 'mock_hadoop_dir',
- 'HIVE_BIN': 'hive',
+ environment = { 'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user = 'hive',
@@ -124,8 +123,7 @@ class TestHiveServer(RMFTestCase):
self.assert_configure_default(default_fs_default='hcfs://c6401.ambari.apache.org:8020')
self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': 'mock_hadoop_dir',
- 'HIVE_BIN': 'hive',
+ environment = {'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user = 'hive',
@@ -155,8 +153,7 @@ class TestHiveServer(RMFTestCase):
user = 'hive',
)
self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': 'mock_hadoop_dir',
- 'HIVE_BIN': 'hive',
+ environment = { 'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user = 'hive',
@@ -186,8 +183,7 @@ class TestHiveServer(RMFTestCase):
user = 'hive',
)
self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': 'mock_hadoop_dir',
- 'HIVE_BIN': 'hive',
+ environment = { 'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user = 'hive',
@@ -217,8 +213,7 @@ class TestHiveServer(RMFTestCase):
user = 'hive',
)
self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': 'mock_hadoop_dir',
- 'HIVE_BIN': 'hive',
+ environment = { 'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user = 'hive',
@@ -290,8 +285,7 @@ class TestHiveServer(RMFTestCase):
self.assert_configure_secured()
self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': 'mock_hadoop_dir',
- 'HIVE_BIN': 'hive',
+ environment = { 'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user = 'hive',
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index 7efb5fd..157b25c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -340,7 +340,7 @@ class TestServiceCheck(RMFTestCase):
# LLAP call
self.assertResourceCalled('Execute',
"! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10500/;transportMode=binary' --hiveconf \"hiveLlapServiceCheck=\" -f /usr/hdp/current/hive-server2-hive2/scripts/llap/sql/serviceCheckScript.sql -e '' 2>&1| awk '{print}'|grep -i -e 'Invalid status\|Invalid URL\|command not found\|Connection refused'",
- path = ['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', '/bin:/usr/hdp/2.3.0.0-1234/hadoop/bin:/usr/hdp/2.3.0.0-1234/hive2/bin'],
+ path = ['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', '/bin:/usr/hdp/2.3.0.0-1234/hadoop/bin:/usr/hdp/current/hive-server2-hive2/bin'],
tries = 1,
stderr = -1,
wait_for_finish = True,
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index f9480ee..b4652ac 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -65,7 +65,6 @@ class TestWebHCatServer(RMFTestCase):
self.assert_configure_default()
self.assertResourceCalled('Execute', 'cd /var/run/webhcat ; /usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh start',
- environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client'},
not_if = "ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps -p `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1",
user = 'hcat',
)
@@ -82,7 +81,6 @@ class TestWebHCatServer(RMFTestCase):
self.assertResourceCalled('Execute', '/usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh stop',
user = 'hcat',
- environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client' }
)
self.assertResourceCalled('Execute', 'ambari-sudo.sh kill -9 `cat /var/run/webhcat/webhcat.pid`',
@@ -148,7 +146,6 @@ class TestWebHCatServer(RMFTestCase):
self.assert_configure_secured()
self.assertResourceCalled('Execute', 'cd /var/run/webhcat ; /usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh start',
- environment = {'HADOOP_HOME': '/usr/hdp/2.1.0.0-1234/hadoop'},
not_if = "ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps -p `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1",
user = 'hcat',
)
@@ -165,7 +162,6 @@ class TestWebHCatServer(RMFTestCase):
self.assertResourceCalled('Execute', '/usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh stop',
user = 'hcat',
- environment = {'HADOOP_HOME': '/usr/hdp/2.1.0.0-1234/hadoop' }
)
self.assertResourceCalled('Execute', 'ambari-sudo.sh kill -9 `cat /var/run/webhcat/webhcat.pid`',
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index 7f2ed46..e15cfdb 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -49,7 +49,6 @@ class TestFalconServer(RMFTestCase):
self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-config.sh server falcon',
path = ['/usr/bin'],
user = 'falcon',
- environment = {'HADOOP_HOME': '/usr/lib/hadoop'},
not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
)
@@ -61,7 +60,6 @@ class TestFalconServer(RMFTestCase):
self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-start -port 15000',
path = ['/usr/bin'],
user = 'falcon',
- environment = {'HADOOP_HOME': '/usr/lib/hadoop'},
not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
)
@@ -78,8 +76,7 @@ class TestFalconServer(RMFTestCase):
self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-stop',
path = ['/usr/bin'],
- user = 'falcon',
- environment = {'HADOOP_HOME': '/usr/lib/hadoop'})
+ user = 'falcon')
self.assertResourceCalled('File', '/var/run/falcon/falcon.pid',
action = ['delete'])
@@ -236,8 +233,7 @@ class TestFalconServer(RMFTestCase):
self.assertResourceCalled('Execute',
'/usr/hdp/current/falcon-server/bin/falcon-stop',
- path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'], user='falcon',
- environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'})
+ path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'], user='falcon')
self.assertResourceCalled('File', '/var/run/falcon/falcon.pid',
action = ['delete'])
@@ -252,7 +248,7 @@ class TestFalconServer(RMFTestCase):
sudo = True,
)
self.assertResourceCalled('Execute', ('tar',
- '-xvf',
+ '-xf',
'/tmp/falcon-upgrade-backup/falcon-local-backup.tar',
'-C',
u'/hadoop/falcon/'),
@@ -406,14 +402,12 @@ class TestFalconServer(RMFTestCase):
)
self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-config.sh server falcon',
- environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'},
path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'],
user = 'falcon',
not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
)
self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-start -port 15000',
- environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'},
path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'],
user = 'falcon',
not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
@@ -439,7 +433,7 @@ class TestFalconServer(RMFTestCase):
self.assertResourceCalled('Execute',
('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'falcon-server', version), sudo=True,)
self.assertResourceCalled('Execute', ('tar',
- '-xvf',
+ '-xf',
'/tmp/falcon-upgrade-backup/falcon-local-backup.tar',
'-C',
u'/hadoop/falcon/'),
@@ -479,7 +473,7 @@ class TestFalconServer(RMFTestCase):
('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'falcon-server', version), sudo=True,)
self.assertResourceCalled('Execute', ('tar',
- '-xvf',
+ '-xf',
'/tmp/falcon-upgrade-backup/falcon-local-backup.tar',
'-C',
u'/hadoop/falcon/'),
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
index db4e2a1..d91bcf4 100644
--- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
@@ -70,8 +70,7 @@ class TestHiveMetastore(RMFTestCase):
self.assert_init_schema('aaa')
self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2067/hadoop',
- 'HIVE_BIN': 'hive',
+ environment = {'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user = 'hive',
@@ -101,8 +100,7 @@ class TestHiveMetastore(RMFTestCase):
self.assert_init_schema('aaa')
self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2067/hadoop',
- 'HIVE_BIN': 'hive',
+ environment = {'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user = 'hive',
@@ -164,8 +162,7 @@ class TestHiveMetastore(RMFTestCase):
self.assert_configure_secured()
self.assert_init_schema('asd')
self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
- environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive',
+ environment = {'HIVE_CMD': '/usr/hdp/current/hive-server2/bin/hive',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user = 'hive',
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
index fad99f6..59b2166 100644
--- a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
+++ b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
@@ -130,6 +130,6 @@ class TestTezClient(RMFTestCase):
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, None, ''), (0, None)],
+ call_mocks = [(0, None, ''),(0, None, ''), (0, None)],
mocks_dict = mocks_dict)
# for now, it's enough to know the method didn't fail
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
index 922ace2..8801746 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
@@ -165,13 +165,15 @@ class TestJobHistoryServer(RMFTestCase):
owner = 'spark',
group = 'hadoop',
create_parents = True,
- mode = 0775
+ mode = 0775,
+ cd_access = 'a',
)
self.assertResourceCalled('Directory', '/var/log/spark',
owner = 'spark',
group = 'hadoop',
create_parents = True,
- mode = 0775
+ mode = 0775,
+ cd_access = 'a',
)
self.assertResourceCalled('HdfsResource', '/user/spark',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
@@ -246,13 +248,15 @@ class TestJobHistoryServer(RMFTestCase):
owner = 'spark',
group = 'hadoop',
create_parents = True,
- mode = 0775
+ mode = 0775,
+ cd_access = 'a',
)
self.assertResourceCalled('Directory', '/var/log/spark',
owner = 'spark',
group = 'hadoop',
create_parents = True,
- mode = 0775
+ mode = 0775,
+ cd_access = 'a',
)
self.assertResourceCalled('HdfsResource', '/user/spark',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
index b4535cd..e15d5a4 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
@@ -58,13 +58,15 @@ class TestSparkClient(RMFTestCase):
owner = 'spark',
group = 'hadoop',
create_parents = True,
- mode = 0775
+ mode = 0775,
+ cd_access = 'a',
)
self.assertResourceCalled('Directory', '/var/log/spark',
owner = 'spark',
group = 'hadoop',
create_parents = True,
- mode = 0775
+ mode = 0775,
+ cd_access = 'a',
)
self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
owner = 'spark',
@@ -108,13 +110,15 @@ class TestSparkClient(RMFTestCase):
owner = 'spark',
group = 'hadoop',
create_parents = True,
- mode = 0775
+ mode = 0775,
+ cd_access = 'a',
)
self.assertResourceCalled('Directory', '/var/log/spark',
owner = 'spark',
group = 'hadoop',
create_parents = True,
- mode = 0775
+ mode = 0775,
+ cd_access = 'a',
)
self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
owner = 'spark',
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py b/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
index 92dd634..a199d00 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
@@ -185,4 +185,16 @@ class TestConfSelect(RMFTestCase):
conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234", packages["hadoop"])
- self.assertEqual(pprint.pformat(self.env.resource_list), "[]")
\ No newline at end of file
+ self.assertEqual(pprint.pformat(self.env.resource_list), "[]")
+
+
+ def test_restrictions(self):
+
+ Script.config.update({'roleParameters': {'cluster_version_summary': {'services': {'HIVE': {'upgrade': True}}}}})
+
+ restricted = conf_select.get_restricted_packages()
+ self.assertTrue("hive" in restricted)
+ self.assertTrue("hive-hcatalog" in restricted)
+ self.assertTrue("hive2" in restricted)
+ self.assertTrue("tez_hive2" in restricted)
+ self.assertTrue("hadoop" not in restricted)
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.2/configs/oozie-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/oozie-upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/oozie-upgrade.json
index 86ca03a..29cbddc 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/oozie-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/oozie-upgrade.json
@@ -55,7 +55,8 @@
"mysql_jdbc_url": "http://c6401.ambari.apache.org:8080/resources//mysql-connector-java.jar",
"custom_mysql_jdbc_name" : "mysql-connector-java.jar",
"custom_oracle_jdbc_name" : "oracle-jdbc-driver.jar",
- "custom_postgres_jdbc_name" : "test-postgres-jdbc.jar"
+ "custom_postgres_jdbc_name" : "test-postgres-jdbc.jar",
+ "gpl_license_accepted": "true"
},
"commandType": "EXECUTION_COMMAND",
"roleParams": {
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
index b80476c..8695653 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
@@ -109,9 +109,7 @@ class TestMahoutClient(RMFTestCase):
self.assertResourceCalled('Execute', 'mahout seqdirectory --input /user/ambari-qa/mahoutsmokeinput/'
'sample-mahout-test.txt --output /user/ambari-qa/mahoutsmokeoutput/ '
'--charset utf-8',
- environment = {'HADOOP_CONF_DIR': '/usr/hdp/2.2.1.0-2067/hadoop/conf',
- 'HADOOP_HOME': '/usr/hdp/2.2.1.0-2067/hadoop',
- 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45',
+ environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45',
'MAHOUT_HOME': '/usr/hdp/current/mahout-client'},
path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
tries = 3,
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
index fbe5403..78d45cf 100644
--- a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
@@ -85,13 +85,15 @@ class TestSparkThriftServer(RMFTestCase):
owner = 'spark',
group = 'hadoop',
create_parents = True,
- mode = 0775
+ mode = 0775,
+ cd_access = 'a',
)
self.assertResourceCalled('Directory', '/var/log/spark',
owner = 'spark',
group = 'hadoop',
create_parents = True,
- mode = 0775
+ mode = 0775,
+ cd_access = 'a',
)
self.assertResourceCalled('HdfsResource', '/user/spark',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.5/HIVE/running_withMOTDmsg_andTrailingMsg.txt
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/HIVE/running_withMOTDmsg_andTrailingMsg.txt b/ambari-server/src/test/python/stacks/2.5/HIVE/running_withMOTDmsg_andTrailingMsg.txt
new file mode 100644
index 0000000..394faef
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.5/HIVE/running_withMOTDmsg_andTrailingMsg.txt
@@ -0,0 +1,46 @@
+######## Hortonworks #############
+This is MOTD message, added for testing in qe infra
+{
+ "amInfo" : {
+ "appName" : "llap",
+ "appType" : "org-apache-slider",
+ "appId" : "application_1455662455106_10882",
+ "containerId" : "container_e14_1455662455106_10882_01_000001",
+ "hostname" : "HOST_REPLACED",
+ "amWebUrl" : "http://HOST_REPLACED:1025/"
+ },
+ "state" : "RUNNING_ALL",
+ "originalConfigurationPath" : "hdfs://HOST_REPLACED:8020/user/USER_REPLACED/.slider/cluster/llap/snapshot",
+ "generatedConfigurationPath" : "hdfs://HOST_REPLACED:8020/user/USER_REPLACED/.slider/cluster/llap/generated",
+ "desiredInstances" : 3,
+ "liveInstances" : 3,
+ "appStartTime" : 1459625802169,
+ "llapInstances" : [ {
+ "hostname" : "HOST_REPLACED",
+ "containerId" : "container_e14_1455662455106_10882_01_000003",
+ "statusUrl" : "http://HOST_REPLACED:15002/status",
+ "webUrl" : "http://HOST_REPLACED:15002",
+ "rpcPort" : 15001,
+ "mgmtPort" : 15004,
+ "shufflePort" : 15551
+ }, {
+ "hostname" : "HOST_REPLACED",
+ "containerId" : "container_e14_1455662455106_10882_01_000002",
+ "statusUrl" : "http://HOST_REPLACED:15002/status",
+ "webUrl" : "http://HOST_REPLACED:15002",
+ "rpcPort" : 15001,
+ "mgmtPort" : 15004,
+ "shufflePort" : 15551
+ }, {
+ "hostname" : "HOST_REPLACED",
+ "containerId" : "container_e14_1455662455106_10882_01_000004",
+ "statusUrl" : "http://HOST_REPLACED:15002/status",
+ "webUrl" : "http://HOST_REPLACED:15002",
+ "rpcPort" : 15001,
+ "mgmtPort" : 15004,
+ "shufflePort" : 15551
+ } ]
+}
+
+# THIS IS A DUMMY TRAILING MESSAGE 1
+# THIS IS A DUMMY TRAILING MESSAGE 2
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
index 4951c7e..4eb16c2 100644
--- a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
+++ b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
@@ -115,8 +115,7 @@ class TestHiveServerInteractive(RMFTestCase):
)
self.assertResourceCalled('Execute',
'/tmp/start_hiveserver2_interactive_script /var/run/hive/hive-server2-interactive.out /var/log/hive/hive-server2-interactive.err /var/run/hive/hive-interactive.pid /usr/hdp/current/hive-server2-hive2/conf/conf.server /var/log/hive',
- environment={'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive2',
+ environment={'HIVE_BIN': '/usr/hdp/current/hive-server2-hive2/bin/hive2',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if="ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user='hive',
@@ -178,8 +177,7 @@ class TestHiveServerInteractive(RMFTestCase):
)
self.assertResourceCalled('Execute',
'/tmp/start_hiveserver2_interactive_script /var/run/hive/hive-server2-interactive.out /var/log/hive/hive-server2-interactive.err /var/run/hive/hive-interactive.pid /usr/hdp/current/hive-server2-hive2/conf/conf.server /var/log/hive',
- environment={'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive2',
+ environment={'HIVE_BIN': '/usr/hdp/current/hive-server2-hive2/bin/hive2',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if="ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user='hive',
@@ -227,8 +225,7 @@ class TestHiveServerInteractive(RMFTestCase):
)
self.assertResourceCalled('Execute',
'/tmp/start_hiveserver2_interactive_script /var/run/hive/hive-server2-interactive.out /var/log/hive/hive-server2-interactive.err /var/run/hive/hive-interactive.pid /usr/hdp/current/hive-server2-hive2/conf/conf.server /var/log/hive',
- environment={'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive2',
+ environment={'HIVE_BIN': '/usr/hdp/current/hive-server2-hive2/bin/hive2',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if="ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user='hive',
@@ -305,8 +302,7 @@ class TestHiveServerInteractive(RMFTestCase):
)
self.assertResourceCalled('Execute',
'/tmp/start_hiveserver2_interactive_script /var/run/hive/hive-server2-interactive.out /var/log/hive/hive-server2-interactive.err /var/run/hive/hive-interactive.pid /usr/hdp/current/hive-server2-hive2/conf/conf.server /var/log/hive',
- environment={'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive2',
+ environment={'HIVE_BIN': '/usr/hdp/current/hive-server2-hive2/bin/hive2',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if="ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user='hive',
@@ -364,8 +360,7 @@ class TestHiveServerInteractive(RMFTestCase):
)
self.assertResourceCalled('Execute',
'/tmp/start_hiveserver2_interactive_script /var/run/hive/hive-server2-interactive.out /var/log/hive/hive-server2-interactive.err /var/run/hive/hive-interactive.pid /usr/hdp/current/hive-server2-hive2/conf/conf.server /var/log/hive',
- environment={'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive2',
+ environment={'HIVE_BIN': '/usr/hdp/current/hive-server2-hive2/bin/hive2',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if="ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user='hive',
@@ -920,6 +915,27 @@ class TestHiveServerInteractive(RMFTestCase):
+ # Tests for function '_make_valid_json()' : will be passed in with 'llapstatus' output which will be :
+ # (1). A string parseable as JSON, but which may also have (2) and/or (3).
+ # (2). Has extra lines in beginning (eg: from MOTD logging embedded)
+ # AND/OR
+ # (3). Extra lines at the end.
+
+ # Beginning and end lines need to be removed before being parsed as JSON
+ def test_make_valid_json_11(self):
+ # Setting up input for fn. '_make_valid_json()'
+ input_file_handle = open(self.get_src_folder() + "/test/python/stacks/2.5/HIVE/running_withMOTDmsg_andTrailingMsg.txt","r")
+ llap_app_info = input_file_handle.read()
+ llap_app_info_as_json = self.hsi._make_valid_json(llap_app_info)
+
+ # Set up expected output
+ expected_ouput_file_handle = open(self.get_src_folder() + "/test/python/stacks/2.5/HIVE/running.json","r")
+ expected_ouput_data = expected_ouput_file_handle.read()
+ expected_ouput_data_as_json = json.loads(expected_ouput_data)
+
+ # Verification
+ self.assertEqual(llap_app_info_as_json, expected_ouput_data_as_json)
+
# Tests for fn : 'check_llap_app_status_in_hdp_tp()'
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json
index 288d155..69000df 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-admin-secured.json
@@ -432,9 +432,7 @@
"create_ambari_principal": "true",
"service_check_principal_name": "${cluster_name|toLower()}-${short_date}",
"executable_search_paths": "/usr/bin, /usr/kerberos/bin, /usr/sbin, /usr/lib/mit/bin, /usr/lib/mit/sbin",
- "password_chat_timeout": "5",
"kdc_type": "mit-kdc",
- "set_password_expiry": "false",
"password_min_punctuation": "1",
"container_dn": "",
"case_insensitive_username_rules": "false",
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
index f7f054a..daef35d 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
@@ -529,9 +529,7 @@
"create_ambari_principal": "true",
"service_check_principal_name": "${cluster_name|toLower()}-${short_date}",
"executable_search_paths": "/usr/bin, /usr/kerberos/bin, /usr/sbin, /usr/lib/mit/bin, /usr/lib/mit/sbin",
- "password_chat_timeout": "5",
"kdc_type": "mit-kdc",
- "set_password_expiry": "false",
"password_min_punctuation": "1",
"container_dn": "",
"case_insensitive_username_rules": "false",
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-secured.json b/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-secured.json
index 38b5906..06b247a 100644
--- a/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-secured.json
+++ b/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-secured.json
@@ -456,9 +456,7 @@
"create_ambari_principal": "true",
"service_check_principal_name": "${cluster_name|toLower()}-${short_date}",
"executable_search_paths": "/usr/bin, /usr/kerberos/bin, /usr/sbin, /usr/lib/mit/bin, /usr/lib/mit/sbin",
- "password_chat_timeout": "5",
"kdc_type": "mit-kdc",
- "set_password_expiry": "false",
"password_min_punctuation": "1",
"container_dn": "",
"case_insensitive_username_rules": "false",
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/resources/PreconfigureActionTest_cluster_config.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/PreconfigureActionTest_cluster_config.json b/ambari-server/src/test/resources/PreconfigureActionTest_cluster_config.json
index 2a744c7..0b357e9 100644
--- a/ambari-server/src/test/resources/PreconfigureActionTest_cluster_config.json
+++ b/ambari-server/src/test/resources/PreconfigureActionTest_cluster_config.json
@@ -95,7 +95,6 @@
"manage_auth_to_local": "true",
"manage_identities": "true",
"master_kdc": "",
- "password_chat_timeout": "5",
"password_length": "20",
"password_min_digits": "1",
"password_min_lowercase_letters": "1",
@@ -104,7 +103,6 @@
"password_min_whitespace": "0",
"preconfigure_services": "DEFAULT",
"realm": "EXAMPLE.COM",
- "service_check_principal_name": "${cluster_name|toLower()}-${short_date}",
- "set_password_expiry": "false"
+ "service_check_principal_name": "${cluster_name|toLower()}-${short_date}"
}
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_ranger_kms.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_ranger_kms.json b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_ranger_kms.json
index e17e121..8c27a9a 100644
--- a/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_ranger_kms.json
+++ b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_ranger_kms.json
@@ -104,6 +104,292 @@
]
}
]
+ },
+ {
+ "name": "YARN",
+ "identities": [
+ {
+ "name": "yarn_spnego",
+ "reference": "/spnego"
+ },
+ {
+ "name": "yarn_smokeuser",
+ "reference": "/smokeuser"
+ }
+ ],
+ "configurations": [
+ {
+ "yarn-site": {
+ "yarn.timeline-service.enabled": "true",
+ "yarn.timeline-service.http-authentication.type": "kerberos",
+ "yarn.acl.enable": "true",
+ "yarn.admin.acl": "${yarn-env/yarn_user},dr.who",
+ "yarn.timeline-service.http-authentication.signature.secret": "",
+ "yarn.timeline-service.http-authentication.signature.secret.file": "",
+ "yarn.timeline-service.http-authentication.signer.secret.provider": "",
+ "yarn.timeline-service.http-authentication.signer.secret.provider.object": "",
+ "yarn.timeline-service.http-authentication.token.validity": "",
+ "yarn.timeline-service.http-authentication.cookie.domain": "",
+ "yarn.timeline-service.http-authentication.cookie.path": "",
+ "yarn.timeline-service.http-authentication.proxyuser.*.hosts": "",
+ "yarn.timeline-service.http-authentication.proxyuser.*.users": "",
+ "yarn.timeline-service.http-authentication.proxyuser.*.groups": "",
+ "yarn.timeline-service.http-authentication.kerberos.name.rules": "",
+ "yarn.resourcemanager.proxyuser.*.groups": "",
+ "yarn.resourcemanager.proxyuser.*.hosts": "",
+ "yarn.resourcemanager.proxyuser.*.users": "",
+ "yarn.resourcemanager.proxy-user-privileges.enabled": "true",
+ "yarn.resourcemanager.zk-acl" : "sasl:${principals/YARN/RESOURCEMANAGER/resource_manager_rm|principalPrimary()}:rwcda",
+ "hadoop.registry.secure" : "true",
+ "hadoop.registry.system.accounts" : "sasl:${principals/YARN/APP_TIMELINE_SERVER/app_timeline_server_yarn|principalPrimary()},sasl:${principals/MAPREDUCE2/HISTORYSERVER/history_server_jhs|principalPrimary()},sasl:${principals/HDFS/NAMENODE/hdfs|principalPrimary()},sasl:${principals/YARN/RESOURCEMANAGER/resource_manager_rm|principalPrimary()},sasl:${principals/HIVE/HIVE_SERVER/hive_server_hive|principalPrimary()}",
+ "hadoop.registry.client.auth" : "kerberos",
+ "hadoop.registry.jaas.context" : "Client"
+ }
+ },
+ {
+ "core-site": {
+ "hadoop.proxyuser.${yarn-env/yarn_user}.groups": "*",
+ "hadoop.proxyuser.${yarn-env/yarn_user}.hosts": "${clusterHostInfo/rm_host}"
+ }
+ },
+ {
+ "capacity-scheduler": {
+ "yarn.scheduler.capacity.root.acl_administer_queue": "${yarn-env/yarn_user}",
+ "yarn.scheduler.capacity.root.default.acl_administer_queue": "${yarn-env/yarn_user}",
+ "yarn.scheduler.capacity.root.acl_administer_jobs": "${yarn-env/yarn_user}",
+ "yarn.scheduler.capacity.root.default.acl_administer_jobs": "${yarn-env/yarn_user}",
+ "yarn.scheduler.capacity.root.default.acl_submit_applications": "${yarn-env/yarn_user}"
+ }
+ },
+ {
+ "ranger-yarn-audit": {
+ "xasecure.audit.jaas.Client.loginModuleName": "com.sun.security.auth.module.Krb5LoginModule",
+ "xasecure.audit.jaas.Client.loginModuleControlFlag": "required",
+ "xasecure.audit.jaas.Client.option.useKeyTab": "true",
+ "xasecure.audit.jaas.Client.option.storeKey": "false",
+ "xasecure.audit.jaas.Client.option.serviceName": "solr",
+ "xasecure.audit.destination.solr.force.use.inmemory.jaas.config": "true"
+ }
+ }
+ ],
+ "components": [
+ {
+ "name": "NODEMANAGER",
+ "identities": [
+ {
+ "name": "nodemanager_nm",
+ "principal": {
+ "value": "nm/_HOST@${realm}",
+ "type" : "service",
+ "configuration": "yarn-site/yarn.nodemanager.principal",
+ "local_username": "${yarn-env/yarn_user}"
+ },
+ "keytab": {
+ "file": "${keytab_dir}/nm.service.keytab",
+ "owner": {
+ "name": "${yarn-env/yarn_user}",
+ "access": "r"
+ },
+ "group": {
+ "name": "${cluster-env/user_group}",
+ "access": ""
+ },
+ "configuration": "yarn-site/yarn.nodemanager.keytab"
+ }
+ },
+ {
+ "name": "yarn_nodemanager_hive_server_hive",
+ "reference": "/HIVE/HIVE_SERVER/hive_server_hive",
+ "principal": {
+ "configuration": "hive-interactive-site/hive.llap.daemon.service.principal"
+ },
+ "keytab": {
+ "configuration": "hive-interactive-site/hive.llap.daemon.keytab.file"
+ },
+ "when" : {
+ "contains" : ["services", "HIVE"]
+ }
+ },
+ {
+ "name": "llap_task_hive",
+ "principal": {
+ "value": "hive/_HOST@${realm}",
+ "type" : "service",
+ "configuration": "hive-interactive-site/hive.llap.task.principal"
+ },
+ "keytab": {
+ "file": "${keytab_dir}/hive.llap.task.keytab",
+ "owner": {
+ "name": "${yarn-env/yarn_user}",
+ "access": "r"
+ },
+ "group": {
+ "name": "${cluster-env/user_group}",
+ "access": "r"
+ },
+ "configuration": "hive-interactive-site/hive.llap.task.keytab.file"
+ },
+ "when" : {
+ "contains" : ["services", "HIVE"]
+ }
+ },
+ {
+ "name": "llap_zk_hive",
+ "principal": {
+ "value": "hive/_HOST@${realm}",
+ "type" : "service",
+ "configuration": "hive-interactive-site/hive.llap.zk.sm.principal"
+ },
+ "keytab": {
+ "file": "${keytab_dir}/hive.llap.zk.sm.keytab",
+ "owner": {
+ "name": "${yarn-env/yarn_user}",
+ "access": "r"
+ },
+ "group": {
+ "name": "${cluster-env/user_group}",
+ "access": "r"
+ },
+ "configuration": "hive-interactive-site/hive.llap.zk.sm.keytab.file"
+ },
+ "when" : {
+ "contains" : ["services", "HIVE"]
+ }
+ },
+ {
+ "name": "yarn_nodemanager_spnego",
+ "reference": "/spnego",
+ "principal": {
+ "configuration": "yarn-site/yarn.nodemanager.webapp.spnego-principal"
+ },
+ "keytab": {
+ "configuration": "yarn-site/yarn.nodemanager.webapp.spnego-keytab-file"
+ }
+ }
+ ],
+ "configurations": [
+ {
+ "yarn-site": {
+ "yarn.nodemanager.container-executor.class": "org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "name": "HIVE",
+ "identities": [
+ {
+ "name": "hive_spnego",
+ "reference": "/spnego"
+ },
+ {
+ "name": "hive_smokeuser",
+ "reference": "/smokeuser"
+ }
+ ],
+ "configurations": [
+ {
+ "hive-site": {
+ "hive.metastore.sasl.enabled": "true",
+ "hive.server2.authentication": "KERBEROS"
+ }
+ },
+ {
+ "ranger-hive-audit": {
+ "xasecure.audit.jaas.Client.loginModuleName": "com.sun.security.auth.module.Krb5LoginModule",
+ "xasecure.audit.jaas.Client.loginModuleControlFlag": "required",
+ "xasecure.audit.jaas.Client.option.useKeyTab": "true",
+ "xasecure.audit.jaas.Client.option.storeKey": "false",
+ "xasecure.audit.jaas.Client.option.serviceName": "solr",
+ "xasecure.audit.destination.solr.force.use.inmemory.jaas.config": "true"
+ }
+ }
+ ],
+ "components": [
+ {
+ "name": "HIVE_SERVER",
+ "identities": [
+ {
+ "name": "hive_hive_server_hdfs",
+ "reference": "/HDFS/NAMENODE/hdfs"
+ },
+ {
+ "name": "hive_server_hive",
+ "principal": {
+ "value": "hive/_HOST@${realm}",
+ "type": "service",
+ "configuration": "hive-site/hive.server2.authentication.kerberos.principal",
+ "local_username": "${hive-env/hive_user}"
+ },
+ "keytab": {
+ "file": "${keytab_dir}/hive.service.keytab",
+ "owner": {
+ "name": "${hive-env/hive_user}",
+ "access": "r"
+ },
+ "group": {
+ "name": "${cluster-env/user_group}",
+ "access": "r"
+ },
+ "configuration": "hive-site/hive.server2.authentication.kerberos.keytab"
+ }
+ },
+ {
+ "name": "atlas_kafka",
+ "reference": "/HIVE/HIVE_SERVER/hive_server_hive",
+ "principal": {
+ "configuration": "hive-atlas-application.properties/atlas.jaas.KafkaClient.option.principal"
+ },
+ "keytab": {
+ "configuration": "hive-atlas-application.properties/atlas.jaas.KafkaClient.option.keyTab"
+ }
+ },
+ {
+ "name": "hive_hive_server_spnego",
+ "reference": "/spnego",
+ "principal": {
+ "configuration": "hive-site/hive.server2.authentication.spnego.principal"
+ },
+ "keytab": {
+ "configuration": "hive-site/hive.server2.authentication.spnego.keytab"
+ }
+ },
+ {
+ "name": "ranger_audit",
+ "reference": "/HIVE/HIVE_SERVER/hive_server_hive",
+ "principal": {
+ "configuration": "ranger-hive-audit/xasecure.audit.jaas.Client.option.principal"
+ },
+ "keytab": {
+ "configuration": "ranger-hive-audit/xasecure.audit.jaas.Client.option.keyTab"
+ }
+ }
+ ]
+ },
+ {
+ "name": "HIVE_SERVER_INTERACTIVE",
+ "identities": [
+ {
+ "name": "hive_hive_server_interactive_hdfs",
+ "reference": "/HDFS/NAMENODE/hdfs"
+ },
+ {
+ "name": "hive_hive_server_interactive_hive_server_hive",
+ "reference": "/HIVE/HIVE_SERVER/hive_server_hive"
+ },
+ {
+ "name": "hive_hive_server_interactive_spnego",
+ "reference": "/HIVE/HIVE_SERVER/spnego"
+ },
+ {
+ "name": "hive_hive_server_interactive_llap_zk_hive",
+ "reference": "/YARN/NODEMANAGER/llap_zk_hive"
+ }
+ ]
+ }
+ ]
}
]
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/resources/version_definition_with_tags.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/version_definition_with_tags.xml b/ambari-server/src/test/resources/version_definition_with_tags.xml
new file mode 100644
index 0000000..fe4266b
--- /dev/null
+++ b/ambari-server/src/test/resources/version_definition_with_tags.xml
@@ -0,0 +1,86 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<repository-version xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:noNamespaceSchemaLocation="version_definition.xsd">
+
+ <release>
+ <type>PATCH</type>
+ <stack-id>HDP-2.3</stack-id>
+ <version>2.3.4.1</version>
+ <build>1234</build>
+ <compatible-with>2.3.4.[1-9]</compatible-with>
+ <release-notes>http://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.3.4/</release-notes>
+ </release>
+
+ <manifest>
+ <service id="HDFS-271" name="HDFS" version="2.7.1" version-id="10" />
+ <service id="HIVE-110" name="HIVE" version="1.1.0" />
+ <service id="HIVE-200" name="HIVE" version="2.0.0" />
+ <service id="HBASE-899" name="HBASE" version="8.9.9" />
+ </manifest>
+
+ <available-services />
+
+ <repository-info>
+ <os family="redhat6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0</baseurl>
+ <repoid>HDP-2.3</repoid>
+ <reponame>HDP</reponame>
+ <unique>true</unique>
+ </repo>
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6</baseurl>
+ <repoid>HDP-UTILS-1.1.0.20</repoid>
+ <reponame>HDP-UTILS</reponame>
+ <unique>false</unique>
+ </repo>
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP-GPL/repos/centos6</baseurl>
+ <repoid>HDP-GPL</repoid>
+ <reponame>HDP-GPL</reponame>
+ <unique>false</unique>
+ <tags>
+ <tag>GPL</tag>
+ </tags>
+ </repo>
+ </os>
+ <os family="redhat7">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.3.0.0</baseurl>
+ <repoid>HDP-2.3</repoid>
+ <reponame>HDP</reponame>
+ <unique>true</unique>
+ </repo>
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos7</baseurl>
+ <repoid>HDP-UTILS-1.1.0.20</repoid>
+ <reponame>HDP-UTILS</reponame>
+ <unique>false</unique>
+ </repo>
+ </os>
+
+ </repository-info>
+
+ <upgrade>
+ <configuration type="hdfs-site">
+ <set key="foo" value="bar" />
+ </configuration>
+ </upgrade>
+</repository-version>
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-web/app/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/config.js b/ambari-web/app/config.js
index 0963f70..8dbbde3 100644
--- a/ambari-web/app/config.js
+++ b/ambari-web/app/config.js
@@ -82,7 +82,6 @@ App.supports = {
serviceAutoStart: true,
logSearch: true,
redhatSatellite: false,
- enableIpa: false,
addingNewRepository: false,
kerberosStackAdvisor: true,
logCountVizualization: false,
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-web/app/controllers/installer.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/installer.js b/ambari-web/app/controllers/installer.js
index 8aea887..4db0232 100644
--- a/ambari-web/app/controllers/installer.js
+++ b/ambari-web/app/controllers/installer.js
@@ -836,6 +836,7 @@ App.InstallerController = App.WizardController.extend(App.Persist, {
"repo_id": repository.get('repoId'),
"repo_name": repository.get('repoName'),
"components": repository.get('components'),
+ "tags": repository.get('tags'),
"distribution": repository.get('distribution')
}
});
@@ -856,7 +857,7 @@ App.InstallerController = App.WizardController.extend(App.Persist, {
var dfd = $.Deferred();
if (selectedStack && selectedStack.get('operatingSystems')) {
this.set('validationCnt', selectedStack.get('operatingSystems').filterProperty('isSelected').filterProperty('isEmpty', false).map(function (os) {
- return os.get('repositories.length');
+ return os.get('repositories').filterProperty('showRepo', true).length;
}).reduce(Em.sum, 0));
var verifyBaseUrl = !wizardStep1Controller.get('skipValidationChecked') && !wizardStep1Controller.get('selectedStack.useRedhatSatellite');
if (!verifyBaseUrl) {
@@ -865,32 +866,34 @@ App.InstallerController = App.WizardController.extend(App.Persist, {
selectedStack.get('operatingSystems').forEach(function (os) {
if (os.get('isSelected') && !os.get('isEmpty')) {
os.get('repositories').forEach(function (repo) {
- repo.setProperties({
- errorTitle: '',
- errorContent: '',
- validation: 'INPROGRESS'
- });
- this.set('content.isCheckInProgress', true);
- App.ajax.send({
- name: 'wizard.advanced_repositories.valid_url',
- sender: this,
- data: {
- stackName: stackName,
- stackVersion: stackVersion,
- repoId: repo.get('repoId'),
- osType: os.get('osType'),
- osId: os.get('id'),
- dfd: dfd,
+ if (repo.get('showRepo')) {
+ repo.setProperties({
+ errorTitle: '',
+ errorContent: '',
+ validation: 'INPROGRESS'
+ });
+ this.set('content.isCheckInProgress', true);
+ App.ajax.send({
+ name: 'wizard.advanced_repositories.valid_url',
+ sender: this,
data: {
- 'Repositories': {
- 'base_url': repo.get('baseUrl'),
- "verify_base_url": verifyBaseUrl
+ stackName: stackName,
+ stackVersion: stackVersion,
+ repoId: repo.get('repoId'),
+ osType: os.get('osType'),
+ osId: os.get('id'),
+ dfd: dfd,
+ data: {
+ 'Repositories': {
+ 'base_url': repo.get('baseUrl'),
+ "verify_base_url": verifyBaseUrl
+ }
}
- }
- },
- success: 'checkRepoURLSuccessCallback',
- error: 'checkRepoURLErrorCallback'
- });
+ },
+ success: 'checkRepoURLSuccessCallback',
+ error: 'checkRepoURLErrorCallback'
+ });
+ }
}, this);
} else if (os.get('isSelected') && os.get('isEmpty')) {
os.set('isSelected', false);
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-web/app/controllers/main/admin/kerberos/step1_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/kerberos/step1_controller.js b/ambari-web/app/controllers/main/admin/kerberos/step1_controller.js
index 2e41e3d..9c864a8 100644
--- a/ambari-web/app/controllers/main/admin/kerberos/step1_controller.js
+++ b/ambari-web/app/controllers/main/admin/kerberos/step1_controller.js
@@ -25,29 +25,6 @@ App.KerberosWizardStep1Controller = Em.Controller.extend({
isSubmitDisabled: Em.computed.someBy('selectedOption.preConditions', 'checked', false),
- ipaOption: Em.Object.create({
- displayName: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa'),
- value: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa'),
- preConditions: [
- Em.Object.create({
- displayText: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa.condition.1'),
- checked: false
- }),
- Em.Object.create({
- displayText: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa.condition.2'),
- checked: false
- }),
- Em.Object.create({
- displayText: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa.condition.3'),
- checked: false
- }),
- Em.Object.create({
- displayText: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa.condition.4'),
- checked: false
- })
- ]
- }),
-
options: Em.A([
Em.Object.create({
displayName: Em.I18n.t('admin.kerberos.wizard.step1.option.kdc'),
@@ -94,6 +71,28 @@ App.KerberosWizardStep1Controller = Em.Controller.extend({
]
}),
Em.Object.create({
+ displayName: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa'),
+ value: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa'),
+ preConditions: [
+ Em.Object.create({
+ displayText: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa.condition.1'),
+ checked: false
+ }),
+ Em.Object.create({
+ displayText: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa.condition.2'),
+ checked: false
+ }),
+ Em.Object.create({
+ displayText: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa.condition.3'),
+ checked: false
+ }),
+ Em.Object.create({
+ displayText: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa.condition.4'),
+ checked: false
+ })
+ ]
+ }),
+ Em.Object.create({
displayName: Em.I18n.t('admin.kerberos.wizard.step1.option.manual'),
value: Em.I18n.t('admin.kerberos.wizard.step1.option.manual'),
preConditions: [
@@ -135,13 +134,6 @@ App.KerberosWizardStep1Controller = Em.Controller.extend({
loadStep: function () {
- if (App.get('supports.enableIpa')) {
- var ipaOption = this.get('ipaOption');
- var options = this.get('options');
- if (options.indexOf(ipaOption) === -1){
- options.pushObject(ipaOption);
- }
- }
},
submit: function () {
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js b/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
index 05b0b31..a97e04e 100644
--- a/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
+++ b/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
@@ -46,7 +46,7 @@ App.KerberosWizardStep2Controller = App.WizardStep7Controller.extend(App.KDCCred
type: Em.I18n.t('admin.kerberos.wizard.step1.option.kdc')
},
'ipa': {
- configNames: ['group', 'set_password_expiry', 'password_chat_timeout'],
+ configNames: ['ipa_user_group'],
type: Em.I18n.t('admin.kerberos.wizard.step1.option.ipa')
}
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-web/app/controllers/main/host/bulk_operations_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/host/bulk_operations_controller.js b/ambari-web/app/controllers/main/host/bulk_operations_controller.js
index bcd0af0..94894dc 100644
--- a/ambari-web/app/controllers/main/host/bulk_operations_controller.js
+++ b/ambari-web/app/controllers/main/host/bulk_operations_controller.js
@@ -1187,9 +1187,17 @@ App.BulkOperationsController = Em.Controller.extend({
hostNamesSkipped = this._getSkippedForPassiveStateHosts(hosts);
}
- var message = operationData.componentNameFormatted ?
- Em.I18n.t('hosts.bulkOperation.confirmation.hostComponents').format(operationData.message, operationData.componentNameFormatted, hostNames.length) :
- Em.I18n.t('hosts.bulkOperation.confirmation.hosts').format(operationData.message, hostNames.length);
+ var message = "";
+ if (operationData.componentNameFormatted) {
+ message = Em.I18n.t('hosts.bulkOperation.confirmation.hostComponents').format(operationData.message, operationData.componentNameFormatted, hostNames.length);
+ } else {
+ if (operationData.action == 'DELETE') {
+ message = Em.I18n.t('hosts.bulkOperation.confirmation.delete.hosts').format(hostNames.length);
+ } else {
+ message = Em.I18n.t('hosts.bulkOperation.confirmation.hosts').format(operationData.message, hostNames.length);
+ }
+ }
+
return App.ModalPopup.show({
header: Em.I18n.t('hosts.bulkOperation.confirmation.header'),
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-web/app/controllers/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/item.js b/ambari-web/app/controllers/main/service/item.js
index aff2743..fda0dda 100644
--- a/ambari-web/app/controllers/main/service/item.js
+++ b/ambari-web/app/controllers/main/service/item.js
@@ -988,10 +988,19 @@ App.MainServiceItemController = Em.Controller.extend(App.SupportClientConfigsDow
}
}.observes('App.router.backgroundOperationsController.serviceTimestamp'),
+ nonClientServiceComponents: function () {
+ return App.MasterComponent.find().toArray().concat(App.SlaveComponent.find().toArray()).filterProperty('service.serviceName', this.get('content.serviceName'));
+ }.property('content.serviceName'),
+
isStartDisabled: function () {
if(this.get('isPending')) return true;
- return !(this.get('content.healthStatus') == 'red');
- }.property('content.healthStatus','isPending'),
+
+ var isDisabled = true;
+ this.get('nonClientServiceComponents').forEach(function(component) {
+ isDisabled = isDisabled ? !(component.get('installedAndMaintenanceOffCount') > 0) : false;
+ });
+ return isDisabled;
+ }.property('isPending', 'nonClientServiceComponents'),
isStopDisabled: function () {
if(this.get('isPending')) return true;
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-web/app/controllers/main/service/reassign/step3_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/reassign/step3_controller.js b/ambari-web/app/controllers/main/service/reassign/step3_controller.js
index 4898b75..d994aaf 100644
--- a/ambari-web/app/controllers/main/service/reassign/step3_controller.js
+++ b/ambari-web/app/controllers/main/service/reassign/step3_controller.js
@@ -286,6 +286,9 @@ App.ReassignMasterWizardStep3Controller = Em.Controller.extend({
success: 'onLoadConfigsTags'
});
}
+ else{
+ this.set('isLoaded', true);
+ }
},
clearStep: function () {