Posted to commits@ambari.apache.org by av...@apache.org on 2017/09/13 02:41:44 UTC
[1/2] ambari git commit: Revert "AMBARI-21939 : Ambari HDFS Heatmaps are not showing data (Not Applicable) for bytes read, bytes written & DataNode Process Network I/O Utilization. (avijayan)"
Repository: ambari
Updated Branches:
refs/heads/branch-2.6 e73b1fcf5 -> c69b750bd
Revert "AMBARI-21939 : Ambari HDFS Heatmaps are not showing data (Not Applicable) for bytes read, bytes written & DataNode Process Network I/O Utilization. (avijayan)"
This reverts commit e73b1fcf5b4ce944b21e7c662fbf403c87a8a6a8.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/559ee937
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/559ee937
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/559ee937
Branch: refs/heads/branch-2.6
Commit: 559ee937585ef5fb9cf0c6b3f16d7eae73b7d992
Parents: e73b1fc
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Tue Sep 12 19:41:05 2017 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Sep 12 19:41:05 2017 -0700
----------------------------------------------------------------------
.../server/upgrade/UpgradeCatalog260.java | 16 ----
.../common-services/HDFS/2.1.0.2.0/widgets.json | 12 +--
.../stacks/HDP/2.3/services/HDFS/widgets.json | 48 +++++------
.../PERF/1.0/services/FAKEHDFS/widgets.json | 42 +++++-----
.../server/upgrade/UpgradeCatalog260Test.java | 83 --------------------
5 files changed, 51 insertions(+), 150 deletions(-)
----------------------------------------------------------------------
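For context on the change being reverted: the widgets.json hunks below switch the HDFS heatmap widgets back from the "._rate" metric variants to the raw DataNode counters (dfs.datanode.BytesRead, dfs.datanode.BytesWritten, and so on), and the UpgradeCatalog260 hunk drops the migration that rewrote those widget definitions on upgrade. As a rough illustration (not part of the commit), the sketch below queries the Ambari Metrics Collector timeline endpoint for both metric forms so the data the heatmap would receive in each case can be compared; the collector hostname, port 6188, appId "datanode", and the DataNode host are assumptions made only for this example.

# Sketch only -- not part of this commit. Compares a raw DataNode counter
# with its "._rate" variant via the AMS Collector timeline metrics API.
import json
import urllib.request

COLLECTOR_HOST = "ams-collector.example.com"   # assumed collector hostname
BASE_URL = "http://%s:6188/ws/v1/timeline/metrics" % COLLECTOR_HOST

def fetch(metric_name, hostname):
    # Ask the collector for the latest datapoints of one metric on one host.
    url = "%s?metricNames=%s&appId=datanode&hostname=%s" % (
        BASE_URL, metric_name, hostname)
    with urllib.request.urlopen(url) as resp:
        return json.load(resp)

if __name__ == "__main__":
    datanode_host = "dn1.example.com"          # assumed DataNode host
    for name in ("dfs.datanode.BytesRead", "dfs.datanode.BytesRead._rate"):
        result = fetch(name, datanode_host)
        print(name, "->", result.get("metrics", []))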
http://git-wip-us.apache.org/repos/asf/ambari/blob/559ee937/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
index cf85a5c..426fe63 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
@@ -19,7 +19,6 @@ package org.apache.ambari.server.upgrade;
import java.sql.SQLException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@@ -400,7 +399,6 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
ensureZeppelinProxyUserConfigs();
updateKerberosDescriptorArtifacts();
updateAmsConfigs();
- updateHDFSWidgetDefinition();
}
/**
@@ -576,18 +574,4 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
}
}
}
-
- protected void updateHDFSWidgetDefinition() throws AmbariException {
- LOG.info("Updating HDFS widget definition.");
-
- Map<String, List<String>> widgetMap = new HashMap<>();
- Map<String, String> sectionLayoutMap = new HashMap<>();
-
- List<String> hdfsHeatmapWidgets = new ArrayList<>(Arrays.asList("HDFS Bytes Read", "HDFS Bytes Written",
- "DataNode Process Disk I/O Utilization", "DataNode Process Network I/O Utilization"));
- widgetMap.put("HDFS_HEATMAPS", hdfsHeatmapWidgets);
- sectionLayoutMap.put("HDFS_HEATMAPS", "default_hdfs_heatmap");
-
- updateWidgetDefinitionsForService("HDFS", widgetMap, sectionLayoutMap);
- }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/559ee937/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json
index 39c6c0e..bcfb2cc 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json
@@ -337,8 +337,8 @@
"is_visible": true,
"metrics": [
{
- "name": "dfs.datanode.BytesRead",
- "metric_path": "metrics/dfs/datanode/bytes_read",
+ "name": "dfs.datanode.BytesRead._rate",
+ "metric_path": "metrics/dfs/datanode/bytes_read._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
}
@@ -346,7 +346,7 @@
"values": [
{
"name": "HDFS Bytes Read",
- "value": "${dfs.datanode.BytesRead}"
+ "value": "${dfs.datanode.BytesRead._rate}"
}
],
"properties": {
@@ -361,8 +361,8 @@
"is_visible": false,
"metrics": [
{
- "name": "dfs.datanode.BytesWritten",
- "metric_path": "metrics/dfs/datanode/bytes_written",
+ "name": "dfs.datanode.BytesWritten._rate",
+ "metric_path": "metrics/dfs/datanode/bytes_written._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
}
@@ -370,7 +370,7 @@
"values": [
{
"name": "HDFS Bytes Written",
- "value": "${dfs.datanode.BytesWritten}"
+ "value": "${dfs.datanode.BytesWritten._rate}"
}
],
"properties": {
http://git-wip-us.apache.org/repos/asf/ambari/blob/559ee937/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
index 2b01af0..eeb9ff8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
@@ -416,8 +416,8 @@
"is_visible": true,
"metrics": [
{
- "name": "dfs.datanode.BytesRead",
- "metric_path": "metrics/dfs/datanode/bytes_read",
+ "name": "dfs.datanode.BytesRead._rate",
+ "metric_path": "metrics/dfs/datanode/bytes_read._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
}
@@ -425,7 +425,7 @@
"values": [
{
"name": "HDFS Bytes Read",
- "value": "${dfs.datanode.BytesRead}"
+ "value": "${dfs.datanode.BytesRead._rate}"
}
],
"properties": {
@@ -440,8 +440,8 @@
"is_visible": false,
"metrics": [
{
- "name": "dfs.datanode.BytesWritten",
- "metric_path": "metrics/dfs/datanode/bytes_written",
+ "name": "dfs.datanode.BytesWritten._rate",
+ "metric_path": "metrics/dfs/datanode/bytes_written._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
}
@@ -449,7 +449,7 @@
"values": [
{
"name": "HDFS Bytes Written",
- "value": "${dfs.datanode.BytesWritten}"
+ "value": "${dfs.datanode.BytesWritten._rate}"
}
],
"properties": {
@@ -537,26 +537,26 @@
"is_visible": false,
"metrics": [
{
- "name": "dfs.datanode.BytesRead",
- "metric_path": "metrics/dfs/datanode/bytes_read",
+ "name": "dfs.datanode.BytesRead._rate",
+ "metric_path": "metrics/dfs/datanode/bytes_read._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
},
{
- "name": "dfs.datanode.BytesWritten",
- "metric_path": "metrics/dfs/datanode/bytes_written",
+ "name": "dfs.datanode.BytesWritten._rate",
+ "metric_path": "metrics/dfs/datanode/bytes_written._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
},
{
- "name": "dfs.datanode.TotalReadTime",
- "metric_path": "metrics/dfs/datanode/TotalReadTime",
+ "name": "dfs.datanode.TotalReadTime._rate",
+ "metric_path": "metrics/dfs/datanode/TotalReadTime._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
},
{
- "name": "dfs.datanode.TotalWriteTime",
- "metric_path": "metrics/dfs/datanode/TotalWriteTime",
+ "name": "dfs.datanode.TotalWriteTime._rate",
+ "metric_path": "metrics/dfs/datanode/TotalWriteTime._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
}
@@ -564,7 +564,7 @@
"values": [
{
"name": "DataNode Process Disk I/O Utilization",
- "value": "${((dfs.datanode.BytesRead/dfs.datanode.TotalReadTime)+(dfs.datanode.BytesWritten/dfs.datanode.TotalWriteTime))*50}"
+ "value": "${((dfs.datanode.BytesRead._rate/dfs.datanode.TotalReadTime._rate)+(dfs.datanode.BytesWritten._rate/dfs.datanode.TotalWriteTime._rate))*50}"
}
],
"properties": {
@@ -579,26 +579,26 @@
"is_visible": false,
"metrics": [
{
- "name": "dfs.datanode.RemoteBytesRead",
- "metric_path": "metrics/dfs/datanode/RemoteBytesRead",
+ "name": "dfs.datanode.RemoteBytesRead._rate",
+ "metric_path": "metrics/dfs/datanode/RemoteBytesRead._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
},
{
- "name": "dfs.datanode.ReadsFromRemoteClient",
- "metric_path": "metrics/dfs/datanode/reads_from_remote_client",
+ "name": "dfs.datanode.ReadsFromRemoteClient._rate",
+ "metric_path": "metrics/dfs/datanode/reads_from_remote_client._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
},
{
- "name": "dfs.datanode.RemoteBytesWritten",
- "metric_path": "metrics/dfs/datanode/RemoteBytesWritten",
+ "name": "dfs.datanode.RemoteBytesWritten._rate",
+ "metric_path": "metrics/dfs/datanode/RemoteBytesWritten._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
},
{
- "name": "dfs.datanode.WritesFromRemoteClient",
- "metric_path": "metrics/dfs/datanode/writes_from_remote_client",
+ "name": "dfs.datanode.WritesFromRemoteClient._rate",
+ "metric_path": "metrics/dfs/datanode/writes_from_remote_client._rate",
"service_name": "HDFS",
"component_name": "DATANODE"
}
@@ -606,7 +606,7 @@
"values": [
{
"name": "DataNode Process Network I/O Utilization",
- "value": "${((dfs.datanode.RemoteBytesRead/dfs.datanode.ReadsFromRemoteClient)+(dfs.datanode.RemoteBytesWritten/dfs.datanode.WritesFromRemoteClient))*50}"
+ "value": "${((dfs.datanode.RemoteBytesRead._rate/dfs.datanode.ReadsFromRemoteClient._rate)+(dfs.datanode.RemoteBytesWritten._rate/dfs.datanode.WritesFromRemoteClient._rate))*50}"
}
],
"properties": {
http://git-wip-us.apache.org/repos/asf/ambari/blob/559ee937/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/widgets.json b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/widgets.json
index 94b604c..7a793f8 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/widgets.json
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/widgets.json
@@ -440,8 +440,8 @@
"is_visible": false,
"metrics": [
{
- "name": "dfs.datanode.BytesWritten",
- "metric_path": "metrics/dfs/datanode/bytes_written",
+ "name": "dfs.datanode.BytesWritten._rate",
+ "metric_path": "metrics/dfs/datanode/bytes_written._rate",
"service_name": "FAKEHDFS",
"component_name": "FAKEDATANODE"
}
@@ -449,7 +449,7 @@
"values": [
{
"name": "FAKEHDFS Bytes Written",
- "value": "${dfs.datanode.BytesWritten}"
+ "value": "${dfs.datanode.BytesWritten._rate}"
}
],
"properties": {
@@ -537,26 +537,26 @@
"is_visible": false,
"metrics": [
{
- "name": "dfs.datanode.BytesRead",
- "metric_path": "metrics/dfs/datanode/bytes_read",
+ "name": "dfs.datanode.BytesRead._rate",
+ "metric_path": "metrics/dfs/datanode/bytes_read._rate",
"service_name": "FAKEHDFS",
"component_name": "FAKEDATANODE"
},
{
- "name": "dfs.datanode.BytesWritten",
- "metric_path": "metrics/dfs/datanode/bytes_written",
+ "name": "dfs.datanode.BytesWritten._rate",
+ "metric_path": "metrics/dfs/datanode/bytes_written._rate",
"service_name": "FAKEHDFS",
"component_name": "FAKEDATANODE"
},
{
- "name": "dfs.datanode.TotalReadTime",
- "metric_path": "metrics/dfs/datanode/TotalReadTime",
+ "name": "dfs.datanode.TotalReadTime._rate",
+ "metric_path": "metrics/dfs/datanode/TotalReadTime._rate",
"service_name": "FAKEHDFS",
"component_name": "FAKEDATANODE"
},
{
- "name": "dfs.datanode.TotalWriteTime",
- "metric_path": "metrics/dfs/datanode/TotalWriteTime",
+ "name": "dfs.datanode.TotalWriteTime._rate",
+ "metric_path": "metrics/dfs/datanode/TotalWriteTime._rate",
"service_name": "FAKEHDFS",
"component_name": "FAKEDATANODE"
}
@@ -564,7 +564,7 @@
"values": [
{
"name": "FAKEDataNode Process Disk I/O Utilization",
- "value": "${((dfs.datanode.BytesRead/dfs.datanode.TotalReadTime)+(dfs.datanode.BytesWritten/dfs.datanode.TotalWriteTime))*50}"
+ "value": "${((dfs.datanode.BytesRead._rate/dfs.datanode.TotalReadTime._rate)+(dfs.datanode.BytesWritten._rate/dfs.datanode.TotalWriteTime._rate))*50}"
}
],
"properties": {
@@ -579,26 +579,26 @@
"is_visible": false,
"metrics": [
{
- "name": "dfs.datanode.RemoteBytesRead",
- "metric_path": "metrics/dfs/datanode/RemoteBytesRead",
+ "name": "dfs.datanode.RemoteBytesRead._rate",
+ "metric_path": "metrics/dfs/datanode/RemoteBytesRead._rate",
"service_name": "FAKEHDFS",
"component_name": "FAKEDATANODE"
},
{
- "name": "dfs.datanode.ReadsFromRemoteClient",
- "metric_path": "metrics/dfs/datanode/reads_from_remote_client",
+ "name": "dfs.datanode.ReadsFromRemoteClient._rate",
+ "metric_path": "metrics/dfs/datanode/reads_from_remote_client._rate",
"service_name": "FAKEHDFS",
"component_name": "FAKEDATANODE"
},
{
- "name": "dfs.datanode.RemoteBytesWritten",
- "metric_path": "metrics/dfs/datanode/RemoteBytesWritten",
+ "name": "dfs.datanode.RemoteBytesWritten._rate",
+ "metric_path": "metrics/dfs/datanode/RemoteBytesWritten._rate",
"service_name": "FAKEHDFS",
"component_name": "FAKEDATANODE"
},
{
- "name": "dfs.datanode.WritesFromRemoteClient",
- "metric_path": "metrics/dfs/datanode/writes_from_remote_client",
+ "name": "dfs.datanode.WritesFromRemoteClient._rate",
+ "metric_path": "metrics/dfs/datanode/writes_from_remote_client._rate",
"service_name": "FAKEHDFS",
"component_name": "FAKEDATANODE"
}
@@ -606,7 +606,7 @@
"values": [
{
"name": "FAKEDataNode Process Network I/O Utilization",
- "value": "${((dfs.datanode.RemoteBytesRead/dfs.datanode.ReadsFromRemoteClient)+(dfs.datanode.RemoteBytesWritten/dfs.datanode.WritesFromRemoteClient))*50}"
+ "value": "${((dfs.datanode.RemoteBytesRead._rate/dfs.datanode.ReadsFromRemoteClient._rate)+(dfs.datanode.RemoteBytesWritten._rate/dfs.datanode.WritesFromRemoteClient._rate))*50}"
}
],
"properties": {
http://git-wip-us.apache.org/repos/asf/ambari/blob/559ee937/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
index e363c8d..38ec46b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
@@ -50,10 +50,8 @@ import java.util.Set;
import javax.persistence.EntityManager;
import com.google.common.collect.Maps;
-import com.google.inject.AbstractModule;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.ActionManager;
-import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
@@ -62,23 +60,16 @@ import org.apache.ambari.server.controller.MaintenanceStateHelper;
import org.apache.ambari.server.orm.DBAccessor;
import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
import org.apache.ambari.server.orm.dao.ArtifactDAO;
-import org.apache.ambari.server.orm.dao.WidgetDAO;
import org.apache.ambari.server.orm.entities.ArtifactEntity;
-import org.apache.ambari.server.orm.entities.WidgetEntity;
-import org.apache.ambari.server.stack.StackManagerFactory;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.Service;
-import org.apache.ambari.server.state.ServiceInfo;
-import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
import org.apache.ambari.server.state.stack.OsFamily;
-import org.apache.commons.io.FileUtils;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.easymock.EasyMockRunner;
@@ -88,9 +79,7 @@ import org.easymock.MockType;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
-import org.junit.Rule;
import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import com.google.gson.Gson;
@@ -152,9 +141,6 @@ public class UpgradeCatalog260Test {
@Mock(type = MockType.NICE)
private Injector injector;
- @Rule
- public TemporaryFolder temporaryFolder = new TemporaryFolder();
-
@Before
public void init() {
reset(entityManagerProvider, injector);
@@ -749,73 +735,4 @@ public class UpgradeCatalog260Test {
assertTrue(Maps.difference(newProperties, updatedProperties).areEqual());
}
- @Test
- public void testHDFSWidgetUpdate() throws Exception {
- final Clusters clusters = createNiceMock(Clusters.class);
- final Cluster cluster = createNiceMock(Cluster.class);
- final AmbariManagementController controller = createNiceMock(AmbariManagementController.class);
- final Gson gson = new Gson();
- final WidgetDAO widgetDAO = createNiceMock(WidgetDAO.class);
- final AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class);
- WidgetEntity widgetEntity = createNiceMock(WidgetEntity.class);
- StackId stackId = new StackId("HDP", "2.0.0");
- StackInfo stackInfo = createNiceMock(StackInfo.class);
- ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
-
- String widgetStr = "{\n" +
- " \"layouts\": [\n" +
- " {\n" +
- " \"layout_name\": \"default_hdfs_heatmap\",\n" +
- " \"display_name\": \"Standard HDFS HeatMaps\",\n" +
- " \"section_name\": \"HDFS_HEATMAPS\",\n" +
- " \"widgetLayoutInfo\": [\n" +
- " {\n" +
- " \"widget_name\": \"HDFS Bytes Read\",\n" +
- " \"metrics\": [],\n" +
- " \"values\": []\n" +
- " }\n" +
- " ]\n" +
- " }\n" +
- " ]\n" +
- "}";
-
- File dataDirectory = temporaryFolder.newFolder();
- File file = new File(dataDirectory, "hdfs_widget.json");
- FileUtils.writeStringToFile(file, widgetStr);
-
- final Injector mockInjector = Guice.createInjector(new AbstractModule() {
- @Override
- protected void configure() {
- bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
- bind(AmbariManagementController.class).toInstance(controller);
- bind(Clusters.class).toInstance(clusters);
- bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
- bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
- bind(Gson.class).toInstance(gson);
- bind(WidgetDAO.class).toInstance(widgetDAO);
- bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
- bind(AmbariMetaInfo.class).toInstance(metaInfo);
- }
- });
- expect(controller.getClusters()).andReturn(clusters).anyTimes();
- expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
- put("normal", cluster);
- }}).anyTimes();
- expect(cluster.getClusterId()).andReturn(1L).anyTimes();
- expect(stackInfo.getService("HDFS")).andReturn(serviceInfo);
- expect(cluster.getDesiredStackVersion()).andReturn(stackId).anyTimes();
- expect(metaInfo.getStack("HDP", "2.0.0")).andReturn(stackInfo).anyTimes();
- expect(serviceInfo.getWidgetsDescriptorFile()).andReturn(file).anyTimes();
-
- expect(widgetDAO.findByName(1L, "HDFS Bytes Read", "ambari", "HDFS_HEATMAPS"))
- .andReturn(Collections.singletonList(widgetEntity));
- expect(widgetDAO.merge(widgetEntity)).andReturn(null);
- expect(widgetEntity.getWidgetName()).andReturn("HDFS Bytes Read").anyTimes();
-
- replay(clusters, cluster, controller, widgetDAO, metaInfo, widgetEntity, stackInfo, serviceInfo);
-
- mockInjector.getInstance(UpgradeCatalog260.class).updateHDFSWidgetDefinition();
-
- verify(clusters, cluster, controller, widgetDAO, widgetEntity, stackInfo, serviceInfo);
- }
}
[2/2] ambari git commit: Revert "AMBARI-21893 : NameNode Heap Usage (Daily) metric alert status flips to UNKNOWN intermittently when AMS HTTPS is enabled. (avijayan)"
Posted by av...@apache.org.
Revert "AMBARI-21893 : NameNode Heap Usage (Daily) metric alert status flips to UNKNOWN intermittently when AMS HTTPS is enabled. (avijayan)"
This reverts commit 01e8e50a216c3494a00a53626eb7386be1cb5ebc.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c69b750b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c69b750b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c69b750b
Branch: refs/heads/branch-2.6
Commit: c69b750bde1c6964bda4894898d28495c6370221
Parents: 559ee93
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Tue Sep 12 19:41:16 2017 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Sep 12 19:41:16 2017 -0700
----------------------------------------------------------------------
.../server/upgrade/UpgradeCatalog260.java | 29 --------
.../0.1.0/configuration/ams-ssl-client.xml | 9 +++
.../AMBARI_METRICS/0.1.0/package/scripts/ams.py | 10 ++-
.../0.1.0/package/scripts/params.py | 10 +--
.../server/upgrade/UpgradeCatalog260Test.java | 70 +-------------------
5 files changed, 19 insertions(+), 109 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c69b750b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
index 426fe63..9e145c0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
@@ -19,7 +19,6 @@ package org.apache.ambari.server.upgrade;
import java.sql.SQLException;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -28,7 +27,6 @@ import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.orm.DBAccessor;
import org.apache.ambari.server.orm.dao.ArtifactDAO;
import org.apache.ambari.server.orm.entities.ArtifactEntity;
@@ -123,8 +121,6 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
public static final String HOST_COMPONENT_STATE = "hostcomponentstate";
private static final String CORE_SITE = "core-site";
- public static final String AMS_SSL_CLIENT = "ams-ssl-client";
- public static final String METRIC_TRUSTSTORE_ALIAS = "ssl.client.truststore.alias";
/**
* Logger.
*/
@@ -398,7 +394,6 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
removeSupersetFromDruid();
ensureZeppelinProxyUserConfigs();
updateKerberosDescriptorArtifacts();
- updateAmsConfigs();
}
/**
@@ -550,28 +545,4 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
}
}
}
-
- protected void updateAmsConfigs() throws AmbariException {
- AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
- Clusters clusters = ambariManagementController.getClusters();
- if (clusters != null) {
- Map<String, Cluster> clusterMap = getCheckedClusterMap(clusters);
- if (clusterMap != null && !clusterMap.isEmpty()) {
- for (final Cluster cluster : clusterMap.values()) {
-
-
- Config amsSslClient = cluster.getDesiredConfigByType(AMS_SSL_CLIENT);
- if (amsSslClient != null) {
- Map<String, String> amsSslClientProperties = amsSslClient.getProperties();
-
- if (amsSslClientProperties.containsKey(METRIC_TRUSTSTORE_ALIAS)) {
- LOG.info("Removing " + METRIC_TRUSTSTORE_ALIAS + " from " + AMS_SSL_CLIENT);
- removeConfigurationPropertiesFromCluster(cluster, AMS_SSL_CLIENT, Collections.singleton(METRIC_TRUSTSTORE_ALIAS));
- }
-
- }
- }
- }
- }
- }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c69b750b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-ssl-client.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-ssl-client.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-ssl-client.xml
index d75bba2..cac39de 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-ssl-client.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-ssl-client.xml
@@ -39,4 +39,13 @@
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
+ <property>
+ <name>ssl.client.truststore.alias</name>
+ <value></value>
+ <description>Alias used to create certificate for AMS. (Default is hostname)</description>
+ <value-attributes>
+ <empty-value-valid>true</empty-value-valid>
+ </value-attributes>
+ <on-ambari-upgrade add="true"/>
+ </property>
</configuration>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c69b750b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py
index 88ac15f..4672501 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py
@@ -459,12 +459,10 @@ def export_ca_certs(dir_path):
truststore_p12 = os.path.join(tmpdir,'truststore.p12')
if (params.metric_truststore_type.lower() == 'jks'):
- if not params.metric_truststore_alias:
- for alias in params.metric_truststore_alias_list:
- # Convert truststore from JKS to PKCS12
- cmd = format("{sudo} {java64_home}/bin/keytool -importkeystore -srckeystore {metric_truststore_path} -destkeystore {truststore_p12} -srcalias " + alias + " -deststoretype PKCS12 -srcstorepass {metric_truststore_password} -deststorepass {metric_truststore_password}")
- Execute(cmd,
- )
+ # Convert truststore from JKS to PKCS12
+ cmd = format("{sudo} {java64_home}/bin/keytool -importkeystore -srckeystore {metric_truststore_path} -destkeystore {truststore_p12} -srcalias {metric_truststore_alias} -deststoretype PKCS12 -srcstorepass {metric_truststore_password} -deststorepass {metric_truststore_password}")
+ Execute(cmd,
+ )
truststore = truststore_p12
# Export all CA certificates from the truststore to the conf directory
http://git-wip-us.apache.org/repos/asf/ambari/blob/c69b750b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index f7fd545..071882b 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -94,15 +94,11 @@ else:
metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+metric_truststore_alias = default("/configurations/ams-ssl-client/ssl.client.truststore.alias", None)
+if not metric_truststore_alias:
+ metric_truststore_alias = metric_collector_host
metric_truststore_ca_certs='ca.pem'
-metric_truststore_alias_list = []
-for host in ams_collector_hosts.split(","):
- metric_truststore_alias = default("/configurations/ams-ssl-client/{host}.ssl.client.truststore.alias", None)
- if not metric_truststore_alias:
- metric_truststore_alias = host
- metric_truststore_alias_list.append(metric_truststore_alias)
-
agent_cache_dir = config['hostLevelParams']['agentCacheDir']
service_package_folder = config['commandParams']['service_package_folder']
stack_name = default("/hostLevelParams/stack_name", None)
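To make the restored behavior concrete: after this revert, params.py reads a single ssl.client.truststore.alias value from ams-ssl-client and falls back to the metric collector host when it is empty, and export_ca_certs in ams.py converts the JKS truststore to PKCS12 with one keytool -importkeystore call for that alias, rather than looping over a per-collector-host alias list. A simplified, self-contained sketch of that flow follows (illustration only; paths, host name, and password are placeholder assumptions, and the real scripts build and run the command through resource_management's format()/Execute() helpers):

# Sketch only -- simplified single-alias truststore conversion as restored
# by this revert. All concrete values below are placeholder assumptions.
import subprocess

configured_alias = ""                                   # ssl.client.truststore.alias (may be empty)
metric_collector_host = "ams-collector.example.com"     # assumed collector host
metric_truststore_path = "/etc/security/clientKeys/all.jks"
metric_truststore_password = "changeit"
truststore_p12 = "/tmp/truststore.p12"

# Restored default: use the collector host when no alias is configured.
metric_truststore_alias = configured_alias or metric_collector_host

# Single JKS -> PKCS12 conversion for that one alias, mirroring the keytool
# invocation in export_ca_certs after the revert.
cmd = [
    "keytool", "-importkeystore",
    "-srckeystore", metric_truststore_path,
    "-destkeystore", truststore_p12,
    "-srcalias", metric_truststore_alias,
    "-deststoretype", "PKCS12",
    "-srcstorepass", metric_truststore_password,
    "-deststorepass", metric_truststore_password,
]
subprocess.run(cmd, check=True)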
http://git-wip-us.apache.org/repos/asf/ambari/blob/c69b750b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
index 38ec46b..db6ebc1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
@@ -23,7 +23,6 @@ import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createMockBuilder;
-import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
@@ -31,7 +30,6 @@ import static org.easymock.EasyMock.newCapture;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertTrue;
import java.io.File;
import java.net.URL;
@@ -42,19 +40,16 @@ import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.persistence.EntityManager;
-import com.google.common.collect.Maps;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.ActionManager;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.AmbariManagementController;
-import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
import org.apache.ambari.server.controller.KerberosHelper;
import org.apache.ambari.server.controller.MaintenanceStateHelper;
import org.apache.ambari.server.orm.DBAccessor;
@@ -71,9 +66,7 @@ import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
import org.apache.ambari.server.state.stack.OsFamily;
import org.easymock.Capture;
-import org.easymock.EasyMock;
import org.easymock.EasyMockRunner;
-import org.easymock.EasyMockSupport;
import org.easymock.Mock;
import org.easymock.MockType;
import org.junit.After;
@@ -486,8 +479,8 @@ public class UpgradeCatalog260Test {
}
public void verifyGetCurrentVersionID(Capture<String[]> scdcaptureKey, Capture<String[]> scdcaptureValue) {
- assertTrue(Arrays.equals(scdcaptureKey.getValue(), new String[]{UpgradeCatalog260.STATE_COLUMN}));
- assertTrue(Arrays.equals(scdcaptureValue.getValue(), new String[]{UpgradeCatalog260.CURRENT}));
+ Assert.assertTrue(Arrays.equals(scdcaptureKey.getValue(), new String[]{UpgradeCatalog260.STATE_COLUMN}));
+ Assert.assertTrue(Arrays.equals(scdcaptureValue.getValue(), new String[]{UpgradeCatalog260.CURRENT}));
}
public void expectUpdateServiceComponentDesiredStateTable(Capture<DBColumnInfo> scdstadd1, Capture<DBColumnInfo> scdstalter1, Capture<DBColumnInfo> scdstadd2, Capture<DBColumnInfo> scdstalter2) throws SQLException {
@@ -630,7 +623,7 @@ public class UpgradeCatalog260Test {
verify(clusters, cluster, zeppelinEnvConf, coreSiteConf, coreSiteConfNew, controller);
- assertTrue(captureCoreSiteConfProperties.hasCaptured());
+ Assert.assertTrue(captureCoreSiteConfProperties.hasCaptured());
Assert.assertEquals("existing_value", captureCoreSiteConfProperties.getValue().get("hadoop.proxyuser.zeppelin_user.hosts"));
Assert.assertEquals("*", captureCoreSiteConfProperties.getValue().get("hadoop.proxyuser.zeppelin_user.groups"));
}
@@ -678,61 +671,4 @@ public class UpgradeCatalog260Test {
Assert.assertNull(kerberosDescriptorUpdated.getService("RANGER_KMS").getComponent("RANGER_KMS_SERVER").getIdentity("/smokeuser"));
}
-
- @Test
- public void testUpdateAmsConfigs() throws Exception{
-
- Map<String, String> oldProperties = new HashMap<String, String>() {
- {
- put("ssl.client.truststore.location", "/some/location");
- put("ssl.client.truststore.alias", "test_alias");
- }
- };
- Map<String, String> newProperties = new HashMap<String, String>() {
- {
- put("ssl.client.truststore.location", "/some/location");
- }
- };
-
- EasyMockSupport easyMockSupport = new EasyMockSupport();
-
- Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
- final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
- Config mockAmsSslClient = easyMockSupport.createNiceMock(Config.class);
-
- expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
- put("normal", cluster);
- }}).once();
- expect(cluster.getDesiredConfigByType("ams-ssl-client")).andReturn(mockAmsSslClient).atLeastOnce();
- expect(mockAmsSslClient.getProperties()).andReturn(oldProperties).anyTimes();
-
- Injector injector = easyMockSupport.createNiceMock(Injector.class);
- expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
- expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
-
- replay(injector, clusters, mockAmsSslClient, cluster);
-
- AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
- .addMockedMethod("createConfiguration")
- .addMockedMethod("getClusters", new Class[] { })
- .addMockedMethod("createConfig")
- .withConstructor(createNiceMock(ActionManager.class), clusters, injector)
- .createNiceMock();
-
- Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
- Capture<Map> propertiesCapture = EasyMock.newCapture();
-
- expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
- expect(controller.getClusters()).andReturn(clusters).anyTimes();
- expect(controller.createConfig(anyObject(Cluster.class), anyString(), capture(propertiesCapture), anyString(),
- anyObject(Map.class))).andReturn(createNiceMock(Config.class)).once();
-
- replay(controller, injector2);
- new UpgradeCatalog260(injector2).updateAmsConfigs();
- easyMockSupport.verifyAll();
-
- Map<String, String> updatedProperties = propertiesCapture.getValue();
- assertTrue(Maps.difference(newProperties, updatedProperties).areEqual());
- }
-
}