You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by jo...@apache.org on 2014/08/20 17:30:02 UTC
[01/50] [abbrv] git commit: AMBARI-6893 Fix UI unit tests. (atkach)
Repository: ambari
Updated Branches:
refs/heads/branch-alerts-dev 68042051b -> 31f9ff836 (forced update)
AMBARI-6893 Fix UI unit tests. (atkach)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ec1707fd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ec1707fd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ec1707fd
Branch: refs/heads/branch-alerts-dev
Commit: ec1707fd7dee51ceb106435402931b4cff9b3a2f
Parents: cbf4fc8
Author: atkach <at...@hortonworks.com>
Authored: Mon Aug 18 18:08:51 2014 +0300
Committer: atkach <at...@hortonworks.com>
Committed: Mon Aug 18 18:08:51 2014 +0300
----------------------------------------------------------------------
ambari-web/app/utils/config.js | 2 +-
.../main/dashboard/widgets/namenode_cpu.js | 6 ++--
ambari-web/test/utils/config_test.js | 2 +-
.../main/dashboard/widgets/namenode_cpu_test.js | 32 +++++++-------------
4 files changed, 16 insertions(+), 26 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ec1707fd/ambari-web/app/utils/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js
index b18bff6..f576f38 100644
--- a/ambari-web/app/utils/config.js
+++ b/ambari-web/app/utils/config.js
@@ -106,7 +106,7 @@ App.config = Em.Object.create({
* @method getConfigTagFromFileName
**/
getConfigTagFromFileName: function(fileName) {
- return fileName.endsWith('.xml') ? fileName.slice(0,-4) : filename;
+ return fileName.endsWith('.xml') ? fileName.slice(0,-4) : fileName;
},
setPreDefinedServiceConfigs: function () {
http://git-wip-us.apache.org/repos/asf/ambari/blob/ec1707fd/ambari-web/app/views/main/dashboard/widgets/namenode_cpu.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets/namenode_cpu.js b/ambari-web/app/views/main/dashboard/widgets/namenode_cpu.js
index 6fca69e..cd3b7f8 100644
--- a/ambari-web/app/views/main/dashboard/widgets/namenode_cpu.js
+++ b/ambari-web/app/views/main/dashboard/widgets/namenode_cpu.js
@@ -78,12 +78,12 @@ App.NameNodeCpuPieChartView = App.PieChartDashboardWidgetView.extend({
},
calcIsPieExists: function() {
- return (this.get('cpuWio') != null);
+ return (!Em.isNone(this.get('cpuWio')));
},
- calcDataForPieChart: function() {
+ calcDataForPieChart: function () {
var value = this.get('cpuWio');
- value = value >= 100 ? 100: value;
+ value = value >= 100 ? 100 : value;
var percent = (value + 0).toFixed(1);
var percent_precise = (value + 0).toFixed(2);
return [ percent, percent_precise];
http://git-wip-us.apache.org/repos/asf/ambari/blob/ec1707fd/ambari-web/test/utils/config_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/config_test.js b/ambari-web/test/utils/config_test.js
index 9a9bd1e..451f748 100644
--- a/ambari-web/test/utils/config_test.js
+++ b/ambari-web/test/utils/config_test.js
@@ -337,7 +337,7 @@ describe('App.config', function () {
App.config.addAdvancedConfigs(this.storedConfigs, modelSetup.setupAdvancedConfigsObject(), 'ZOOKEEPER');
var property = this.storedConfigs.findProperty('name', 'custom.zoo.cfg');
expect(property).to.be.ok;
- expect(property.category).to.eql('Advanced');
+ expect(property.category).to.eql('Advanced zoo.cfg');
});
it('`capacity-scheduler.xml` property with name `content` should have `displayType` `multiLine`', function() {
http://git-wip-us.apache.org/repos/asf/ambari/blob/ec1707fd/ambari-web/test/views/main/dashboard/widgets/namenode_cpu_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/namenode_cpu_test.js b/ambari-web/test/views/main/dashboard/widgets/namenode_cpu_test.js
index 7d1ffec..347d5d0 100644
--- a/ambari-web/test/views/main/dashboard/widgets/namenode_cpu_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets/namenode_cpu_test.js
@@ -43,21 +43,17 @@ describe('App.NameNodeCpuPieChartView', function() {
describe('#calcIsPieExists', function() {
var tests = [
{
- model: Em.Object.create({
- used: 1
- }),
+ cpuWio: 1,
e: true,
m: 'Exists'
},
{
- model: Em.Object.create({
- used: 0
- }),
- e: true,
- m: 'Exists'
+ cpuWio: null,
+ e: false,
+ m: 'Not exists'
},
{
- model: Em.Object.create({}),
+ cpuWio: undefined,
e: false,
m: 'Not exists'
}
@@ -65,32 +61,26 @@ describe('App.NameNodeCpuPieChartView', function() {
tests.forEach(function(test) {
it(test.m, function() {
- nameNodeCpuPieChartView.set('model', test.model);
+ nameNodeCpuPieChartView.set('cpuWio', test.cpuWio);
expect(nameNodeCpuPieChartView.calcIsPieExists()).to.equal(test.e);
});
});
});
- describe('calcDataForPieChart', function() {
+ describe('calcDataForPieChart', function () {
var tests = [
{
- model: Em.Object.create({
- used: 0
- }),
+ cpuWio: 0,
e: ['0.0', '0.00'],
m: 'Nothing is used'
},
{
- model: Em.Object.create({
- used: 100
- }),
+ cpuWio: 100,
e: ['100.0', '100.00'],
m: 'All is used'
},
{
- model: Em.Object.create({
- used: 50
- }),
+ cpuWio: 50,
e: ['50.0', '50.00'],
m: 'Half is used'
}
@@ -98,7 +88,7 @@ describe('App.NameNodeCpuPieChartView', function() {
tests.forEach(function(test) {
it(test.m, function() {
- nameNodeCpuPieChartView.set('model', test.model);
+ nameNodeCpuPieChartView.set('cpuWio', test.cpuWio);
expect(nameNodeCpuPieChartView.calcDataForPieChart()).to.eql(test.e);
});
});
[09/50] [abbrv] git commit: AMBARI-6900. Upgrade DB Tests Fail. value column problem. (mahadev)
Posted by jo...@apache.org.
AMBARI-6900. Upgrade DB Tests Fail. value column problem. (mahadev)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1a9abc4e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1a9abc4e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1a9abc4e
Branch: refs/heads/branch-alerts-dev
Commit: 1a9abc4ede0618f512d51390d8fdb175a0c96911
Parents: 50e7983
Author: Mahadev Konar <ma...@apache.org>
Authored: Mon Aug 18 13:00:52 2014 -0700
Committer: Mahadev Konar <ma...@apache.org>
Committed: Mon Aug 18 13:00:52 2014 -0700
----------------------------------------------------------------------
.../server/orm/entities/AlertCurrentEntity.java | 2 +-
.../orm/entities/AlertDefinitionEntity.java | 2 +-
.../server/orm/entities/AlertGroupEntity.java | 2 +-
.../server/orm/entities/AlertHistoryEntity.java | 2 +-
.../server/orm/entities/AlertNoticeEntity.java | 2 +-
.../server/orm/entities/AlertTargetEntity.java | 2 +-
.../orm/entities/ClusterConfigEntity.java | 2 +-
.../server/orm/entities/ClusterEntity.java | 2 +-
.../server/orm/entities/ConfigGroupEntity.java | 2 +-
.../ambari/server/orm/entities/GroupEntity.java | 2 +-
.../orm/entities/HostRoleCommandEntity.java | 2 +-
.../server/orm/entities/MemberEntity.java | 2 +-
.../server/orm/entities/PermissionEntity.java | 2 +-
.../server/orm/entities/PrincipalEntity.java | 2 +-
.../orm/entities/PrincipalTypeEntity.java | 2 +-
.../server/orm/entities/PrivilegeEntity.java | 2 +-
.../entities/RequestOperationLevelEntity.java | 2 +-
.../entities/RequestResourceFilterEntity.java | 2 +-
.../orm/entities/RequestScheduleEntity.java | 2 +-
.../server/orm/entities/ResourceEntity.java | 2 +-
.../server/orm/entities/ResourceTypeEntity.java | 2 +-
.../orm/entities/ServiceConfigEntity.java | 2 +-
.../ambari/server/orm/entities/UserEntity.java | 2 +-
.../server/orm/entities/ViewEntityEntity.java | 2 +-
.../server/orm/entities/ViewInstanceEntity.java | 2 +-
.../server/upgrade/UpgradeCatalog170.java | 85 +++++++++-----------
.../main/resources/Ambari-DDL-MySQL-CREATE.sql | 52 ++++++------
.../main/resources/Ambari-DDL-Oracle-CREATE.sql | 52 ++++++------
.../resources/Ambari-DDL-Postgres-CREATE.sql | 4 +-
.../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql | 4 +-
30 files changed, 121 insertions(+), 126 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertCurrentEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertCurrentEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertCurrentEntity.java
index d00cbc7..cde61f2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertCurrentEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertCurrentEntity.java
@@ -43,7 +43,7 @@ import org.apache.ambari.server.state.MaintenanceState;
*/
@Entity
@Table(name = "alert_current")
-@TableGenerator(name = "alert_current_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value", pkColumnValue = "alert_current_id_seq", initialValue = 0, allocationSize = 1)
+@TableGenerator(name = "alert_current_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value", pkColumnValue = "alert_current_id_seq", initialValue = 0, allocationSize = 1)
@NamedQueries({
@NamedQuery(name = "AlertCurrentEntity.findAll", query = "SELECT alert FROM AlertCurrentEntity alert"),
@NamedQuery(name = "AlertCurrentEntity.findByService", query = "SELECT alert FROM AlertCurrentEntity alert JOIN alert.alertHistory history WHERE history.clusterId = :clusterId AND history.serviceName = :serviceName"),
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
index de30921..23ad8f4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
@@ -48,7 +48,7 @@ import org.apache.ambari.server.state.alert.Scope;
@Entity
@Table(name = "alert_definition", uniqueConstraints = @UniqueConstraint(columnNames = {
"cluster_id", "definition_name" }))
-@TableGenerator(name = "alert_definition_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value", pkColumnValue = "alert_definition_id_seq", initialValue = 0, allocationSize = 1)
+@TableGenerator(name = "alert_definition_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value", pkColumnValue = "alert_definition_id_seq", initialValue = 0, allocationSize = 1)
@NamedQueries({
@NamedQuery(name = "AlertDefinitionEntity.findAll", query = "SELECT alertDefinition FROM AlertDefinitionEntity alertDefinition"),
@NamedQuery(name = "AlertDefinitionEntity.findAllInCluster", query = "SELECT alertDefinition FROM AlertDefinitionEntity alertDefinition WHERE alertDefinition.clusterId = :clusterId"),
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
index 89f040c..976855e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertGroupEntity.java
@@ -43,7 +43,7 @@ import javax.persistence.UniqueConstraint;
@Entity
@Table(name = "alert_group", uniqueConstraints = @UniqueConstraint(columnNames = {
"cluster_id", "group_name" }))
-@TableGenerator(name = "alert_group_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value", pkColumnValue = "alert_group_id_seq", initialValue = 0, allocationSize = 1)
+@TableGenerator(name = "alert_group_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value", pkColumnValue = "alert_group_id_seq", initialValue = 0, allocationSize = 1)
@NamedQueries({
@NamedQuery(name = "AlertGroupEntity.findAll", query = "SELECT alertGroup FROM AlertGroupEntity alertGroup"),
@NamedQuery(name = "AlertGroupEntity.findAllInCluster", query = "SELECT alertGroup FROM AlertGroupEntity alertGroup WHERE alertGroup.clusterId = :clusterId"),
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertHistoryEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertHistoryEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertHistoryEntity.java
index 3e8b15b..502aca9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertHistoryEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertHistoryEntity.java
@@ -42,7 +42,7 @@ import org.apache.ambari.server.state.AlertState;
*/
@Entity
@Table(name = "alert_history")
-@TableGenerator(name = "alert_history_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value", pkColumnValue = "alert_history_id_seq", initialValue = 0, allocationSize = 1)
+@TableGenerator(name = "alert_history_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value", pkColumnValue = "alert_history_id_seq", initialValue = 0, allocationSize = 1)
@NamedQueries({
@NamedQuery(name = "AlertHistoryEntity.findAll", query = "SELECT alertHistory FROM AlertHistoryEntity alertHistory"),
@NamedQuery(name = "AlertHistoryEntity.findAllInCluster", query = "SELECT alertHistory FROM AlertHistoryEntity alertHistory WHERE alertHistory.clusterId = :clusterId"),
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertNoticeEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertNoticeEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertNoticeEntity.java
index 41bc1d8..af541cd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertNoticeEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertNoticeEntity.java
@@ -42,7 +42,7 @@ import org.apache.ambari.server.state.NotificationState;
*/
@Entity
@Table(name = "alert_notice")
-@TableGenerator(name = "alert_notice_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value", pkColumnValue = "alert_notice_id_seq", initialValue = 0, allocationSize = 1)
+@TableGenerator(name = "alert_notice_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value", pkColumnValue = "alert_notice_id_seq", initialValue = 0, allocationSize = 1)
@NamedQueries({
@NamedQuery(name = "AlertNoticeEntity.findAll", query = "SELECT notice FROM AlertNoticeEntity notice"),
@NamedQuery(name = "AlertNoticeEntity.removeByDefinitionId", query = "DELETE FROM AlertNoticeEntity notice WHERE notice.alertHistory.alertDefinition.definitionId = :definitionId") })
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertTargetEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertTargetEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertTargetEntity.java
index 74da92c..89728e5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertTargetEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertTargetEntity.java
@@ -43,7 +43,7 @@ import javax.persistence.TableGenerator;
*/
@Entity
@Table(name = "alert_target")
-@TableGenerator(name = "alert_target_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value", pkColumnValue = "alert_target_id_seq", initialValue = 0, allocationSize = 1)
+@TableGenerator(name = "alert_target_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value", pkColumnValue = "alert_target_id_seq", initialValue = 0, allocationSize = 1)
@NamedQueries({
@NamedQuery(name = "AlertTargetEntity.findAll", query = "SELECT alertTarget FROM AlertTargetEntity alertTarget"),
@NamedQuery(name = "AlertTargetEntity.findByName", query = "SELECT alertTarget FROM AlertTargetEntity alertTarget WHERE alertTarget.targetName = :targetName"), })
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
index f513ee2..da8fac6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterConfigEntity.java
@@ -26,7 +26,7 @@ import java.util.Collection;
uniqueConstraints = {@UniqueConstraint(name = "UQ_config_type_tag", columnNames = {"cluster_id", "type_name", "version_tag"}),
@UniqueConstraint(name = "UQ_config_type_version", columnNames = {"cluster_id", "type_name", "version"})})
@TableGenerator(name = "config_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "config_id_seq"
, initialValue = 1
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
index 942c868..292a4d3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ClusterEntity.java
@@ -38,7 +38,7 @@ import static org.apache.commons.lang.StringUtils.defaultString;
})
@Entity
@TableGenerator(name = "cluster_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "cluster_id_seq"
, initialValue = 1
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ConfigGroupEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ConfigGroupEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ConfigGroupEntity.java
index 705adfd..a5e293c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ConfigGroupEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ConfigGroupEntity.java
@@ -47,7 +47,7 @@ import java.util.Collection;
"WHERE configgroup.tag=:tagName")
})
@TableGenerator(name = "configgroup_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "configgroup_id_seq"
, initialValue = 1
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/GroupEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/GroupEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/GroupEntity.java
index 5349f1e..9c3411e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/GroupEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/GroupEntity.java
@@ -40,7 +40,7 @@ import javax.persistence.UniqueConstraint;
@TableGenerator(name = "group_id_generator",
table = "ambari_sequences",
pkColumnName = "sequence_name",
- valueColumnName = "value",
+ valueColumnName = "sequence_value",
pkColumnValue = "group_id_seq",
initialValue = 1,
allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
index 490b289..599156a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
@@ -48,7 +48,7 @@ import org.apache.commons.lang.ArrayUtils;
@Table(name = "host_role_command")
@Entity
@TableGenerator(name = "host_role_command_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "host_role_command_id_seq"
, initialValue = 1
, allocationSize = 50
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/MemberEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/MemberEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/MemberEntity.java
index 04b1a87..5d9e6b1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/MemberEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/MemberEntity.java
@@ -33,7 +33,7 @@ import javax.persistence.UniqueConstraint;
@TableGenerator(name = "member_id_generator",
table = "ambari_sequences",
pkColumnName = "sequence_name",
- valueColumnName = "value",
+ valueColumnName = "sequence_value",
pkColumnValue = "member_id_seq",
initialValue = 1,
allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PermissionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PermissionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PermissionEntity.java
index 4702d05..6d1d873 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PermissionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PermissionEntity.java
@@ -36,7 +36,7 @@ import javax.persistence.TableGenerator;
@Table(name = "adminpermission")
@Entity
@TableGenerator(name = "permission_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "permission_id_seq"
, initialValue = 5
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalEntity.java
index 51438cd..e0767a3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalEntity.java
@@ -39,7 +39,7 @@ import javax.persistence.TableGenerator;
@Table(name = "adminprincipal")
@Entity
@TableGenerator(name = "principal_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "principal_id_seq"
, initialValue = 2
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalTypeEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalTypeEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalTypeEntity.java
index 2f37592..f7c0aa1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalTypeEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrincipalTypeEntity.java
@@ -26,7 +26,7 @@ import javax.persistence.*;
@Table(name = "adminprincipaltype")
@Entity
@TableGenerator(name = "principal_type_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "principal_type_id_seq"
, initialValue = 3
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrivilegeEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrivilegeEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrivilegeEntity.java
index fe97c7d..d86a22c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrivilegeEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PrivilegeEntity.java
@@ -36,7 +36,7 @@ import javax.persistence.TableGenerator;
@Table(name = "adminprivilege")
@Entity
@TableGenerator(name = "privilege_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "privilege_id_seq"
, initialValue = 1
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestOperationLevelEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestOperationLevelEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestOperationLevelEntity.java
index b7b3133..9f8453c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestOperationLevelEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestOperationLevelEntity.java
@@ -36,7 +36,7 @@ import javax.persistence.TableGenerator;
@Entity
@Table(name = "requestoperationlevel")
@TableGenerator(name = "operation_level_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "operation_level_id_seq"
, initialValue = 1
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java
index 4307d08..0dc7807 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java
@@ -32,7 +32,7 @@ import javax.persistence.TableGenerator;
@Entity
@Table(name = "requestresourcefilter")
@TableGenerator(name = "resourcefilter_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "resourcefilter_id_seq"
, initialValue = 1
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestScheduleEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestScheduleEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestScheduleEntity.java
index 60df7b4..5ed3480 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestScheduleEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestScheduleEntity.java
@@ -43,7 +43,7 @@ import java.util.List;
"WHERE reqSchedule.status=:status")
})
@TableGenerator(name = "schedule_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "requestschedule_id_seq"
, initialValue = 1
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ResourceEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ResourceEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ResourceEntity.java
index 28c7fde..0b7e591 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ResourceEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ResourceEntity.java
@@ -26,7 +26,7 @@ import javax.persistence.*;
@Table(name = "adminresource")
@Entity
@TableGenerator(name = "resource_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "resource_id_seq"
, initialValue = 2
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ResourceTypeEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ResourceTypeEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ResourceTypeEntity.java
index 0bcc6ab..6cebf08 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ResourceTypeEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ResourceTypeEntity.java
@@ -26,7 +26,7 @@ import javax.persistence.*;
@Table(name = "adminresourcetype")
@Entity
@TableGenerator(name = "resource_type_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "resource_type_id_seq"
, initialValue = 4
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
index ce47e28..86557ac 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
@@ -37,7 +37,7 @@ import java.util.List;
@Entity
@Table(name = "serviceconfig")
@TableGenerator(name = "service_config_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "service_config_id_seq"
, initialValue = 1
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UserEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UserEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UserEntity.java
index 90410be..a6c5548 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UserEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UserEntity.java
@@ -29,7 +29,7 @@ import java.util.Set;
@NamedQuery(name = "ldapUserByName", query = "SELECT user FROM UserEntity user where lower(user.userName)=:username AND user.ldapUser=true")
})
@TableGenerator(name = "user_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "user_id_seq"
, initialValue = 2
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntityEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntityEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntityEntity.java
index cae57fc..30d6e23 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntityEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewEntityEntity.java
@@ -36,7 +36,7 @@ import javax.persistence.TableGenerator;
@Table(name = "viewentity")
@Entity
@TableGenerator(name = "viewentity_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "viewentity_id_seq"
, initialValue = 1
, allocationSize = 50
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewInstanceEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewInstanceEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewInstanceEntity.java
index 3f1cd8f..66917e7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewInstanceEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ViewInstanceEntity.java
@@ -61,7 +61,7 @@ import org.apache.ambari.view.ViewInstanceDefinition;
@NamedQuery(name = "allViewInstances",
query = "SELECT viewInstance FROM ViewInstanceEntity viewInstance")
@TableGenerator(name = "view_instance_id_generator",
- table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "value"
+ table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value"
, pkColumnValue = "view_instance_id_seq"
, initialValue = 1
, allocationSize = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index 33a87a7..bef01c1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -129,8 +129,11 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
@Override
protected void executeDDLUpdates() throws AmbariException, SQLException {
- List<DBColumnInfo> columns;
+ // needs to be executed first
+ renameSequenceValueColumnName();
+
String dbType = getDbType();
+ List<DBColumnInfo> columns;
// add group and members tables
columns = new ArrayList<DBColumnInfo>();
@@ -342,62 +345,37 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
new String[]{"cluster_id", "type_name", "version_tag"}, true);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('alert_definition_id_seq', 0)", false);
- //service config version sequences
- String valueColumnName = "\"value\"";
- if (Configuration.ORACLE_DB_NAME.equals(dbType)
- || Configuration.MYSQL_DB_NAME.equals(dbType)) {
- valueColumnName = "value";
- }
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_definition_id_seq', 0)",
- false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_group_id_seq', 0)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('alert_group_id_seq', 0)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_target_id_seq', 0)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('alert_target_id_seq', 0)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_history_id_seq', 0)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('alert_history_id_seq', 0)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_notice_id_seq', 0)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('alert_notice_id_seq', 0)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_current_id_seq', 0)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('alert_current_id_seq', 0)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('group_id_seq', 1)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('group_id_seq', 1)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('member_id_seq', 1)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('member_id_seq', 1)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('resource_type_id_seq', 4)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('resource_type_id_seq', 4)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('resource_id_seq', 2)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('resource_id_seq', 2)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('principal_type_id_seq', 3)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('principal_type_id_seq', 3)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('principal_id_seq', 2)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('principal_id_seq', 2)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('permission_id_seq', 5)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('permission_id_seq', 5)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('privilege_id_seq', 1)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('privilege_id_seq', 1)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('service_config_id_seq', 1)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('service_config_id_seq', 1)", false);
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('service_config_application_id_seq', 1)", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('service_config_application_id_seq', 1)", false);
long count = 1;
ResultSet resultSet = null;
@@ -412,8 +390,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
}
}
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('config_id_seq', " + count + ")", false);
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('config_id_seq', " + count + ")", false);
dbAccessor.addFKConstraint("users", "FK_users_principal_id", "principal_id", "adminprincipal", "principal_id", true);
dbAccessor.addFKConstraint("clusters", "FK_clusters_resource_id", "resource_id", "adminresource", "resource_id", true);
@@ -433,6 +410,24 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
dbAccessor.executeQuery("ALTER TABLE adminpermission ADD CONSTRAINT UQ_perm_name_resource_type_id UNIQUE (permission_name, resource_type_id)");
}
+ /**
+ * Note that you can't use dbAccessor.renameColumn(...) here, as the column name is a reserved word and
+ * thus requires a custom approach for every database type.
+ */
+ private void renameSequenceValueColumnName() throws AmbariException, SQLException {
+ final String dbType = getDbType();
+ if (Configuration.MYSQL_DB_NAME.equals(dbType)) {
+ dbAccessor.executeQuery("ALTER TABLE ambari_sequences RENAME COLUMN \"value\" to sequence_value DECIMAL(38) NOT NULL");
+ } else if (Configuration.DERBY_DB_NAME.equals(dbType)) {
+ dbAccessor.executeQuery("RENAME COLUMN ambari_sequences.\"value\" to sequence_value");
+ } else if (Configuration.ORACLE_DB_NAME.equals(dbType)) {
+ dbAccessor.executeQuery("ALTER TABLE ambari_sequences RENAME COLUMN value to sequence_value");
+ } else {
+ // Postgres
+ dbAccessor.executeQuery("ALTER TABLE ambari_sequences RENAME COLUMN \"value\" to sequence_value");
+ }
+ }
+
private void populateConfigVersions() throws SQLException {
ResultSet resultSet = dbAccessor.executeSelect("SELECT DISTINCT type_name FROM clusterconfig ");
Set<String> configTypes = new HashSet<String>();
@@ -742,7 +737,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
updateConfigurationProperties("hbase-env",
Collections.singletonMap("hbase_regionserver_xmn_ratio", "0.2"), false,
false);
-
+
updateConfigurationProperties("yarn-env",
Collections.singletonMap("min_user_id", "1000"), false,
false);
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 00f0e7c..b39ca5d 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -55,7 +55,7 @@ CREATE TABLE hostconfigmapping (create_timestamp BIGINT NOT NULL, host_name VARC
CREATE TABLE metainfo (`metainfo_key` VARCHAR(255), `metainfo_value` LONGTEXT, PRIMARY KEY (`metainfo_key`));
CREATE TABLE ClusterHostMapping (cluster_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, host_name));
CREATE TABLE user_roles (role_name VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, PRIMARY KEY (role_name, user_id));
-CREATE TABLE ambari_sequences (sequence_name VARCHAR(255), value DECIMAL(38) NOT NULL, PRIMARY KEY (sequence_name));
+CREATE TABLE ambari_sequences (sequence_name VARCHAR(255), sequence_value DECIMAL(38) NOT NULL, PRIMARY KEY (sequence_name));
CREATE TABLE confgroupclusterconfigmapping (config_group_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, config_type VARCHAR(255) NOT NULL, version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', create_timestamp BIGINT NOT NULL, PRIMARY KEY(config_group_id, cluster_id, config_type));
CREATE TABLE configgroup (group_id BIGINT, cluster_id BIGINT NOT NULL, group_name VARCHAR(255) NOT NULL, tag VARCHAR(1024) NOT NULL, description VARCHAR(1024), create_timestamp BIGINT NOT NULL, PRIMARY KEY(group_id));
CREATE TABLE configgrouphostmapping (config_group_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY(config_group_id, host_name));
@@ -248,31 +248,31 @@ CREATE INDEX idx_alert_history_state on alert_history(alert_state);
CREATE INDEX idx_alert_group_name on alert_group(group_name);
CREATE INDEX idx_alert_notice_state on alert_notice(notify_state);
-INSERT INTO ambari_sequences(sequence_name, value) values ('cluster_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('host_role_command_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('user_id_seq', 2);
-INSERT INTO ambari_sequences(sequence_name, value) values ('group_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('member_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('configgroup_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('requestschedule_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('resourcefilter_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('viewentity_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('operation_level_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('view_instance_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('resource_type_id_seq', 4);
-INSERT INTO ambari_sequences(sequence_name, value) values ('resource_id_seq', 2);
-INSERT INTO ambari_sequences(sequence_name, value) values ('principal_type_id_seq', 3);
-INSERT INTO ambari_sequences(sequence_name, value) values ('principal_id_seq', 2);
-INSERT INTO ambari_sequences(sequence_name, value) values ('permission_id_seq', 5);
-INSERT INTO ambari_sequences(sequence_name, value) values ('privilege_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('config_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('service_config_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_definition_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_group_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_target_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_history_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_notice_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_current_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('cluster_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('host_role_command_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('user_id_seq', 2);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('group_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('member_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('configgroup_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('requestschedule_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('resourcefilter_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('viewentity_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('operation_level_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('view_instance_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('resource_type_id_seq', 4);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('resource_id_seq', 2);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('principal_type_id_seq', 3);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('principal_id_seq', 2);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('permission_id_seq', 5);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('privilege_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('config_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('service_config_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_definition_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_group_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_target_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_history_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_notice_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_current_id_seq', 0);
insert into adminresourcetype (resource_type_id, resource_type_name)
select 1, 'AMBARI'
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index ccf99af..500313b 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -46,7 +46,7 @@ CREATE TABLE hostconfigmapping (create_timestamp NUMBER(19) NOT NULL, host_name
CREATE TABLE metainfo ("metainfo_key" VARCHAR2(255) NOT NULL, "metainfo_value" CLOB NULL, PRIMARY KEY ("metainfo_key"));
CREATE TABLE ClusterHostMapping (cluster_id NUMBER(19) NOT NULL, host_name VARCHAR2(255) NOT NULL, PRIMARY KEY (cluster_id, host_name));
CREATE TABLE user_roles (role_name VARCHAR2(255) NOT NULL, user_id NUMBER(10) NOT NULL, PRIMARY KEY (role_name, user_id));
-CREATE TABLE ambari_sequences (sequence_name VARCHAR2(50) NOT NULL, value NUMBER(38) NULL, PRIMARY KEY (sequence_name));
+CREATE TABLE ambari_sequences (sequence_name VARCHAR2(50) NOT NULL, sequence_value NUMBER(38) NULL, PRIMARY KEY (sequence_name));
CREATE TABLE configgroup (group_id NUMBER(19), cluster_id NUMBER(19) NOT NULL, group_name VARCHAR2(255) NOT NULL, tag VARCHAR2(1024) NOT NULL, description VARCHAR2(1024), create_timestamp NUMBER(19) NOT NULL, PRIMARY KEY(group_id));
CREATE TABLE confgroupclusterconfigmapping (config_group_id NUMBER(19) NOT NULL, cluster_id NUMBER(19) NOT NULL, config_type VARCHAR2(255) NOT NULL, version_tag VARCHAR2(255) NOT NULL, user_name VARCHAR2(255) DEFAULT '_db', create_timestamp NUMBER(19) NOT NULL, PRIMARY KEY(config_group_id, cluster_id, config_type));
CREATE TABLE configgrouphostmapping (config_group_id NUMBER(19) NOT NULL, host_name VARCHAR2(255) NOT NULL, PRIMARY KEY(config_group_id, host_name));
@@ -240,31 +240,31 @@ CREATE INDEX idx_alert_group_name on alert_group(group_name);
CREATE INDEX idx_alert_notice_state on alert_notice(notify_state);
---------inserting some data-----------
-INSERT INTO ambari_sequences(sequence_name, value) values ('host_role_command_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('user_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('group_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('member_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('cluster_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('configgroup_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('requestschedule_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('resourcefilter_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('viewentity_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('operation_level_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('view_instance_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('resource_type_id_seq', 4);
-INSERT INTO ambari_sequences(sequence_name, value) values ('resource_id_seq', 2);
-INSERT INTO ambari_sequences(sequence_name, value) values ('principal_type_id_seq', 3);
-INSERT INTO ambari_sequences(sequence_name, value) values ('principal_id_seq', 2);
-INSERT INTO ambari_sequences(sequence_name, value) values ('permission_id_seq', 5);
-INSERT INTO ambari_sequences(sequence_name, value) values ('privilege_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('config_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('service_config_id_seq', 1);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_definition_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_group_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_target_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_history_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_notice_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, value) values ('alert_current_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('host_role_command_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('user_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('group_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('member_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('cluster_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('configgroup_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('requestschedule_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('resourcefilter_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('viewentity_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('operation_level_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('view_instance_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('resource_type_id_seq', 4);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('resource_id_seq', 2);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('principal_type_id_seq', 3);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('principal_id_seq', 2);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('permission_id_seq', 5);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('privilege_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('config_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('service_config_id_seq', 1);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_definition_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_group_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_target_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_history_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_notice_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('alert_current_id_seq', 0);
INSERT INTO metainfo("metainfo_key", "metainfo_value") values ('version', '${ambariVersion}');
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 9178336..a13f415 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -75,7 +75,7 @@ CREATE TABLE hostconfigmapping (cluster_id BIGINT NOT NULL, host_name VARCHAR(25
CREATE TABLE metainfo ("metainfo_key" VARCHAR(255), "metainfo_value" VARCHAR, PRIMARY KEY ("metainfo_key"));
-CREATE TABLE ambari_sequences (sequence_name VARCHAR(255) PRIMARY KEY, "value" BIGINT NOT NULL);
+CREATE TABLE ambari_sequences (sequence_name VARCHAR(255) PRIMARY KEY, sequence_value BIGINT NOT NULL);
CREATE TABLE configgroup (group_id BIGINT, cluster_id BIGINT NOT NULL, group_name VARCHAR(255) NOT NULL, tag VARCHAR(1024) NOT NULL, description VARCHAR(1024), create_timestamp BIGINT NOT NULL, PRIMARY KEY(group_id));
@@ -274,7 +274,7 @@ CREATE INDEX idx_alert_notice_state on alert_notice(notify_state);
---------inserting some data-----------
BEGIN;
- INSERT INTO ambari_sequences (sequence_name, "value")
+ INSERT INTO ambari_sequences (sequence_name, sequence_value)
SELECT 'cluster_id_seq', 1
UNION ALL
SELECT 'user_id_seq', 2
http://git-wip-us.apache.org/repos/asf/ambari/blob/1a9abc4e/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index 2dfea87..ff38b24 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -115,7 +115,7 @@ GRANT ALL PRIVILEGES ON TABLE ambari.hostconfigmapping TO :username;
CREATE TABLE ambari.metainfo ("metainfo_key" VARCHAR(255), "metainfo_value" VARCHAR, PRIMARY KEY ("metainfo_key"));
GRANT ALL PRIVILEGES ON TABLE ambari.metainfo TO :username;
-CREATE TABLE ambari.ambari_sequences (sequence_name VARCHAR(255) PRIMARY KEY, "value" BIGINT NOT NULL);
+CREATE TABLE ambari.ambari_sequences (sequence_name VARCHAR(255) PRIMARY KEY, sequence_value BIGINT NOT NULL);
GRANT ALL PRIVILEGES ON TABLE ambari.ambari_sequences TO :username;
CREATE TABLE ambari.configgroup (group_id BIGINT, cluster_id BIGINT NOT NULL, group_name VARCHAR(255) NOT NULL, tag VARCHAR(1024) NOT NULL, description VARCHAR(1024), create_timestamp BIGINT NOT NULL, PRIMARY KEY(group_id));
@@ -348,7 +348,7 @@ CREATE INDEX idx_alert_notice_state on ambari.alert_notice(notify_state);
---------inserting some data-----------
BEGIN;
-INSERT INTO ambari.ambari_sequences (sequence_name, "value")
+INSERT INTO ambari.ambari_sequences (sequence_name, sequence_value)
SELECT 'cluster_id_seq', 1
UNION ALL
SELECT 'user_id_seq', 2
[37/50] [abbrv] git commit: AMBARI-6919. Start/Stop services fails
after ambari upgrade 1.6.0->1.7.0,
1.6.1->1.7.0. On clean cluster too.(vbrodetskyi)
Posted by jo...@apache.org.
AMBARI-6919. Start/Stop services fails after ambari upgrade 1.6.0->1.7.0, 1.6.1->1.7.0. On clean cluster too.(vbrodetskyi)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/127eec22
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/127eec22
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/127eec22
Branch: refs/heads/branch-alerts-dev
Commit: 127eec229ed03414776d16cccad7d486a9bb112c
Parents: f72b323
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Wed Aug 20 13:05:17 2014 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Wed Aug 20 13:05:17 2014 +0300
----------------------------------------------------------------------
ambari-agent/conf/unix/install-helper.sh | 6 +++++
ambari-agent/pom.xml | 4 ++++
.../src/main/package/rpm/posttrans_agent.sh | 25 ++++++++++++++++++++
ambari-server/conf/unix/install-helper.sh | 6 +++++
ambari-server/pom.xml | 4 ++++
.../src/main/package/rpm/posttrans_server.sh | 25 ++++++++++++++++++++
6 files changed, 70 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/127eec22/ambari-agent/conf/unix/install-helper.sh
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/install-helper.sh b/ambari-agent/conf/unix/install-helper.sh
index c2d2a3d..497db8d 100644
--- a/ambari-agent/conf/unix/install-helper.sh
+++ b/ambari-agent/conf/unix/install-helper.sh
@@ -34,6 +34,12 @@ do_install(){
if [ ! -d "$COMMON_DIR" ]; then
ln -s "$COMMON_DIR_AGENT" "$COMMON_DIR"
fi
+ # remove RESOURCE_MANAGEMENT_DIR if it's a directory
+ if [ -d "$RESOURCE_MANAGEMENT_DIR" ]; then # resource_management dir exists
+ if [ ! -L "$RESOURCE_MANAGEMENT_DIR" ]; then # resource_management dir is not link
+ rm -rf "$RESOURCE_MANAGEMENT_DIR"
+ fi
+ fi
# setting resource_management shared resource
if [ ! -d "$RESOURCE_MANAGEMENT_DIR" ]; then
ln -s "$RESOURCE_MANAGEMENT_DIR_AGENT" "$RESOURCE_MANAGEMENT_DIR"
http://git-wip-us.apache.org/repos/asf/ambari/blob/127eec22/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index c98af34..ebf30fa 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -198,6 +198,10 @@
<scriptFile>src/main/package/rpm/preremove.sh</scriptFile>
<fileEncoding>utf-8</fileEncoding>
</preremoveScriptlet>
+ <posttransScriptlet>
+ <scriptFile>src/main/package/rpm/posttrans_agent.sh</scriptFile>
+ <fileEncoding>utf-8</fileEncoding>
+ </posttransScriptlet>
<needarch>x86_64</needarch>
<autoRequires>false</autoRequires>
http://git-wip-us.apache.org/repos/asf/ambari/blob/127eec22/ambari-agent/src/main/package/rpm/posttrans_agent.sh
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/package/rpm/posttrans_agent.sh b/ambari-agent/src/main/package/rpm/posttrans_agent.sh
new file mode 100644
index 0000000..15f824b
--- /dev/null
+++ b/ambari-agent/src/main/package/rpm/posttrans_agent.sh
@@ -0,0 +1,25 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+
+
+RESOURCE_MANAGEMENT_DIR="/usr/lib/python2.6/site-packages/resource_management"
+RESOURCE_MANAGEMENT_DIR_AGENT="/usr/lib/ambari-agent/lib/resource_management"
+
+# setting resource_management shared resource
+if [ ! -d "$RESOURCE_MANAGEMENT_DIR" ]; then
+ ln -s "$RESOURCE_MANAGEMENT_DIR_AGENT" "$RESOURCE_MANAGEMENT_DIR"
+fi
+
+exit 0
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/127eec22/ambari-server/conf/unix/install-helper.sh
----------------------------------------------------------------------
diff --git a/ambari-server/conf/unix/install-helper.sh b/ambari-server/conf/unix/install-helper.sh
index dafe987..30946a5 100644
--- a/ambari-server/conf/unix/install-helper.sh
+++ b/ambari-server/conf/unix/install-helper.sh
@@ -33,6 +33,12 @@ do_install(){
if [ ! -d "$COMMON_DIR" ]; then
ln -s "$COMMON_DIR_SERVER" "$COMMON_DIR"
fi
+ # remove RESOURCE_MANAGEMENT_DIR if it's a directory
+ if [ -d "$RESOURCE_MANAGEMENT_DIR" ]; then # resource_management dir exists
+ if [ ! -L "$RESOURCE_MANAGEMENT_DIR" ]; then # resource_management dir is not link
+ rm -rf "$RESOURCE_MANAGEMENT_DIR"
+ fi
+ fi
# setting resource_management shared resource
if [ ! -d "$RESOURCE_MANAGEMENT_DIR" ]; then
ln -s "$RESOURCE_MANAGEMENT_DIR_SERVER" "$RESOURCE_MANAGEMENT_DIR"
http://git-wip-us.apache.org/repos/asf/ambari/blob/127eec22/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 2ec71ab..2302c62 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -230,6 +230,10 @@
<scriptFile>src/main/package/rpm/preremove.sh</scriptFile>
<fileEncoding>utf-8</fileEncoding>
</preremoveScriptlet>
+ <posttransScriptlet>
+ <scriptFile>src/main/package/rpm/posttrans_server.sh</scriptFile>
+ <fileEncoding>utf-8</fileEncoding>
+ </posttransScriptlet>
<defaultFilemode>644</defaultFilemode>
<defaultDirmode>755</defaultDirmode>
<defaultUsername>root</defaultUsername>
http://git-wip-us.apache.org/repos/asf/ambari/blob/127eec22/ambari-server/src/main/package/rpm/posttrans_server.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/package/rpm/posttrans_server.sh b/ambari-server/src/main/package/rpm/posttrans_server.sh
new file mode 100644
index 0000000..43478f4
--- /dev/null
+++ b/ambari-server/src/main/package/rpm/posttrans_server.sh
@@ -0,0 +1,25 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+
+
+RESOURCE_MANAGEMENT_DIR="/usr/lib/python2.6/site-packages/resource_management"
+RESOURCE_MANAGEMENT_DIR_SERVER="/usr/lib/ambari-server/lib/resource_management"
+
+# setting resource_management shared resource
+if [ ! -d "$RESOURCE_MANAGEMENT_DIR" ]; then
+ ln -s "$RESOURCE_MANAGEMENT_DIR_SERVER" "$RESOURCE_MANAGEMENT_DIR"
+fi
+
+exit 0
\ No newline at end of file
[07/50] [abbrv] git commit: AMBARI-6885. stack_advisor.py from one
stack-version contains another's logic
Posted by jo...@apache.org.
AMBARI-6885. stack_advisor.py from one stack-version contains another's logic
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/32caa435
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/32caa435
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/32caa435
Branch: refs/heads/branch-alerts-dev
Commit: 32caa4359b7fd777fc7ca9b74e6d8a1212d6f6a0
Parents: eefa2fd
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Fri Aug 15 17:14:51 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Mon Aug 18 10:36:04 2014 -0700
----------------------------------------------------------------------
.../stacks/HDP/2.0.6/services/stack_advisor.py | 274 ++++++++-----------
.../stacks/HDP/2.1/services/stack_advisor.py | 68 +++++
.../stacks/2.0.6/common/test_stack_advisor.py | 49 ++--
3 files changed, 211 insertions(+), 180 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/32caa435/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 025caa2..77c57e2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -41,15 +41,6 @@ class HDP206StackAdvisor(StackAdvisor):
"services": servicesList,
"recommendations": {
"blueprint": {
- "configurations": {
- "global": {
- "properties": { }
- },
- "core-site": { },
- "hdfs-site": { },
- "yarn-site": { },
- "hbase-site": { }
- },
"host_groups": [ ]
},
"blueprint_cluster_binding": {
@@ -71,7 +62,7 @@ class HDP206StackAdvisor(StackAdvisor):
hostsForComponent = component["StackServiceComponents"]["hostnames"]
else:
availableHosts = hostsList
- if len(hostsList) > 1 and isNotPreferableOnAmbariServerHost(component):
+ if len(hostsList) > 1 and self.isNotPreferableOnAmbariServerHost(component):
availableHosts = [hostName for hostName in hostsList if not isLocalHost(hostName)]
if isMasterWithMultipleInstances(component):
@@ -81,9 +72,9 @@ class HDP206StackAdvisor(StackAdvisor):
hostsCount = len(availableHosts)
hostsForComponent = availableHosts[:hostsCount]
else:
- hostsForComponent = [getHostForComponent(component, availableHosts)]
+ hostsForComponent = [self.getHostForComponent(component, availableHosts)]
else:
- hostsForComponent = [getHostForComponent(component, availableHosts)]
+ hostsForComponent = [self.getHostForComponent(component, availableHosts)]
#extend 'hostsComponentsMap' with 'hostsForComponent'
for hostName in hostsForComponent:
@@ -94,7 +85,7 @@ class HDP206StackAdvisor(StackAdvisor):
#extend 'hostsComponentsMap' with Slave and Client Components
componentsListList = [service["components"] for service in services["services"]]
componentsList = [item for sublist in componentsListList for item in sublist]
- usedHostsListList = [component["StackServiceComponents"]["hostnames"] for component in componentsList if not isNotValuable(component)]
+ usedHostsListList = [component["StackServiceComponents"]["hostnames"] for component in componentsList if not self.isNotValuable(component)]
utilizedHosts = [item for sublist in usedHostsListList for item in sublist]
freeHosts = [hostName for hostName in hostsList if hostName not in utilizedHosts]
@@ -135,6 +126,46 @@ class HDP206StackAdvisor(StackAdvisor):
return recommendations
pass
+ def getHostForComponent(self, component, hostsList):
+ componentName = component["StackServiceComponents"]["component_name"]
+ scheme = self.defineSelectionScheme(componentName)
+
+ if len(hostsList) == 1:
+ return hostsList[0]
+ else:
+ for key in scheme.keys():
+ if isinstance(key, ( int, long )):
+ if len(hostsList) < key:
+ return hostsList[scheme[key]]
+ return hostsList[scheme['else']]
+
+ def defineSelectionScheme(self, componentName):
+ scheme = self.selectionScheme(componentName)
+ if scheme is None:
+ scheme = {"else": 0}
+ return scheme
+
+ def selectionScheme(self, componentName):
+ return {
+ 'NAMENODE': {"else": 0},
+ 'SECONDARY_NAMENODE': {"else": 1},
+ 'HBASE_MASTER': {6: 0, 31: 2, "else": 3},
+
+ 'HISTORYSERVER': {31: 1, "else": 2},
+ 'RESOURCEMANAGER': {31: 1, "else": 2},
+
+ 'OOZIE_SERVER': {6: 1, 31: 2, "else": 3},
+
+ 'HIVE_SERVER': {6: 1, 31: 2, "else": 4},
+ 'HIVE_METASTORE': {6: 1, 31: 2, "else": 4},
+ 'WEBHCAT_SERVER': {6: 1, 31: 2, "else": 4},
+ }.get(componentName, None)
+
+ def isNotPreferableOnAmbariServerHost(self, component):
+ componentName = component["StackServiceComponents"]["component_name"]
+ service = ['GANGLIA_SERVER', 'NAGIOS_SERVER']
+ return componentName in service
+
def validateComponentLayout(self, services, hosts):
"""Returns array of Validation objects about issues with hostnames components assigned to"""
stackName = services["Versions"]["stack_name"]
@@ -195,7 +226,7 @@ class HDP206StackAdvisor(StackAdvisor):
items.append( { "type": 'host-component', "level": 'ERROR', "message": 'Cardinality violation, cardinality={0}, hosts count={1}'.format(cardinality, str(componentHostsCount)), "component-name": str(componentName) } )
# Validating host-usage
- usedHostsListList = [component["StackServiceComponents"]["hostnames"] for component in componentsList if not isNotValuable(component)]
+ usedHostsListList = [component["StackServiceComponents"]["hostnames"] for component in componentsList if not self.isNotValuable(component)]
usedHostsList = [item for sublist in usedHostsListList for item in sublist]
nonUsedHostsList = [item for item in hostsList if item not in usedHostsList]
for host in nonUsedHostsList:
@@ -204,6 +235,11 @@ class HDP206StackAdvisor(StackAdvisor):
return validations
pass
+ def isNotValuable(self, component):
+ componentName = component["StackServiceComponents"]["component_name"]
+ service = ['JOURNALNODE', 'ZKFC', 'GANGLIA_MONITOR']
+ return componentName in service
+
def recommendConfigurations(self, services, hosts):
stackName = services["Versions"]["stack_name"]
stackVersion = services["Versions"]["stack_version"]
@@ -242,9 +278,7 @@ class HDP206StackAdvisor(StackAdvisor):
def recommendServiceConfigurations(self, service):
return {
"YARN": self.recommendYARNConfigurations,
- "MAPREDUCE2": self.recommendMapReduce2Configurations,
- "HIVE": self.recommendHiveConfigurations,
- "OOZIE": self.recommendOozieConfigurations
+ "MAPREDUCE2": self.recommendMapReduce2Configurations
}.get(service, None)
def putProperty(self, config, configType):
@@ -259,15 +293,6 @@ class HDP206StackAdvisor(StackAdvisor):
putYarnProperty('yarn.scheduler.minimum-allocation-mb', clusterData['ramPerContainer'])
putYarnProperty('yarn.scheduler.maximum-allocation-mb', clusterData['containers'] * clusterData['ramPerContainer'])
- def recommendHiveConfigurations(self, configurations, clusterData):
- containerSize = clusterData['mapMemory'] if clusterData['mapMemory'] > 2048 else clusterData['reduceMemory']
- containerSize = min(clusterData['containers'] * clusterData['ramPerContainer'], containerSize)
- putHiveProperty = self.putProperty(configurations, "hive-site")
- putHiveProperty('hive.auto.convert.join.noconditionaltask.size', int(containerSize / 3) * 1048576)
- putHiveProperty('hive.tez.java.opts', "-server -Xmx" + str(int(0.8 * containerSize))
- + "m -Djava.net.preferIPv4Stack=true -XX:NewRatio=8 -XX:+UseNUMA -XX:+UseParallelGC")
- putHiveProperty('hive.tez.container.size', containerSize)
-
def recommendMapReduce2Configurations(self, configurations, clusterData):
putMapredProperty = self.putProperty(configurations, "mapred-site")
putMapredProperty('yarn.app.mapreduce.am.resource.mb', clusterData['amMemory'])
@@ -278,14 +303,6 @@ class HDP206StackAdvisor(StackAdvisor):
putMapredProperty('mapreduce.reduce.java.opts', "-Xmx" + str(int(0.8 * clusterData['reduceMemory'])) + "m")
putMapredProperty('mapreduce.task.io.sort.mb', int(min(0.4 * clusterData['mapMemory'], 1024)))
- def recommendOozieConfigurations(self, configurations, clusterData):
- if "FALCON_SERVER" in clusterData["components"]:
- putMapredProperty = self.putProperty(configurations, "oozie-site")
- putMapredProperty("oozie.services.ext",
- "org.apache.oozie.service.JMSAccessorService," +
- "org.apache.oozie.service.PartitionDependencyManagerService," +
- "org.apache.oozie.service.HCatAccessorService")
-
def getClusterData(self, servicesList, hosts, components):
hBaseInstalled = False
@@ -380,26 +397,72 @@ class HDP206StackAdvisor(StackAdvisor):
configurations = services["configurations"]
for service in services["services"]:
serviceName = service["StackServices"]["service_name"]
- if serviceName == "MAPREDUCE2":
- mapReduceErrors = validateMapReduce2Configurations(getSiteProperties(configurations, "mapred-site"), recommendedDefaults["mapred-site"]["properties"])
- items.extend(mapReduceErrors)
- elif serviceName == "HIVE":
- hiveErrors = validateHiveConfigurations(getSiteProperties(configurations, "hive-site"), recommendedDefaults["hive-site"]["properties"])
- items.extend(hiveErrors)
- elif serviceName == "STORM":
- oozieErrors = [] #validateStormConfigurations(getSiteProperties(configurations, "storm-site"), recommendedDefaults["storm-site"]["properties"])
- items.extend(oozieErrors)
- elif serviceName == "TEZ":
- tezErrors = validateTezConfigurations(getSiteProperties(configurations, "tez-site"), recommendedDefaults["tez-site"]["properties"])
- items.extend(tezErrors)
- elif serviceName == "YARN":
- yarnErrors = validateYARNConfigurations(getSiteProperties(configurations, "yarn-site"), recommendedDefaults["yarn-site"]["properties"])
- items.extend(yarnErrors)
- else:
- pass
+ validator = self.validateServiceConfigurations(serviceName)
+ if validator is not None:
+ siteName = validator[0]
+ method = validator[1]
+ resultItems = method(getSiteProperties(configurations, siteName), recommendedDefaults[siteName]["properties"])
+ items.extend(resultItems)
return validations
pass
+ def validateServiceConfigurations(self, serviceName):
+ return {
+ "MAPREDUCE2": ["mapred-site", self.validateMapReduce2Configurations],
+ "YARN": ["yarn-site", self.validateYARNConfigurations]
+ }.get(serviceName, None)
+
+ def toConfigurationValidationErrors(self, items, siteName):
+ result = []
+ for item in items:
+ if item["message"] is not None:
+ error = { "type": 'configuration', "level": 'ERROR', "message": item["message"], "config-type": siteName, "config-name": item["config-name"] }
+ result.append(error)
+ return result
+
+ def validatorLessThenDefaultValue(self, properties, recommendedDefaults, propertyName):
+ value = to_number(properties[propertyName])
+ if value is None:
+ return "Value should be integer"
+ defaultValue = to_number(recommendedDefaults[propertyName])
+ if defaultValue is None:
+ return None
+ if value < defaultValue:
+ return "Value is less than the recommended default of {0}".format(defaultValue)
+ return None
+
+ def validateXmxValue(self, properties, recommendedDefaults, propertyName):
+ value = properties[propertyName]
+ defaultValue = recommendedDefaults[propertyName]
+ if defaultValue is None:
+ return "Config's default value can't be null or undefined"
+ if not checkXmxValueFormat(value):
+ return 'Invalid value format'
+ valueInt = formatXmxSizeToBytes(getXmxSize(value))
+ defaultValueXmx = getXmxSize(defaultValue)
+ defaultValueInt = formatXmxSizeToBytes(defaultValueXmx)
+ if valueInt < defaultValueInt:
+ return "Value is less than the recommended default of -Xmx" + defaultValueXmx
+ return None
+
+ def validateMapReduce2Configurations(self, properties, recommendedDefaults):
+ validationItems = [ {"config-name": 'mapreduce.map.java.opts', "message": self.validateXmxValue(properties, recommendedDefaults, 'mapreduce.map.java.opts')},
+ {"config-name": 'mapreduce.reduce.java.opts', "message": self.validateXmxValue(properties, recommendedDefaults, 'mapreduce.reduce.java.opts')},
+ {"config-name": 'mapreduce.task.io.sort.mb', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'mapreduce.task.io.sort.mb')},
+ {"config-name": 'mapreduce.map.memory.mb', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'mapreduce.map.memory.mb')},
+ {"config-name": 'mapreduce.reduce.memory.mb', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'mapreduce.reduce.memory.mb')},
+ {"config-name": 'yarn.app.mapreduce.am.resource.mb', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'yarn.app.mapreduce.am.resource.mb')},
+ {"config-name": 'yarn.app.mapreduce.am.command-opts', "message": self.validateXmxValue(properties, recommendedDefaults, 'yarn.app.mapreduce.am.command-opts')} ]
+ return self.toConfigurationValidationErrors(validationItems, "mapred-site")
+
+ def validateYARNConfigurations(self, properties, recommendedDefaults):
+ validationItems = [ {"config-name": 'yarn.nodemanager.resource.memory-mb', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'yarn.nodemanager.resource.memory-mb')},
+ {"config-name": 'yarn.scheduler.minimum-allocation-mb', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'yarn.scheduler.minimum-allocation-mb')},
+ {"config-name": 'yarn.scheduler.maximum-allocation-mb', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'yarn.scheduler.maximum-allocation-mb')} ]
+ return self.toConfigurationValidationErrors(validationItems, "yarn-site")
+
+
+# Validation helper methods
def getSiteProperties(configurations, siteName):
if configurations[siteName] is None:
return {}
@@ -413,39 +476,6 @@ def to_number(s):
except ValueError:
return None
-def toConfigurationValidationErrors(items, siteName):
- result = []
- for item in items:
- if item["message"] is not None:
- error = { "type": 'configuration', "level": 'ERROR', "message": item["message"], "config-type": siteName, "config-name": item["config-name"] }
- result.append(error)
- return result
-
-def validatorLessThenDefaultValue(properties, recommendedDefaults, propertyName):
- value = to_number(properties[propertyName])
- if value is None:
- return "Value should be integer"
- defaultValue = to_number(recommendedDefaults[propertyName])
- if defaultValue is None:
- return None
- if value < defaultValue:
- return "Value is less than the recommended default of {0}".format(defaultValue)
- return None
-
-def validateXmxValue(properties, recommendedDefaults, propertyName):
- value = properties[propertyName]
- defaultValue = recommendedDefaults[propertyName]
- if defaultValue is None:
- return "Config's default value can't be null or undefined"
- if not checkXmxValueFormat(value):
- return 'Invalid value format'
- valueInt = formatXmxSizeToBytes(getXmxSize(value))
- defaultValueXmx = getXmxSize(defaultValue)
- defaultValueInt = formatXmxSizeToBytes(defaultValueXmx)
- if valueInt < defaultValueInt:
- return "Value is less than the recommended default of -Xmx" + defaultValueXmx
- return None
-
def checkXmxValueFormat(value):
p = re.compile('-Xmx(\d+)(b|k|m|g|p|t|B|K|M|G|P|T)?')
matches = p.findall(value)
@@ -477,58 +507,8 @@ def formatXmxSizeToBytes(value):
}[1]
return to_number(value) * m
-def validateMapReduce2Configurations(properties, recommendedDefaults):
- validationItems = [ {"config-name": 'mapreduce.map.java.opts', "message": validateXmxValue(properties, recommendedDefaults, 'mapreduce.map.java.opts')},
- {"config-name": 'mapreduce.reduce.java.opts', "message": validateXmxValue(properties, recommendedDefaults, 'mapreduce.reduce.java.opts')},
- {"config-name": 'mapreduce.task.io.sort.mb', "message": validatorLessThenDefaultValue(properties, recommendedDefaults, 'mapreduce.task.io.sort.mb')},
- {"config-name": 'mapreduce.map.memory.mb', "message": validatorLessThenDefaultValue(properties, recommendedDefaults, 'mapreduce.map.memory.mb')},
- {"config-name": 'mapreduce.reduce.memory.mb', "message": validatorLessThenDefaultValue(properties, recommendedDefaults, 'mapreduce.reduce.memory.mb')},
- {"config-name": 'yarn.app.mapreduce.am.resource.mb', "message": validatorLessThenDefaultValue(properties, recommendedDefaults, 'yarn.app.mapreduce.am.resource.mb')},
- {"config-name": 'yarn.app.mapreduce.am.command-opts', "message": validateXmxValue(properties, recommendedDefaults, 'yarn.app.mapreduce.am.command-opts')} ]
- return toConfigurationValidationErrors(validationItems, "mapred-site")
-
-def validateHiveConfigurations(properties, recommendedDefaults):
- validationItems = [ {"config-name": 'hive.tez.container.size', "message": validatorLessThenDefaultValue(properties, recommendedDefaults, 'hive.tez.container.size')},
- {"config-name": 'hive.tez.java.opts', "message": validateXmxValue(properties, recommendedDefaults, 'hive.tez.java.opts')},
- {"config-name": 'hive.auto.convert.join.noconditionaltask.size', "message": validatorLessThenDefaultValue(properties, recommendedDefaults, 'hive.auto.convert.join.noconditionaltask.size')} ]
- return toConfigurationValidationErrors(validationItems, "hive-site")
-
-def validateStormConfigurations(properties, recommendedDefaults):
- validationItems = [ {"config-name": 'drpc.childopts', "message": validateXmxValue(properties, recommendedDefaults, 'drpc.childopts')},
- {"config-name": 'ui.childopts', "message": validateXmxValue(properties, recommendedDefaults, 'ui.childopts')},
- {"config-name": 'logviewer.childopts', "message": validateXmxValue(properties, recommendedDefaults, 'logviewer.childopts')} ]
- return toConfigurationValidationErrors(validationItems, "storm-site")
-
-def validateTezConfigurations(properties, recommendedDefaults):
- validationItems = [ {"config-name": 'tez.am.resource.memory.mb', "message": validatorLessThenDefaultValue(properties, recommendedDefaults, 'tez.am.resource.memory.mb')},
- {"config-name": 'tez.am.java.opts', "message": validateXmxValue(properties, recommendedDefaults, 'tez.am.java.opts')} ]
- return toConfigurationValidationErrors(validationItems, "tez-site")
-
-def validateYARNConfigurations(properties, recommendedDefaults):
- validationItems = [ {"config-name": 'yarn.nodemanager.resource.memory-mb', "message": validatorLessThenDefaultValue(properties, recommendedDefaults, 'yarn.nodemanager.resource.memory-mb')},
- {"config-name": 'yarn.scheduler.minimum-allocation-mb', "message": validatorLessThenDefaultValue(properties, recommendedDefaults, 'yarn.scheduler.minimum-allocation-mb')},
- {"config-name": 'yarn.scheduler.maximum-allocation-mb', "message": validatorLessThenDefaultValue(properties, recommendedDefaults, 'yarn.scheduler.maximum-allocation-mb')} ]
- return toConfigurationValidationErrors(validationItems, "yarn-site")
-
-# Helper methods
-def getHostForComponent(component, hostsList):
- componentName = component["StackServiceComponents"]["component_name"]
- scheme = selectionScheme(componentName)
-
- if len(hostsList) == 1:
- return hostsList[0]
- else:
- for key in scheme.keys():
- if isinstance(key, ( int, long )):
- if len(hostsList) < key:
- return hostsList[scheme[key]]
- return hostsList[scheme['else']]
-
-def isNotValuable(component):
- componentName = component["StackServiceComponents"]["component_name"]
- service = ['JOURNALNODE', 'ZKFC', 'APP_TIMELINE_SERVER', 'GANGLIA_MONITOR']
- return componentName in service
+# Recommendation helper methods
def isAlreadyPopulated(component):
if component["StackServiceComponents"]["hostnames"] is not None:
return len(component["StackServiceComponents"]["hostnames"]) > 0
@@ -550,11 +530,6 @@ def isMaster(component):
def isLocalHost(hostName):
return socket.getfqdn(hostName) == socket.getfqdn()
-def isNotPreferableOnAmbariServerHost(component):
- componentName = component["StackServiceComponents"]["component_name"]
- service = ['STORM_UI_SERVER', 'DRPC_SERVER', 'STORM_REST_API', 'NIMBUS', 'GANGLIA_SERVER', 'NAGIOS_SERVER', 'HUE_SERVER']
- return componentName in service
-
def isMasterWithMultipleInstances(component):
componentName = component["StackServiceComponents"]["component_name"]
masters = ['ZOOKEEPER_SERVER', 'HBASE_MASTER']
@@ -572,22 +547,3 @@ def cardinality(componentName):
'HBASE_MASTER': {min: 1},
}.get(componentName, {min:1, max:1})
-def selectionScheme(componentName):
- return {
- 'NAMENODE': {"else": 0},
- 'SECONDARY_NAMENODE': {"else": 1},
- 'HBASE_MASTER': {6: 0, 31: 2, "else": 3},
-
- 'JOBTRACKER': {31: 1, "else": 2},
- 'HISTORYSERVER': {31: 1, "else": 2},
- 'RESOURCEMANAGER': {31: 1, "else": 2},
- 'APP_TIMELINE_SERVER': {31: 1, "else": 2},
-
- 'OOZIE_SERVER': {6: 1, 31: 2, "else": 3},
- 'FALCON_SERVER': {6: 1, 31: 2, "else": 3},
-
- 'HIVE_SERVER': {6: 1, 31: 2, "else": 4},
- 'HIVE_METASTORE': {6: 1, 31: 2, "else": 4},
- 'WEBHCAT_SERVER': {6: 1, 31: 2, "else": 4},
- }.get(componentName, {"else": 0})
-
http://git-wip-us.apache.org/repos/asf/ambari/blob/32caa435/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
index eb2faa1..5d7a3bc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/stack_advisor.py
@@ -27,11 +27,30 @@ class HDP21StackAdvisor(HDP206StackAdvisor):
calculator = super(HDP21StackAdvisor, self).recommendServiceConfigurations(service)
if calculator is None:
return {
+ "OOZIE": self.recommendOozieConfigurations,
+ "HIVE": self.recommendHiveConfigurations,
"TEZ": self.recommendTezConfigurations
}.get(service, None)
else:
return calculator
+ def recommendOozieConfigurations(self, configurations, clusterData):
+ if "FALCON_SERVER" in clusterData["components"]:
+ putMapredProperty = self.putProperty(configurations, "oozie-site")
+ putMapredProperty("oozie.services.ext",
+ "org.apache.oozie.service.JMSAccessorService," +
+ "org.apache.oozie.service.PartitionDependencyManagerService," +
+ "org.apache.oozie.service.HCatAccessorService")
+
+ def recommendHiveConfigurations(self, configurations, clusterData):
+ containerSize = clusterData['mapMemory'] if clusterData['mapMemory'] > 2048 else clusterData['reduceMemory']
+ containerSize = min(clusterData['containers'] * clusterData['ramPerContainer'], containerSize)
+ putHiveProperty = self.putProperty(configurations, "hive-site")
+ putHiveProperty('hive.auto.convert.join.noconditionaltask.size', int(containerSize / 3) * 1048576)
+ putHiveProperty('hive.tez.java.opts', "-server -Xmx" + str(int(0.8 * containerSize))
+ + "m -Djava.net.preferIPv4Stack=true -XX:NewRatio=8 -XX:+UseNUMA -XX:+UseParallelGC")
+ putHiveProperty('hive.tez.container.size', containerSize)
+
def recommendTezConfigurations(self, configurations, clusterData):
putTezProperty = self.putProperty(configurations, "tez-site")
putTezProperty("tez.am.resource.memory.mb", clusterData['amMemory'])
@@ -39,3 +58,52 @@ class HDP21StackAdvisor(HDP206StackAdvisor):
"-server -Xmx" + str(int(0.8 * clusterData["amMemory"]))
+ "m -Djava.net.preferIPv4Stack=true -XX:+UseNUMA -XX:+UseParallelGC")
+ def isNotPreferableOnAmbariServerHost(self, component):
+ componentName = component["StackServiceComponents"]["component_name"]
+ service = ['STORM_UI_SERVER', 'DRPC_SERVER', 'STORM_REST_API', 'NIMBUS', 'GANGLIA_SERVER', 'NAGIOS_SERVER']
+ return componentName in service
+
+ def isNotValuable(self, component):
+ componentName = component["StackServiceComponents"]["component_name"]
+ service = ['JOURNALNODE', 'ZKFC', 'GANGLIA_MONITOR', 'APP_TIMELINE_SERVER']
+ return componentName in service
+
+ def selectionScheme(self, componentName):
+ scheme = super(HDP21StackAdvisor, self).selectionScheme(componentName)
+ if scheme is None:
+ return {
+ 'APP_TIMELINE_SERVER': {31: 1, "else": 2},
+ 'FALCON_SERVER': {6: 1, 31: 2, "else": 3}
+ }.get(componentName, None)
+ else:
+ return scheme
+
+ def validateServiceConfigurations(self, serviceName):
+ validator = super(HDP21StackAdvisor, self).validateServiceConfigurations(serviceName)
+ if validator is None:
+ return {
+ "STORM": ["storm-site", self.validateStormConfigurations],
+ "HIVE": ["hive-site", self.validateHiveConfigurations],
+ "TEZ": ["tez-site", self.validateTezConfigurations]
+ }.get(serviceName, None)
+ else:
+ return validator
+
+ def validateHiveConfigurations(self, properties, recommendedDefaults):
+ validationItems = [ {"config-name": 'hive.tez.container.size', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'hive.tez.container.size')},
+ {"config-name": 'hive.tez.java.opts', "message": self.validateXmxValue(properties, recommendedDefaults, 'hive.tez.java.opts')},
+ {"config-name": 'hive.auto.convert.join.noconditionaltask.size', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'hive.auto.convert.join.noconditionaltask.size')} ]
+ return self.toConfigurationValidationErrors(validationItems, "hive-site")
+
+ def validateStormConfigurations(self, properties, recommendedDefaults):
+ validationItems = [ {"config-name": 'drpc.childopts', "message": self.validateXmxValue(properties, recommendedDefaults, 'drpc.childopts')},
+ {"config-name": 'ui.childopts', "message": self.validateXmxValue(properties, recommendedDefaults, 'ui.childopts')},
+ {"config-name": 'logviewer.childopts', "message": self.validateXmxValue(properties, recommendedDefaults, 'logviewer.childopts')} ]
+ return self.toConfigurationValidationErrors(validationItems, "storm-site")
+
+ def validateTezConfigurations(self, properties, recommendedDefaults):
+ validationItems = [ {"config-name": 'tez.am.resource.memory.mb', "message": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'tez.am.resource.memory.mb')},
+ {"config-name": 'tez.am.java.opts', "message": self.validateXmxValue(properties, recommendedDefaults, 'tez.am.java.opts')} ]
+ return self.toConfigurationValidationErrors(validationItems, "tez-site")
+
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/32caa435/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 3139a3e..b5bdba6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -35,7 +35,7 @@ class TestHDP206StackAdvisor(TestCase):
stack_advisor_impl = imp.load_module('stack_advisor_impl', fp, hdp206StackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
clazz = getattr(stack_advisor_impl, hdp206StackAdvisorClassName)
self.stackAdvisor = clazz()
-
+
def test_recommendationCardinalityALL(self):
servicesInfo = [
{
@@ -98,12 +98,12 @@ class TestHDP206StackAdvisor(TestCase):
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
- expectedMessages = [
- "NameNode and Secondary NameNode cannot be hosted on same machine",
- "NameNode and Secondary NameNode cannot be hosted on same machine",
- "Host is not used"
+ expectedItems = [
+ {"message": "NameNode and Secondary NameNode cannot be hosted on same machine", "host": "host1"},
+ {"message": "NameNode and Secondary NameNode cannot be hosted on same machine", "host": "host1"},
+ {"message": "Host is not used", "host": "host2"}
]
- self.assertValidationMessages(expectedMessages, result)
+ self.assertValidationResult(expectedItems, result)
def test_validationCardinalityALL(self):
servicesInfo = [
@@ -119,10 +119,10 @@ class TestHDP206StackAdvisor(TestCase):
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
- expectedMessages = [
- "Cardinality violation, cardinality=ALL, hosts count=1"
+ expectedItems = [
+ {"message": "Cardinality violation, cardinality=ALL, hosts count=1"}
]
- self.assertValidationMessages(expectedMessages, result)
+ self.assertValidationResult(expectedItems, result)
def test_validationHostIsNotUsedForNonValuableComponent(self):
servicesInfo = [
@@ -138,10 +138,10 @@ class TestHDP206StackAdvisor(TestCase):
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
- expectedMessages = [
- "Host is not used"
+ expectedItems = [
+ {"message": "Host is not used", "host": "host1"}
]
- self.assertValidationMessages(expectedMessages, result)
+ self.assertValidationResult(expectedItems, result)
def test_validationCardinality01TwoHostsAssigned(self):
servicesInfo = [
@@ -156,10 +156,10 @@ class TestHDP206StackAdvisor(TestCase):
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
- expectedMessages = [
- "Cardinality violation, cardinality=0-1, hosts count=2"
+ expectedItems = [
+ {"message": "Cardinality violation, cardinality=0-1, hosts count=2"}
]
- self.assertValidationMessages(expectedMessages, result)
+ self.assertValidationResult(expectedItems, result)
def test_validationHostIsNotUsed(self):
servicesInfo = [
@@ -174,10 +174,10 @@ class TestHDP206StackAdvisor(TestCase):
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
- expectedMessages = [
- "Host is not used"
+ expectedItems = [
+ {"message": "Host is not used", "host": "host2"}
]
- self.assertValidationMessages(expectedMessages, result)
+ self.assertValidationResult(expectedItems, result)
def prepareHosts(self, hostsNames):
@@ -241,8 +241,15 @@ class TestHDP206StackAdvisor(TestCase):
if not len(l1) == len(l2) or not sorted(l1) == sorted(l2):
raise AssertionError("list1={0}, list2={1}".format(l1, l2))
- def assertValidationMessages(self, expectedMessages, result):
- realMessages = [item["message"] for item in result["items"]]
- self.checkEqual(expectedMessages, realMessages)
+ def assertValidationResult(self, expectedItems, result):
+ actualItems = []
+ for item in result["items"]:
+ next = { "message": item["message"] }
+ try:
+ next["host"] = item["host"]
+ except KeyError, err:
+ pass
+ actualItems.append(next)
+ self.checkEqual(expectedItems, actualItems)
[45/50] [abbrv] git commit: AMBARI-6880 - Alerts: Send Definitions
Down Via Commands to the Agent (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-6880 - Alerts: Send Definitions Down Via Commands to the Agent (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8e481286
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8e481286
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8e481286
Branch: refs/heads/branch-alerts-dev
Commit: 8e48128648f0a74942b9e3bcc88261bd2728427e
Parents: 9b6c02f
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Aug 15 14:27:27 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Aug 20 10:48:02 2014 -0400
----------------------------------------------------------------------
.../apache/ambari/server/agent/ActionQueue.java | 51 +++++-
.../ambari/server/agent/AgentCommand.java | 3 +-
.../server/agent/AlertDefinitionCommand.java | 109 ++++++++++++
.../ambari/server/agent/HeartBeatHandler.java | 55 +++++-
.../ambari/server/agent/HeartBeatResponse.java | 56 +++---
.../ambari/server/agent/HeartbeatMonitor.java | 39 +++--
.../server/agent/RegistrationResponse.java | 50 +++++-
.../server/api/services/AmbariMetaInfo.java | 174 +++++++++----------
.../ambari/server/controller/AmbariServer.java | 6 +-
.../AlertDefinitionResourceProvider.java | 101 ++++++++++-
.../server/state/alert/AlertDefinition.java | 61 ++++---
.../state/alert/AlertDefinitionFactory.java | 151 ++++++++++++++++
.../server/state/alert/AlertDefinitionHash.java | 165 +++++++++++++-----
.../ambari/server/agent/TestActionQueue.java | 88 ++++++++--
.../server/api/services/AmbariMetaInfoTest.java | 4 +-
.../AlertDefinitionResourceProviderTest.java | 55 ++++--
.../state/alerts/AlertDefinitionHashTest.java | 86 +++++++--
17 files changed, 980 insertions(+), 274 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/agent/ActionQueue.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ActionQueue.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ActionQueue.java
index 225c7df..2479f37 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ActionQueue.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ActionQueue.java
@@ -18,17 +18,15 @@
package org.apache.ambari.server.agent;
import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.Collections;
import java.util.Iterator;
-import java.util.LinkedList;
import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
+import org.apache.ambari.server.agent.AgentCommand.AgentCommandType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -85,7 +83,44 @@ public class ActionQueue {
}
/**
+ * Dequeue's all commands of a specified type for the given host.
+ *
+ * @param hostname
+ * the host to remove commands for (not {@code null}).
+ * @param commandType
+ * the type of command to remove (not {@code null}).
+   * @return the commands removed, or an empty list if none; {@code null} if
+   *         no command queue exists for the given host.
+ */
+ public List<AgentCommand> dequeue(String hostname,
+ AgentCommandType commandType) {
+ if (null == hostname || null == commandType) {
+ return Collections.emptyList();
+ }
+
+ Queue<AgentCommand> queue = getQueue(hostname);
+ if (null == queue) {
+ return null;
+ }
+
+ List<AgentCommand> removedCommands = new ArrayList<AgentCommand>(
+ queue.size());
+
+ Iterator<AgentCommand> iterator = queue.iterator();
+ while (iterator.hasNext()) {
+ AgentCommand command = iterator.next();
+ if (command.getCommandType() == commandType) {
+ removedCommands.add(command);
+ iterator.remove();
+ }
+ }
+
+ return removedCommands;
+ }
+
+ /**
* Try to dequeue command with provided id.
+ *
* @param hostname
* @param commandId
* @return
@@ -99,8 +134,8 @@ public class ActionQueue {
return null;
} else {
AgentCommand c = null;
- for (Iterator it = q.iterator(); it.hasNext(); ) {
- AgentCommand ac = (AgentCommand) it.next();
+ for (Iterator<AgentCommand> it = q.iterator(); it.hasNext();) {
+ AgentCommand ac = it.next();
if (ac instanceof ExecutionCommand && ((ExecutionCommand) ac)
.getCommandId().equals(commandId)) {
c = ac;
@@ -111,7 +146,7 @@ public class ActionQueue {
return c;
}
}
-
+
public int size(String hostname) {
Queue<AgentCommand> q = getQueue(hostname);
if (q == null) {
@@ -125,6 +160,7 @@ public class ActionQueue {
if (q == null) {
return null;
}
+
List<AgentCommand> l = new ArrayList<AgentCommand>();
AgentCommand command;
@@ -137,6 +173,5 @@ public class ActionQueue {
} while (command != null);
return l;
-
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentCommand.java
index 54faf6a..29805a1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentCommand.java
@@ -34,7 +34,8 @@ public abstract class AgentCommand {
BACKGROUND_EXECUTION_COMMAND,
STATUS_COMMAND,
CANCEL_COMMAND,
- REGISTRATION_COMMAND
+ REGISTRATION_COMMAND,
+ ALERT_DEFINITION_COMMAND
}
public AgentCommandType getCommandType() {
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/agent/AlertDefinitionCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/AlertDefinitionCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/AlertDefinitionCommand.java
new file mode 100644
index 0000000..3c9615f
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/AlertDefinitionCommand.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.agent;
+
+import java.util.List;
+
+import org.apache.ambari.server.state.alert.AlertDefinition;
+import org.apache.ambari.server.state.alert.AlertDefinitionHash;
+
+import com.google.gson.annotations.SerializedName;
+
+/**
+ * The {@link AlertDefinitionCommand} class is used to encapsulate the
+ * {@link AlertDefinition}s that will be returned to an agent given a requested
+ * hash.
+ */
+public class AlertDefinitionCommand extends AgentCommand {
+ @SerializedName("clusterName")
+ private final String m_clusterName;
+
+ @SerializedName("hostName")
+ private final String m_hostName;
+
+ @SerializedName("hash")
+ private final String m_hash;
+
+ @SerializedName("alertDefinitions")
+ private final List<AlertDefinition> m_definitions;
+
+ /**
+ * Constructor.
+ *
+ * @param clusterName
+   *          the name of the cluster this command is for (not {@code null}).
+ * @param hostName
+ * @param hash
+ * @param definitions
+ *
+ * @see AlertDefinitionHash
+ */
+ public AlertDefinitionCommand(String clusterName, String hostName,
+ String hash, List<AlertDefinition> definitions) {
+ super(AgentCommandType.ALERT_DEFINITION_COMMAND);
+
+ m_clusterName = clusterName;
+ m_hostName = hostName;
+ m_hash = hash;
+ m_definitions = definitions;
+ }
+
+  /**
+   * {@inheritDoc}
+   */
+ @Override
+ public AgentCommandType getCommandType() {
+ return AgentCommandType.ALERT_DEFINITION_COMMAND;
+ }
+
+ /**
+ * Gets the global hash for all alert definitions for a given host.
+ *
+ * @return the hash (never {@code null}).
+ */
+ public String getHash() {
+ return m_hash;
+ }
+
+ /**
+ * Gets the alert definitions
+ *
+   * @return the alert definitions to send to the agent.
+ */
+ public List<AlertDefinition> getAlertDefinitions() {
+ return m_definitions;
+ }
+
+ /**
+ * Gets the name of the cluster.
+ *
+ * @return the cluster name (not {@code null}).
+ */
+ public String getClusterName() {
+ return m_clusterName;
+ }
+
+ /**
+ * Gets the host name.
+ *
+ * @return the host name (not {@code null}).
+ */
+ public String getHostName() {
+ return m_hostName;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
index 8a818a6..65b7b6f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
@@ -58,6 +58,7 @@ import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.alert.AlertDefinition;
import org.apache.ambari.server.state.alert.AlertDefinitionHash;
import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
import org.apache.ambari.server.state.host.HostHealthyHeartbeatEvent;
@@ -223,10 +224,6 @@ public class HeartBeatHandler {
annotateResponse(hostname, response);
}
- // send the alert definition hash for this host
- Map<String, String> alertDefinitionHashes = alertDefinitionHash.getHashes(hostname);
- response.setAlertDefinitionHash(alertDefinitionHashes);
-
return response;
}
@@ -597,9 +594,13 @@ public class HeartBeatHandler {
response.addCancelCommand((CancelCommand) ac);
break;
}
+ case ALERT_DEFINITION_COMMAND: {
+ response.addAlertDefinitionCommand((AlertDefinitionCommand) ac);
+ break;
+ }
default:
- LOG.error("There is no action for agent command =" +
- ac.getCommandType().name());
+ LOG.error("There is no action for agent command ="
+ + ac.getCommandType().name());
}
}
}
@@ -677,6 +678,7 @@ public class HeartBeatHandler {
clusterFsm.addHost(hostname);
hostObject = clusterFsm.getHost(hostname);
}
+
// Resetting host state
hostObject.setState(HostState.INIT);
@@ -693,6 +695,7 @@ public class HeartBeatHandler {
null != register.getPublicHostname() ? register.getPublicHostname() : hostname,
new AgentVersion(register.getAgentVersion()), now, register.getHardwareProfile(),
register.getAgentEnv()));
+
RegistrationResponse response = new RegistrationResponse();
if (cmds.isEmpty()) {
//No status commands needed let the fsm know that status step is done
@@ -706,6 +709,10 @@ public class HeartBeatHandler {
response.setResponseStatus(RegistrationStatus.OK);
+ // force the registering agent host to receive its list of alert definitions
+ List<AlertDefinitionCommand> alertDefinitionCommands = getAlertDefinitionCommands(hostname);
+ response.setAlertDefinitionCommands(alertDefinitionCommands);
+
Long requestId = 0L;
hostResponseIds.put(hostname, requestId);
response.setResponseId(requestId);
@@ -771,4 +778,40 @@ public class HeartBeatHandler {
return result;
}
+
+ /**
+ * Gets the {@link AlertDefinitionCommand} instances that need to be sent for
+ * each cluster that the registering host is a member of.
+ *
+ * @param hostname
+ * @return
+ * @throws AmbariException
+ */
+ private List<AlertDefinitionCommand> getAlertDefinitionCommands(
+ String hostname) throws AmbariException {
+
+ Set<Cluster> hostClusters = clusterFsm.getClustersForHost(hostname);
+ if (null == hostClusters || hostClusters.size() == 0) {
+ return null;
+ }
+
+ List<AlertDefinitionCommand> commands = new ArrayList<AlertDefinitionCommand>();
+
+ // for every cluster this host is a member of, build the command
+ for (Cluster cluster : hostClusters) {
+ String clusterName = cluster.getClusterName();
+ alertDefinitionHash.invalidate(clusterName, hostname);
+
+ List<AlertDefinition> definitions = alertDefinitionHash.getAlertDefinitions(
+ clusterName, hostname);
+
+ String hash = alertDefinitionHash.getHash(clusterName, hostname);
+ AlertDefinitionCommand command = new AlertDefinitionCommand(clusterName,
+ hostname, hash, definitions);
+
+ commands.add(command);
+ }
+
+ return commands;
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
index 24bd8a2..0dff507 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
@@ -20,7 +20,6 @@ package org.apache.ambari.server.agent;
import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import org.codehaus.jackson.annotate.JsonProperty;
@@ -35,20 +34,19 @@ public class HeartBeatResponse {
private List<StatusCommand> statusCommands = new ArrayList<StatusCommand>();
private List<CancelCommand> cancelCommands = new ArrayList<CancelCommand>();
+ /**
+   * {@link AlertDefinitionCommand}s are used to instruct the agent as to which
+ * alert definitions it needs to schedule.
+ */
+ @JsonProperty("alertDefinitionCommands")
+ private List<AlertDefinitionCommand> alertDefinitionCommands = new ArrayList<AlertDefinitionCommand>();
+
+
private RegistrationCommand registrationCommand;
private boolean restartAgent = false;
private boolean hasMappedComponents = false;
- /**
- * A mapping between cluster name and the alert defintion hash for that
- * cluster. The alert definition hash for a cluster is a hashed value of all
- * of the UUIDs for each alert definition that the agent host should be
- * scheduling. If any of the alert definitions change, their UUID will change
- * which will cause this hash value to change.
- */
- private Map<String, String> alertDefinitionHashes = null;
-
@JsonProperty("responseId")
public long getResponseId() {
return responseId;
@@ -99,6 +97,28 @@ public class HeartBeatResponse {
this.registrationCommand = registrationCommand;
}
+ /**
+ * Gets the alert definition commands that contain the alert definitions for
+ * each cluster that the host is a member of.
+ *
+   * @return the commands, or {@code null} for none.
+ */
+ public List<AlertDefinitionCommand> getAlertDefinitionCommands() {
+ return alertDefinitionCommands;
+ }
+
+ /**
+ * Sets the alert definition commands that contain the alert definitions for
+ * each cluster that the host is a member of.
+ *
+ * @param commands
+ * the commands, or {@code null} for none.
+ */
+ public void setAlertDefinitionCommands(List<AlertDefinitionCommand> commands) {
+ alertDefinitionCommands = commands;
+ }
+
@JsonProperty("restartAgent")
public boolean isRestartAgent() {
return restartAgent;
@@ -119,16 +139,6 @@ public class HeartBeatResponse {
this.hasMappedComponents = hasMappedComponents;
}
- @JsonProperty("alertDefinitionHashes")
- public Map<String, String> getAlertDefinitionHash() {
- return alertDefinitionHashes;
- }
-
- @JsonProperty("alertDefinitionHashes")
- public void setAlertDefinitionHash(Map<String, String> alertDefinitionHashes) {
- this.alertDefinitionHashes = alertDefinitionHashes;
- }
-
public void addExecutionCommand(ExecutionCommand execCmd) {
executionCommands.add(execCmd);
}
@@ -141,6 +151,10 @@ public class HeartBeatResponse {
cancelCommands.add(cancelCmd);
}
+ public void addAlertDefinitionCommand(AlertDefinitionCommand command) {
+ alertDefinitionCommands.add(command);
+ }
+
@Override
public String toString() {
StringBuilder buffer = new StringBuilder("HeartBeatResponse{");
@@ -148,9 +162,9 @@ public class HeartBeatResponse {
buffer.append(", executionCommands=").append(executionCommands);
buffer.append(", statusCommands=").append(statusCommands);
buffer.append(", cancelCommands=").append(cancelCommands);
+ buffer.append(", alertDefinitionCommands=").append(alertDefinitionCommands);
buffer.append(", registrationCommand=").append(registrationCommand);
buffer.append(", restartAgent=").append(restartAgent);
- buffer.append(", alertDefinitionHashes=").append(alertDefinitionHashes);
buffer.append('}');
return buffer.toString();
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
index 9eab651..5336694 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
@@ -18,7 +18,6 @@
package org.apache.ambari.server.agent;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GLOBAL;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
@@ -30,6 +29,7 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VER
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
@@ -39,7 +39,6 @@ import org.apache.ambari.server.actionmanager.ActionManager;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.AmbariManagementController;
-import org.apache.ambari.server.controller.MaintenanceStateHelper;
import org.apache.ambari.server.state.Alert;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
@@ -62,7 +61,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.google.inject.Injector;
-import java.util.HashSet;
/**
* Monitors the node state and heartbeats.
@@ -83,14 +81,14 @@ public class HeartbeatMonitor implements Runnable {
public HeartbeatMonitor(Clusters clusters, ActionQueue aq, ActionManager am,
int threadWakeupInterval, Injector injector) {
this.clusters = clusters;
- this.actionQueue = aq;
- this.actionManager = am;
+ actionQueue = aq;
+ actionManager = am;
this.threadWakeupInterval = threadWakeupInterval;
- this.configHelper = injector.getInstance(ConfigHelper.class);
- this.ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
- this.ambariManagementController = injector.getInstance(
+ configHelper = injector.getInstance(ConfigHelper.class);
+ ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
+ ambariManagementController = injector.getInstance(
AmbariManagementController.class);
- this.configuration = injector.getInstance(Configuration.class);
+ configuration = injector.getInstance(Configuration.class);
}
public void shutdown() {
@@ -246,37 +244,38 @@ public class HeartbeatMonitor implements Runnable {
// apply config group overrides
//Config clusterConfig = cluster.getDesiredConfigByType(GLOBAL);
Collection<Config> clusterConfigs = cluster.getAllConfigs();
-
+
for(Config clusterConfig: clusterConfigs) {
- if(!clusterConfig.getType().endsWith("-env"))
+ if(!clusterConfig.getType().endsWith("-env")) {
continue;
-
+ }
+
if (clusterConfig != null) {
// cluster config for 'global'
Map<String, String> props = new HashMap<String, String>(clusterConfig.getProperties());
-
+
// Apply global properties for this host from all config groups
Map<String, Map<String, String>> allConfigTags = configHelper
.getEffectiveDesiredTags(cluster, hostname);
-
+
Map<String, Map<String, String>> configTags = new HashMap<String,
Map<String, String>>();
-
+
for (Map.Entry<String, Map<String, String>> entry : allConfigTags.entrySet()) {
if (entry.getKey().equals(clusterConfig.getType())) {
configTags.put(clusterConfig.getType(), entry.getValue());
}
}
-
+
Map<String, Map<String, String>> properties = configHelper
.getEffectiveConfigProperties(cluster, configTags);
-
+
if (!properties.isEmpty()) {
for (Map<String, String> propertyMap : properties.values()) {
props.putAll(propertyMap);
}
}
-
+
configurations.put(clusterConfig.getType(), props);
Map<String, Map<String, String>> attrs = new TreeMap<String, Map<String, String>>();
@@ -298,7 +297,9 @@ public class HeartbeatMonitor implements Runnable {
Collection<Alert> clusterAlerts = cluster.getAlerts();
Collection<Alert> alerts = new HashSet<Alert>();
for (Alert alert : clusterAlerts) {
- if (!alert.getName().equals("host_alert")) alerts.add(alert);
+ if (!alert.getName().equals("host_alert")) {
+ alerts.add(alert);
+ }
}
if (alerts.size() > 0) {
statusCmd = new NagiosAlertCommand();
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/agent/RegistrationResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/RegistrationResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/RegistrationResponse.java
index dae80bb..8a24560 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/RegistrationResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/RegistrationResponse.java
@@ -18,6 +18,7 @@
package org.apache.ambari.server.agent;
+import java.util.ArrayList;
import java.util.List;
import org.codehaus.jackson.annotate.JsonProperty;
@@ -32,6 +33,13 @@ public class RegistrationResponse {
private RegistrationStatus response;
/**
+   * {@link AlertDefinitionCommand}s are used to instruct the agent as to which
+ * alert definitions it needs to schedule.
+ */
+ @JsonProperty("alertDefinitionCommands")
+ private List<AlertDefinitionCommand> alertDefinitionCommands = new ArrayList<AlertDefinitionCommand>();
+
+ /**
* exitstatus is a code of error which was rised on server side.
* exitstatus = 0 (OK - Default)
* exitstatus = 1 (Registration failed because
@@ -44,12 +52,12 @@ public class RegistrationResponse {
* log - message, which will be printed to agents log
*/
@JsonProperty("log")
- private String log;
-
+ private String log;
+
//Response id to start with, usually zero.
@JsonProperty("responseId")
private long responseId;
-
+
@JsonProperty("statusCommands")
private List<StatusCommand> statusCommands = null;
@@ -69,6 +77,28 @@ public class RegistrationResponse {
this.statusCommands = statusCommands;
}
+ /**
+ * Gets the alert definition commands that contain the alert definitions for
+ * each cluster that the host is a member of.
+ *
+   * @return the commands, or {@code null} for none.
+ */
+ public List<AlertDefinitionCommand> getAlertDefinitionCommands() {
+ return alertDefinitionCommands;
+ }
+
+ /**
+ * Sets the alert definition commands that contain the alert definitions for
+ * each cluster that the host is a member of.
+ *
+ * @param commands
+ * the commands, or {@code null} for none.
+ */
+ public void setAlertDefinitionCommands(List<AlertDefinitionCommand> commands) {
+ alertDefinitionCommands = commands;
+ }
+
public long getResponseId() {
return responseId;
}
@@ -84,13 +114,15 @@ public class RegistrationResponse {
public void setLog(String log) {
this.log = log;
}
-
+
@Override
public String toString() {
- return "RegistrationResponse{" +
- "response=" + response +
- ", responseId=" + responseId +
- ", statusCommands=" + statusCommands +
- '}';
+ StringBuilder buffer = new StringBuilder("RegistrationResponse{");
+ buffer.append("response=").append(response);
+ buffer.append(", responseId=").append(responseId);
+ buffer.append(", statusCommands=").append(statusCommands);
+ buffer.append(", alertDefinitionCommands=").append(alertDefinitionCommands);
+ buffer.append('}');
+ return buffer.toString();
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
index 80af575..3347a77 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
@@ -60,9 +60,7 @@ import org.apache.ambari.server.state.Stack;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.alert.AlertDefinition;
-import org.apache.ambari.server.state.alert.MetricSource;
-import org.apache.ambari.server.state.alert.Source;
-import org.apache.ambari.server.state.alert.SourceType;
+import org.apache.ambari.server.state.alert.AlertDefinitionFactory;
import org.apache.ambari.server.state.stack.LatestRepoCallable;
import org.apache.ambari.server.state.stack.MetricDefinition;
import org.apache.ambari.server.state.stack.RepositoryXml;
@@ -72,12 +70,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.JsonDeserializationContext;
-import com.google.gson.JsonDeserializer;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParseException;
import com.google.gson.reflect.TypeToken;
import com.google.inject.Inject;
import com.google.inject.Injector;
@@ -133,12 +125,16 @@ public class AmbariMetaInfo {
private File stackRoot;
private File serverVersionFile;
private File customActionRoot;
+
@Inject
private MetainfoDAO metainfoDAO;
+
@Inject
Injector injector;
-
-
+
+ @Inject
+ private AlertDefinitionFactory alertDefinitionFactory;
+
// Required properties by stack version
private final Map<StackId, Map<String, Map<String, PropertyInfo>>> requiredProperties =
new HashMap<StackId, Map<String, Map<String, PropertyInfo>>>();
@@ -153,11 +149,11 @@ public class AmbariMetaInfo {
public AmbariMetaInfo(Configuration conf) throws Exception {
String stackPath = conf.getMetadataPath();
String serverVersionFilePath = conf.getServerVersionFilePath();
- this.stackRoot = new File(stackPath);
- this.serverVersionFile = new File(serverVersionFilePath);
- this.customActionRoot = new File(conf.getCustomActionDefinitionPath());
+ stackRoot = new File(stackPath);
+ serverVersionFile = new File(serverVersionFilePath);
+ customActionRoot = new File(conf.getCustomActionDefinitionPath());
}
-
+
public AmbariMetaInfo(File stackRoot, File serverVersionFile) throws Exception {
this.stackRoot = stackRoot;
this.serverVersionFile = serverVersionFile;
@@ -174,6 +170,8 @@ public class AmbariMetaInfo {
readServerVersion();
getConfigurationInformation(stackRoot);
getCustomActionDefinitions(customActionRoot);
+
+ alertDefinitionFactory = injector.getInstance(AlertDefinitionFactory.class);
}
/**
@@ -190,13 +188,14 @@ public class AmbariMetaInfo {
String serviceName, String componentName) throws AmbariException {
ComponentInfo component = null;
List<ComponentInfo> components = getComponentsByService(stackName, version, serviceName);
- if (components != null)
+ if (components != null) {
for (ComponentInfo cmp : components) {
if (cmp.getName().equals(componentName)) {
component = cmp;
break;
}
}
+ }
return component;
}
@@ -227,24 +226,27 @@ public class AmbariMetaInfo {
List<ComponentInfo> componentsByService = getComponentsByService(stackName, version, serviceName);
- if (componentsByService.size() == 0)
+ if (componentsByService.size() == 0) {
throw new StackAccessException("stackName=" + stackName
+ ", stackVersion=" + version
+ ", serviceName=" + serviceName
+ ", componentName=" + componentName);
+ }
ComponentInfo componentResult = null;
for (ComponentInfo component : componentsByService) {
- if (component.getName().equals(componentName))
+ if (component.getName().equals(componentName)) {
componentResult = component;
+ }
}
- if (componentResult == null)
+ if (componentResult == null) {
throw new StackAccessException("stackName=" + stackName
+ ", stackVersion=" + version
+ ", serviceName=" + serviceName
+ ", componentName=" + componentName);
+ }
return componentResult;
}
@@ -334,8 +336,9 @@ public class AmbariMetaInfo {
List<RepositoryInfo> repositoriesResult = new ArrayList<RepositoryInfo>();
for (RepositoryInfo repository : repositories) {
- if (repository.getOsType().equals(osType))
+ if (repository.getOsType().equals(osType)) {
repositoriesResult.add(repository);
+ }
}
return repositoriesResult;
}
@@ -345,22 +348,25 @@ public class AmbariMetaInfo {
List<RepositoryInfo> repositories = getRepositories(stackName, version, osType);
- if (repositories.size() == 0)
+ if (repositories.size() == 0) {
throw new StackAccessException("stackName=" + stackName
+ ", stackVersion=" + version
+ ", osType=" + osType
+ ", repoId=" + repoId);
+ }
RepositoryInfo repoResult = null;
for (RepositoryInfo repository : repositories) {
- if (repository.getRepoId().equals(repoId))
+ if (repository.getRepoId().equals(repoId)) {
repoResult = repository;
+ }
}
- if (repoResult == null)
+ if (repoResult == null) {
throw new StackAccessException("stackName=" + stackName
+ ", stackVersion= " + version
+ ", osType=" + osType
+ ", repoId= " + repoId);
+ }
return repoResult;
}
@@ -452,10 +458,10 @@ public class AmbariMetaInfo {
Map<String, Map<String, String>> propertiesResult = new HashMap<String, Map<String, String>>();
ServiceInfo service = getServiceInfo(stackName, version, serviceName);
- if (service != null)
+ if (service != null) {
if (serviceName.equals(service.getName())) {
List<PropertyInfo> properties = service.getProperties();
- if (properties != null)
+ if (properties != null) {
for (PropertyInfo propertyInfo : properties) {
Map<String, String> fileProperties = propertiesResult
.get(propertyInfo.getFilename());
@@ -471,7 +477,9 @@ public class AmbariMetaInfo {
}
}
+ }
}
+ }
return propertiesResult;
}
@@ -497,10 +505,11 @@ public class AmbariMetaInfo {
}
services = stack.getServices();
- if (services != null)
+ if (services != null) {
for (ServiceInfo service : services) {
servicesInfoResult.put(service.getName(), service);
}
+ }
return servicesInfoResult;
}
@@ -508,13 +517,15 @@ public class AmbariMetaInfo {
Map<String, ServiceInfo> services = getServices(stackName, version);
- if (services.size() == 0)
+ if (services.size() == 0) {
throw new StackAccessException("stackName=" + stackName + ", stackVersion=" + version + ", serviceName=" + serviceName);
+ }
ServiceInfo serviceInfo = services.get(serviceName);
- if (serviceInfo == null)
+ if (serviceInfo == null) {
throw new StackAccessException("stackName=" + stackName + ", stackVersion=" + version + ", serviceName=" + serviceName);
+ }
return serviceInfo;
@@ -532,13 +543,14 @@ public class AmbariMetaInfo {
}
services = stack.getServices();
- if (services != null)
+ if (services != null) {
for (ServiceInfo service : services) {
if (serviceName.equals(service.getName())) {
serviceInfoResult = service;
break;
}
}
+ }
return serviceInfoResult;
}
@@ -546,8 +558,9 @@ public class AmbariMetaInfo {
throws AmbariException {
List<ServiceInfo> servicesResult = null;
StackInfo stack = getStackInfo(stackName, version);
- if (stack != null)
+ if (stack != null) {
servicesResult = stack.getServices();
+ }
return servicesResult;
}
@@ -568,18 +581,18 @@ public class AmbariMetaInfo {
throws AmbariException{
HashSet<String> needRestartServices = new HashSet<String>();
-
+
List<ServiceInfo> serviceInfos = getSupportedServices(stackName, version);
-
-
+
+
for (ServiceInfo service : serviceInfos) {
if (service.isRestartRequiredAfterChange() != null && service.isRestartRequiredAfterChange()) {
needRestartServices.add(service.getName());
}
}
return needRestartServices;
- }
-
+ }
+
public List<StackInfo> getSupportedStacks() {
return stacksResult;
}
@@ -601,18 +614,21 @@ public class AmbariMetaInfo {
Set<Stack> supportedStackNames = getStackNames();
- if (supportedStackNames.size() == 0)
+ if (supportedStackNames.size() == 0) {
throw new StackAccessException("stackName=" + stackName);
+ }
Stack stackResult = null;
for (Stack stack : supportedStackNames) {
- if (stack.getStackName().equals(stackName))
+ if (stack.getStackName().equals(stackName)) {
stackResult = stack;
+ }
}
- if (stackResult == null)
+ if (stackResult == null) {
throw new StackAccessException("stackName=" + stackName);
+ }
return stackResult;
}
@@ -621,8 +637,9 @@ public class AmbariMetaInfo {
Set<StackInfo> stackVersions = new HashSet<StackInfo>();
for (StackInfo stackInfo : stacksResult) {
- if (stackName.equals(stackInfo.getName()))
+ if (stackName.equals(stackInfo.getName())) {
stackVersions.add(stackInfo);
+ }
}
return stackVersions;
}
@@ -638,9 +655,10 @@ public class AmbariMetaInfo {
}
}
- if (stackInfoResult == null)
+ if (stackInfoResult == null) {
throw new StackAccessException("stackName=" + stackName
+ ", stackVersion=" + version);
+ }
return stackInfoResult;
}
@@ -659,24 +677,27 @@ public class AmbariMetaInfo {
throws AmbariException {
Set<PropertyInfo> properties = getProperties(stackName, version, serviceName);
- if (properties.size() == 0)
+ if (properties.size() == 0) {
throw new StackAccessException("stackName=" + stackName
+ ", stackVersion=" + version
+ ", serviceName=" + serviceName
+ ", propertyName=" + propertyName);
+ }
Set<PropertyInfo> propertyResult = new HashSet<PropertyInfo>();
for (PropertyInfo property : properties) {
- if (property.getName().equals(propertyName))
+ if (property.getName().equals(propertyName)) {
propertyResult.add(property);
+ }
}
- if (propertyResult.isEmpty())
+ if (propertyResult.isEmpty()) {
throw new StackAccessException("stackName=" + stackName
+ ", stackVersion=" + version
+ ", serviceName=" + serviceName
+ ", propertyName=" + propertyName);
+ }
return propertyResult;
}
@@ -703,28 +724,31 @@ public class AmbariMetaInfo {
Set<OperatingSystemInfo> operatingSystems = getOperatingSystems(stackName, version);
- if (operatingSystems.size() == 0)
+ if (operatingSystems.size() == 0) {
throw new StackAccessException("stackName=" + stackName
+ ", stackVersion=" + version
+ ", osType=" + osType);
+ }
OperatingSystemInfo resultOperatingSystem = null;
for (OperatingSystemInfo operatingSystem : operatingSystems) {
- if (operatingSystem.getOsType().equals(osType))
+ if (operatingSystem.getOsType().equals(osType)) {
resultOperatingSystem = operatingSystem;
+ }
}
- if (resultOperatingSystem == null)
+ if (resultOperatingSystem == null) {
throw new StackAccessException("stackName=" + stackName
+ ", stackVersion=" + version
+ ", osType=" + osType);
+ }
return resultOperatingSystem;
}
private void readServerVersion() throws Exception {
- File versionFile = this.serverVersionFile;
+ File versionFile = serverVersionFile;
if (!versionFile.exists()) {
throw new AmbariException("Server version file does not exist.");
}
@@ -773,10 +797,11 @@ public class AmbariMetaInfo {
+ ", stackRoot = " + stackRootAbsPath);
}
- if (!stackRoot.isDirectory() && !stackRoot.exists())
+ if (!stackRoot.isDirectory() && !stackRoot.exists()) {
throw new IOException("" + Configuration.METADETA_DIR_PATH
+ " should be a directory with stack"
+ ", stackRoot = " + stackRootAbsPath);
+ }
StackExtensionHelper stackExtensionHelper = new StackExtensionHelper(injector, stackRoot);
stackExtensionHelper.fillInfo();
@@ -961,8 +986,9 @@ public class AmbariMetaInfo {
// validate existing
RepositoryInfo ri = getRepository(stackName, stackVersion, osType, repoId);
- if (!stackRoot.exists())
+ if (!stackRoot.exists()) {
throw new StackAccessException("Stack root does not exist.");
+ }
ri.setBaseUrl(newBaseUrl);
@@ -1048,7 +1074,7 @@ public class AmbariMetaInfo {
}
return requiredProperties;
}
-
+
/**
* @param stackName the stack name
* @param stackVersion the stack version
@@ -1058,53 +1084,18 @@ public class AmbariMetaInfo {
*/
public Set<AlertDefinition> getAlertDefinitions(String stackName, String stackVersion,
String serviceName) throws AmbariException {
-
+
ServiceInfo svc = getService(stackName, stackVersion, serviceName);
+ File alertsFile = svc.getAlertsFile();
- if (null == svc.getAlertsFile() || !svc.getAlertsFile().exists()) {
+ if (null == alertsFile || !alertsFile.exists()) {
LOG.debug("Alerts file for " + stackName + "/" + stackVersion + "/" + serviceName + " not found.");
return null;
}
-
- Map<String, List<AlertDefinition>> map = null;
-
- GsonBuilder builder = new GsonBuilder().registerTypeAdapter(Source.class,
- new JsonDeserializer<Source>() {
- @Override
- public Source deserialize(JsonElement json, Type typeOfT,
- JsonDeserializationContext context) throws JsonParseException {
- JsonObject jsonObj = (JsonObject) json;
-
- SourceType type = SourceType.valueOf(jsonObj.get("type").getAsString());
- Class<? extends Source> cls = null;
-
- switch (type) {
- case METRIC:
- cls = MetricSource.class;
- break;
- default:
- break;
- }
-
- if (null != cls)
- return context.deserialize(json, cls);
- else
- return null;
- }
- });
-
- Gson gson = builder.create();
-
- try {
- Type type = new TypeToken<Map<String, List<AlertDefinition>>>(){}.getType();
- map = gson.fromJson(new FileReader(svc.getAlertsFile()), type);
- } catch (Exception e) {
- LOG.error ("Could not read the alert definition file", e);
- throw new AmbariException("Could not read alert definition file", e);
- }
Set<AlertDefinition> defs = new HashSet<AlertDefinition>();
-
+ Map<String, List<AlertDefinition>> map = alertDefinitionFactory.getAlertDefinitions(alertsFile);
+
for (Entry<String, List<AlertDefinition>> entry : map.entrySet()) {
for (AlertDefinition ad : entry.getValue()) {
ad.setServiceName(serviceName);
@@ -1114,8 +1105,7 @@ public class AmbariMetaInfo {
}
defs.addAll(entry.getValue());
}
-
+
return defs;
}
-
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index e9b0c9e..489fbb2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -59,7 +59,6 @@ import org.apache.ambari.server.controller.internal.ViewPermissionResourceProvid
import org.apache.ambari.server.controller.nagios.NagiosPropertyProvider;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.PersistenceType;
-import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
import org.apache.ambari.server.orm.dao.BlueprintDAO;
import org.apache.ambari.server.orm.dao.ClusterDAO;
import org.apache.ambari.server.orm.dao.GroupDAO;
@@ -92,7 +91,6 @@ import org.apache.ambari.server.security.unsecured.rest.CertificateSign;
import org.apache.ambari.server.security.unsecured.rest.ConnectionInfo;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.alert.AlertDefinitionHash;
import org.apache.ambari.server.utils.StageUtils;
import org.apache.ambari.server.utils.VersionUtils;
import org.apache.ambari.server.view.ViewRegistry;
@@ -534,9 +532,7 @@ public class AmbariServer {
injector.getInstance(Gson.class), ambariMetaInfo);
StackDependencyResourceProvider.init(ambariMetaInfo);
ClusterResourceProvider.init(injector.getInstance(BlueprintDAO.class), ambariMetaInfo, injector.getInstance(ConfigHelper.class));
- AlertDefinitionResourceProvider.init(
- injector.getInstance(AlertDefinitionDAO.class),
- injector.getInstance(AlertDefinitionHash.class));
+ AlertDefinitionResourceProvider.init(injector);
PermissionResourceProvider.init(injector.getInstance(PermissionDAO.class));
ViewPermissionResourceProvider.init(injector.getInstance(PermissionDAO.class));
PrivilegeResourceProvider.init(injector.getInstance(PrivilegeDAO.class), injector.getInstance(UserDAO.class),
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
index 83bd7b1..e3b5d93 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
@@ -28,6 +28,9 @@ import java.util.Set;
import java.util.UUID;
import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.agent.ActionQueue;
+import org.apache.ambari.server.agent.AgentCommand.AgentCommandType;
+import org.apache.ambari.server.agent.AlertDefinitionCommand;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
import org.apache.ambari.server.controller.spi.NoSuchResourceException;
@@ -42,6 +45,8 @@ import org.apache.ambari.server.controller.utilities.PropertyHelper;
import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.alert.AlertDefinition;
import org.apache.ambari.server.state.alert.AlertDefinitionHash;
import org.apache.ambari.server.state.alert.Scope;
import org.apache.ambari.server.state.alert.SourceType;
@@ -49,6 +54,7 @@ import org.apache.ambari.server.state.alert.SourceType;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.inject.Inject;
+import com.google.inject.Injector;
/**
* ResourceProvider for Alert Definitions
@@ -76,13 +82,16 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
private static AlertDefinitionHash alertDefinitionHash;
+ private static ActionQueue actionQueue;
+
/**
* @param instance
*/
@Inject
- public static void init(AlertDefinitionDAO instance, AlertDefinitionHash adh) {
- alertDefinitionDAO = instance;
- alertDefinitionHash = adh;
+ public static void init(Injector injector) {
+ alertDefinitionDAO = injector.getInstance(AlertDefinitionDAO.class);
+ alertDefinitionHash = injector.getInstance(AlertDefinitionHash.class);
+ actionQueue = injector.getInstance(ActionQueue.class);
}
AlertDefinitionResourceProvider(Set<String> propertyIds,
@@ -117,15 +126,25 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
throws AmbariException {
List<AlertDefinitionEntity> entities = new ArrayList<AlertDefinitionEntity>();
+ String clusterName = null;
for (Map<String, Object> requestMap : requestMaps) {
entities.add(toCreateEntity(requestMap));
+
+ if (null == clusterName) {
+ clusterName = (String) requestMap.get(ALERT_DEF_CLUSTER_NAME);
+ }
}
+ Set<String> invalidatedHosts = new HashSet<String>();
+
// !!! TODO multi-create in a transaction
for (AlertDefinitionEntity entity : entities) {
alertDefinitionDAO.create(entity);
- alertDefinitionHash.invalidateHosts(entity);
+ invalidatedHosts.addAll(alertDefinitionHash.invalidateHosts(entity));
}
+
+ // build alert definition commands for all agent hosts affected
+ enqueueAgentCommands(clusterName, invalidatedHosts);
}
private AlertDefinitionEntity toCreateEntity(Map<String, Object> requestMap)
@@ -248,6 +267,10 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
throws SystemException, UnsupportedPropertyException,
NoSuchResourceException, NoSuchParentResourceException {
+ String clusterName = null;
+ Set<String> invalidatedHosts = new HashSet<String>();
+ Clusters clusters = getManagementController().getClusters();
+
for (Map<String, Object> requestPropMap : request.getProperties()) {
for (Map<String, Object> propertyMap : getPropertyMaps(requestPropMap, predicate)) {
Long id = (Long) propertyMap.get(ALERT_DEF_ID);
@@ -257,6 +280,17 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
continue;
}
+ if (null == clusterName) {
+ try {
+ Cluster cluster = clusters.getClusterById(entity.getClusterId());
+ if (null != cluster) {
+ clusterName = cluster.getClusterName();
+ }
+ } catch (AmbariException ae) {
+ throw new IllegalArgumentException("Invalid cluster ID", ae);
+ }
+ }
+
if (propertyMap.containsKey(ALERT_DEF_NAME)) {
entity.setDefinitionName((String) propertyMap.get(ALERT_DEF_NAME));
}
@@ -301,11 +335,13 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
entity.setHash(UUID.randomUUID().toString());
alertDefinitionDAO.merge(entity);
-
- alertDefinitionHash.invalidateHosts(entity);
+ invalidatedHosts.addAll(alertDefinitionHash.invalidateHosts(entity));
}
}
+ // build alert definition commands for all agent hosts affected
+ enqueueAgentCommands(clusterName, invalidatedHosts);
+
notifyUpdate(Resource.Type.AlertDefinition, request, predicate);
return getRequestStatus(null);
@@ -321,12 +357,17 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
Set<Long> definitionIds = new HashSet<Long>();
+ String clusterName = null;
for (final Resource resource : resources) {
definitionIds.add((Long) resource.getPropertyValue(ALERT_DEF_ID));
+
+ if (null == clusterName) {
+ clusterName = (String) resource.getPropertyValue(ALERT_DEF_CLUSTER_NAME);
+ }
}
+ final Set<String> invalidatedHosts = new HashSet<String>();
for (Long definitionId : definitionIds) {
-
LOG.info("Deleting alert definition {}", definitionId);
final AlertDefinitionEntity entity = alertDefinitionDAO.findById(definitionId.longValue());
@@ -335,12 +376,15 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
@Override
public Void invoke() throws AmbariException {
alertDefinitionDAO.remove(entity);
- alertDefinitionHash.invalidateHosts(entity);
+ invalidatedHosts.addAll(alertDefinitionHash.invalidateHosts(entity));
return null;
}
});
}
+ // build alert definition commands for all agent hosts affected
+ enqueueAgentCommands(clusterName, invalidatedHosts);
+
notifyDelete(Resource.Type.AlertDefinition, predicate);
return getRequestStatus(null);
@@ -379,4 +423,45 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
return resource;
}
+ /**
+ * Enqueue {@link AlertDefinitionCommand}s for every host specified so that
+ * they will receive a payload of alert definitions that they should be
+ * running.
+ * <p/>
+ * This method is typically called after
+ * {@link AlertDefinitionHash#invalidateHosts(AlertDefinitionEntity)} has
+ * caused a cache invalidation of the alert definition hash.
+ *
+ * @param clusterName
+ * the name of the cluster (not {@code null}).
+ * @param hosts
+ * the hosts to push {@link AlertDefinitionCommand}s for.
+ */
+ private void enqueueAgentCommands(String clusterName, Set<String> hosts) {
+ if (null == clusterName) {
+ LOG.warn("Unable to create alert definition agent commands because of a null cluster name");
+ return;
+ }
+
+ if (null == hosts || hosts.size() == 0) {
+ return;
+ }
+
+ for (String hostName : hosts) {
+ List<AlertDefinition> definitions = alertDefinitionHash.getAlertDefinitions(
+ clusterName, hostName);
+
+ String hash = alertDefinitionHash.getHash(clusterName, hostName);
+
+ AlertDefinitionCommand command = new AlertDefinitionCommand(clusterName,
+ hostName, hash, definitions);
+
+ // unlike other commands, the alert definitions commands are really
+ // designed to be 1:1 per change; if multiple invalidations happened
+ // before the next heartbeat, there would be several commands that would
+ // force the agents to reschedule their alerts more than once
+ actionQueue.dequeue(hostName, AgentCommandType.ALERT_DEFINITION_COMMAND);
+ actionQueue.enqueue(hostName, command);
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinition.java
index e6f9b39..8d9b3c2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinition.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinition.java
@@ -25,10 +25,9 @@ public class AlertDefinition {
private String serviceName = null;
private String componentName = null;
-
+
private String name = null;
- private String label = null;
- private String scope = null;
+ private Scope scope = null;
private int interval = 1;
private boolean enabled = true;
private Source source = null;
@@ -39,7 +38,7 @@ public class AlertDefinition {
public String getServiceName() {
return serviceName;
}
-
+
/**
* @param name the service name
*/
@@ -53,70 +52,88 @@ public class AlertDefinition {
public String getComponentName() {
return componentName;
}
-
+
/**
- *
+ *
* @param name the component name
*/
public void setComponentName(String name) {
componentName = name;
}
-
+
/**
* @return the name
*/
public String getName() {
return name;
}
-
+
/**
- * @return the label
+ * @param definitionName
+ * the definition name.
*/
- public String getLabel() {
- return label;
+ public void setName(String definitionName) {
+ name = definitionName;
}
-
+
/**
* @return the scope
*/
- public String getScope() {
+ public Scope getScope() {
return scope;
}
-
+
+ public void setScope(Scope definitionScope) {
+ scope = definitionScope;
+ }
+
/**
* @return the interval
*/
public int getInterval() {
return interval;
}
-
+
+ public void setInterval(int definitionInterval) {
+ interval = definitionInterval;
+ }
+
/**
* @return {@code true} if enabled
*/
public boolean isEnabled() {
return enabled;
}
-
+
+ public void setEnabled(boolean definitionEnabled) {
+ enabled = definitionEnabled;
+ }
+
public Source getSource() {
return source;
}
-
+
+ public void setSource(Source definitionSource) {
+ source = definitionSource;
+ }
+
@Override
public boolean equals(Object obj) {
- if (null == obj || !obj.getClass().equals(AlertDefinition.class))
+ if (null == obj || !obj.getClass().equals(AlertDefinition.class)) {
return false;
-
+ }
+
return name.equals(((AlertDefinition) obj).name);
}
-
+
@Override
public int hashCode() {
return name.hashCode();
}
-
+
@Override
public String toString() {
return name;
}
-
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionFactory.java
new file mode 100644
index 0000000..1775f88
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionFactory.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.alert;
+
+import java.io.File;
+import java.io.FileReader;
+import java.lang.reflect.Type;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParseException;
+import com.google.gson.reflect.TypeToken;
+import com.google.inject.Singleton;
+
+/**
+ * The {@link AlertDefinitionFactory} class is used to construct
+ * {@link AlertDefinition} instances from a variety of sources.
+ */
+@Singleton
+public class AlertDefinitionFactory {
+ /**
+ * Logger.
+ */
+ private final static Logger LOG = LoggerFactory.getLogger(AlertDefinitionFactory.class);
+
+ /**
+ * Builder used for type adapter registration.
+ */
+ private final GsonBuilder m_builder = new GsonBuilder().registerTypeAdapter(
+ Source.class, new AlertDefinitionSourceAdapter());
+
+ /**
+ * Thread safe deserializer.
+ */
+ private final Gson m_gson = m_builder.create();
+
+
+ /**
+ * Gets a list of all of the alert definitions defined in the specified JSON
+ * {@link File}.
+ *
+ * @param alertDefinitionFile
+ * @return
+ * @throws AmbariException
+ * if there was a problem reading the file or parsing the JSON.
+ */
+ public Map<String, List<AlertDefinition>> getAlertDefinitions(
+ File alertDefinitionFile) throws AmbariException {
+ try {
+ Type type = new TypeToken<Map<String, List<AlertDefinition>>>(){}.getType();
+
+ return m_gson.fromJson(new FileReader(alertDefinitionFile), type);
+ } catch (Exception e) {
+ LOG.error("Could not read the alert definition file", e);
+ throw new AmbariException("Could not read alert definition file", e);
+ }
+ }
+
+ /**
+ * Gets an {@link AlertDefinition} constructed from the specified
+ * {@link AlertDefinitionEntity}.
+ *
+ * @param entity
+ * the entity to use to construct the {@link AlertDefinition} (not
+ * {@code null}).
+ * @return the definition or {@code null} if it could not be coerced.
+ */
+ public AlertDefinition coerce(AlertDefinitionEntity entity) {
+ if (null == entity) {
+ return null;
+ }
+
+ AlertDefinition definition = new AlertDefinition();
+ definition.setComponentName(entity.getComponentName());
+ definition.setEnabled(entity.getEnabled());
+ definition.setInterval(entity.getScheduleInterval());
+ definition.setName(entity.getDefinitionName());
+ definition.setScope(entity.getScope());
+ definition.setServiceName(entity.getServiceName());
+
+ try{
+ String sourceJson = entity.getSource();
+ Source source = m_gson.fromJson(sourceJson, Source.class);
+ definition.setSource(source);
+ } catch (Exception exception) {
+ LOG.error(
+ "Unable to deserialized the alert definition source during coercion",
+ exception);
+ }
+
+ return definition;
+ }
+
+ /**
+ * Deserializes {@link Source} implementations.
+ */
+ private static final class AlertDefinitionSourceAdapter implements JsonDeserializer<Source>{
+ /**
+ *
+ */
+ @Override
+ public Source deserialize(JsonElement json, Type typeOfT,
+ JsonDeserializationContext context) throws JsonParseException {
+ JsonObject jsonObj = (JsonObject) json;
+
+ SourceType type = SourceType.valueOf(jsonObj.get("type").getAsString());
+ Class<? extends Source> cls = null;
+
+ switch (type) {
+ case METRIC:
+ cls = MetricSource.class;
+ break;
+ default:
+ break;
+ }
+
+ if (null != cls) {
+ return context.deserialize(json, cls);
+ } else {
+ return null;
+ }
+ }
+ }
+}
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java
index 7cbd4b3..3a89dd9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java
@@ -29,6 +29,8 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.controller.RootServiceResponseFactory.Components;
@@ -73,6 +75,9 @@ public class AlertDefinitionHash {
@Inject
private AlertDefinitionDAO m_definitionDao;
+ @Inject
+ private AlertDefinitionFactory m_factory;
+
/**
* All clusters.
*/
@@ -80,9 +85,15 @@ public class AlertDefinitionHash {
private Clusters m_clusters;
/**
- * The hashes for all hosts.
+ * !!! TODO: this class needs some thoughts on locking
+ */
+ private ReadWriteLock m_lock = new ReentrantReadWriteLock();
+
+ /**
+ * The hashes for all hosts for any cluster. The key is the hostname and the
+ * value is a map between cluster name and hash.
*/
- private Map<String, String> m_hashes = new ConcurrentHashMap<String, String>();
+ private Map<String, Map<String, String>> m_hashes = new HashMap<String, Map<String, String>>();
/**
* Gets a unique hash value representing all of the alert definitions that
@@ -101,13 +112,19 @@ public class AlertDefinitionHash {
* @return the unique hash or {@value #NULL_MD5_HASH} if none.
*/
public String getHash(String clusterName, String hostName) {
- String hash = m_hashes.get(hostName);
+ Map<String, String> clusterMapping = m_hashes.get(hostName);
+ if (null == clusterMapping) {
+ clusterMapping = new ConcurrentHashMap<String, String>();
+ m_hashes.put(hostName, clusterMapping);
+ }
+
+ String hash = clusterMapping.get(hostName);
if (null != hash) {
return hash;
}
hash = hash(clusterName, hostName);
- m_hashes.put(hostName, hash);
+ clusterMapping.put(clusterName, hash);
return hash;
}
@@ -123,8 +140,7 @@ public class AlertDefinitionHash {
* @see #getHash(String, String)
* @throws AmbariException
*/
- public Map<String, String> getHashes(String hostName)
- throws AmbariException {
+ public Map<String, String> getHashes(String hostName) throws AmbariException {
Set<Cluster> clusters = m_clusters.getClustersForHost(hostName);
if (null == clusters || clusters.size() == 0) {
return Collections.emptyMap();
@@ -148,7 +164,8 @@ public class AlertDefinitionHash {
}
/**
- * Invalidates the cached hash for the specified agent host.
+ * Invalidates the cached hash for the specified agent host across all
+ * clusters.
*
* @param hostName
* the host to invalidate the cache for (not {@code null}).
@@ -158,6 +175,22 @@ public class AlertDefinitionHash {
}
/**
+ * Invalidates the cached hash for the specified agent host in the specified
+ * cluster.
+ *
+ * @param clusterName
+ * the name of the cluster (not {@code null}).
+ * @param hostName
+ * the host to invalidate the cache for (not {@code null}).
+ */
+ public void invalidate(String clusterName, String hostName) {
+ Map<String, String> clusterMapping = m_hashes.get(hostName);
+ if (null != clusterMapping) {
+ clusterMapping.remove(clusterName);
+ }
+ }
+
+ /**
* Gets whether the alert definition hash for the specified host has been
* calculated and cached.
*
@@ -165,12 +198,17 @@ public class AlertDefinitionHash {
* the host.
* @return {@code true} if the hash was calculated; {@code false} otherwise.
*/
- public boolean isHashCached(String hostName) {
- if (null == hostName) {
+ public boolean isHashCached(String clusterName, String hostName) {
+ if (null == clusterName || null == hostName) {
+ return false;
+ }
+
+ Map<String, String> clusterMapping = m_hashes.get(hostName);
+ if (null == clusterMapping) {
return false;
}
- return m_hashes.containsKey(hostName);
+ return clusterMapping.containsKey(clusterName);
}
/**
@@ -189,7 +227,42 @@ public class AlertDefinitionHash {
* @return the alert definitions for the host, or an empty set (never
* {@code null}).
*/
- public Set<AlertDefinitionEntity> getAlertDefinitions(String clusterName,
+ public List<AlertDefinition> getAlertDefinitions(
+ String clusterName,
+ String hostName) {
+
+ Set<AlertDefinitionEntity> entities = getAlertDefinitionEntities(
+ clusterName, hostName);
+
+ List<AlertDefinition> definitions = new ArrayList<AlertDefinition>(
+ entities.size());
+
+ for (AlertDefinitionEntity entity : entities) {
+ definitions.add(m_factory.coerce(entity));
+ }
+
+ return definitions;
+ }
+
+
+ /**
+ * Gets the alert definition entities for the specified host. This will include the
+ * following types of alert definitions:
+ * <ul>
+ * <li>Service/Component alerts</li>
+ * <li>Service alerts where the host is a MASTER</li>
+ * <li>Host alerts that are not bound to a service</li>
+ * </ul>
+ *
+ * @param clusterName
+ * the cluster name (not {@code null}).
+ * @param hostName
+ * the host name (not {@code null}).
+ * @return the alert definitions for the host, or an empty set (never
+ * {@code null}).
+ */
+ private Set<AlertDefinitionEntity> getAlertDefinitionEntities(
+ String clusterName,
String hostName) {
Set<AlertDefinitionEntity> definitions = new HashSet<AlertDefinitionEntity>();
@@ -261,26 +334,21 @@ public class AlertDefinitionHash {
* @param definition
* the definition to use to find the hosts to invalidate (not
* {@code null}).
+ * @return the hosts that were invalidated, or an empty set (never
+ * {@code null}).
*/
- public void invalidateHosts(AlertDefinitionEntity definition) {
+ public Set<String> invalidateHosts(AlertDefinitionEntity definition) {
long clusterId = definition.getClusterId();
-
- // intercept host agent alerts; they affect all hosts
- String definitionServiceName = definition.getServiceName();
- String definitionComponentName = definition.getComponentName();
- if (Services.AMBARI.equals(definitionServiceName)
- && Components.AMBARI_AGENT.equals(definitionComponentName)) {
-
- invalidateAll();
- return;
- }
+ Set<String> invalidatedHosts = new HashSet<String>();
Cluster cluster = null;
Map<String, Host> hosts = null;
+ String clusterName = null;
try {
cluster = m_clusters.getClusterById(clusterId);
if (null != cluster) {
- hosts = m_clusters.getHostsForCluster(cluster.getClusterName());
+ clusterName = cluster.getClusterName();
+ hosts = m_clusters.getHostsForCluster(clusterName);
}
if (null == cluster) {
@@ -291,25 +359,35 @@ public class AlertDefinitionHash {
}
if (null == cluster) {
- return;
+ return invalidatedHosts;
+ }
+
+ // intercept host agent alerts; they affect all hosts
+ String definitionServiceName = definition.getServiceName();
+ String definitionComponentName = definition.getComponentName();
+ if (Services.AMBARI.equals(definitionServiceName)
+ && Components.AMBARI_AGENT.equals(definitionComponentName)) {
+
+ invalidateAll();
+ invalidatedHosts.addAll(hosts.keySet());
+ return invalidatedHosts;
}
// find all hosts that have the matching service and component
- if (null != hosts) {
- for (String hostName : hosts.keySet()) {
- List<ServiceComponentHost> hostComponents = cluster.getServiceComponentHosts(hostName);
- if (null == hostComponents || hostComponents.size() == 0) {
- continue;
- }
+ for (String hostName : hosts.keySet()) {
+ List<ServiceComponentHost> hostComponents = cluster.getServiceComponentHosts(hostName);
+ if (null == hostComponents || hostComponents.size() == 0) {
+ continue;
+ }
- // if a host has a matching service/component, invalidate it
- for (ServiceComponentHost component : hostComponents) {
- String serviceName = component.getServiceName();
- String componentName = component.getServiceComponentName();
- if (serviceName.equals(definitionServiceName)
- && componentName.equals(definitionComponentName)) {
- invalidate(hostName);
- }
+ // if a host has a matching service/component, invalidate it
+ for (ServiceComponentHost component : hostComponents) {
+ String serviceName = component.getServiceName();
+ String componentName = component.getServiceComponentName();
+ if (serviceName.equals(definitionServiceName)
+ && componentName.equals(definitionComponentName)) {
+ invalidate(clusterName, hostName);
+ invalidatedHosts.add(hostName);
}
}
}
@@ -320,7 +398,8 @@ public class AlertDefinitionHash {
if (null == service) {
LOG.warn("The alert definition {} has an unknown service of {}",
definition.getDefinitionName(), definitionServiceName);
- return;
+
+ return invalidatedHosts;
}
// get all master components of the definition's service; any hosts that
@@ -332,12 +411,15 @@ public class AlertDefinitionHash {
Map<String, ServiceComponentHost> componentHosts = component.getValue().getServiceComponentHosts();
if (null != componentHosts) {
for (String componentHost : componentHosts.keySet()) {
- invalidate(componentHost);
+ invalidate(clusterName, componentHost);
+ invalidatedHosts.add(componentHost);
}
}
}
}
}
+
+ return invalidatedHosts;
}
/**
@@ -353,7 +435,8 @@ public class AlertDefinitionHash {
* @return the unique hash or {@value #NULL_MD5_HASH} if none.
*/
private String hash(String clusterName, String hostName) {
- Set<AlertDefinitionEntity> definitions = getAlertDefinitions(clusterName,
+ Set<AlertDefinitionEntity> definitions = getAlertDefinitionEntities(
+ clusterName,
hostName);
// no definitions found for this host, don't bother hashing
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/test/java/org/apache/ambari/server/agent/TestActionQueue.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestActionQueue.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestActionQueue.java
index 847db33..c4f5b86 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestActionQueue.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestActionQueue.java
@@ -17,52 +17,55 @@
*/
package org.apache.ambari.server.agent;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
+import org.apache.ambari.server.agent.AgentCommand.AgentCommandType;
+import org.easymock.EasyMock;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestActionQueue {
-
+
private static Logger LOG = LoggerFactory.getLogger(TestActionQueue.class);
-
+
private static int threadCount = 100;
static class ActionQueueOperation implements Runnable {
-
+
enum OpType {
ENQUEUE,
DEQUEUE,
DEQUEUEALL
}
-
+
private volatile boolean shouldRun = true;
private long [] opCounts;
private ActionQueue actionQueue;
private OpType operation;
private String[] hosts;
-
+
public ActionQueueOperation(ActionQueue aq, String [] hosts, OpType op) {
- this.actionQueue = aq;
- this.operation = op;
+ actionQueue = aq;
+ operation = op;
this.hosts = hosts;
opCounts = new long [hosts.length];
for (int i = 0; i < hosts.length; i++) {
opCounts[i] = 0;
}
}
-
+
public long [] getOpCounts() {
return opCounts;
}
-
+
public void stop() {
- this.shouldRun = false;
+ shouldRun = false;
}
-
+
@Override
public void run() {
try {
@@ -82,7 +85,7 @@ public class TestActionQueue {
throw new RuntimeException("Failure", ex);
}
}
-
+
private void enqueueOp() throws InterruptedException {
while (shouldRun) {
int index = 0;
@@ -94,7 +97,7 @@ public class TestActionQueue {
Thread.sleep(1);
}
}
-
+
private void dequeueOp() throws InterruptedException {
while (shouldRun) {
int index = 0;
@@ -108,7 +111,7 @@ public class TestActionQueue {
Thread.sleep(1);
}
}
-
+
private void dequeueAllOp() throws InterruptedException {
while (shouldRun) {
int index = 0;
@@ -123,7 +126,7 @@ public class TestActionQueue {
}
}
}
-
+
@Test
public void testConcurrentOperations() throws InterruptedException {
ActionQueue aq = new ActionQueue();
@@ -185,7 +188,7 @@ public class TestActionQueue {
}
}
}
-
+
// Stop all threads
for (int i = 0; i < threadCount; i++) {
dequeOperators[i].stop();
@@ -195,7 +198,7 @@ public class TestActionQueue {
for (Thread consumer : consumers) {
consumer.join();
}
-
+
for (int h = 0; h<hosts.length; h++) {
long opsEnqueued = 0;
long opsDequeued = 0;
@@ -211,4 +214,53 @@ public class TestActionQueue {
assertEquals(opsDequeued, opsEnqueued);
}
}
+
+ /**
+ * @throws Exception
+ */
+ @Test
+ public void testDequeueCommandType() throws Exception {
+ ActionQueue queue = new ActionQueue();
+ String c6401 = "c6401.ambari.apache.org";
+ String c6402 = "c6402.ambari.apache.org";
+
+ queue.enqueue(c6401,
+ EasyMock.createMockBuilder(ExecutionCommand.class).createNiceMock());
+
+ queue.enqueue(c6401,
+ EasyMock.createMockBuilder(StatusCommand.class).createNiceMock());
+
+ queue.enqueue(c6401,
+ EasyMock.createMockBuilder(AlertDefinitionCommand.class).createNiceMock());
+
+ queue.enqueue(c6401,
+ EasyMock.createMockBuilder(StatusCommand.class).createNiceMock());
+
+ queue.enqueue(c6401,
+ EasyMock.createMockBuilder(AlertDefinitionCommand.class).createNiceMock());
+
+ queue.enqueue(c6401,
+ EasyMock.createMockBuilder(StatusCommand.class).createNiceMock());
+
+ queue.enqueue(c6401,
+ EasyMock.createMockBuilder(AlertDefinitionCommand.class).createNiceMock());
+
+ queue.enqueue(c6402,
+ EasyMock.createMockBuilder(ExecutionCommand.class).createNiceMock());
+
+ queue.enqueue(c6402,
+ EasyMock.createMockBuilder(StatusCommand.class).createNiceMock());
+
+ queue.enqueue(c6402,
+ EasyMock.createMockBuilder(AlertDefinitionCommand.class).createNiceMock());
+
+ assertEquals(7, queue.size(c6401));
+
+ List<AgentCommand> commands = queue.dequeue(c6401,
+ AgentCommandType.ALERT_DEFINITION_COMMAND);
+
+ assertEquals(3, commands.size());
+ assertEquals(4, queue.size(c6401));
+ assertEquals(3, queue.size(c6402));
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
index 53a78eb..b1b83fa 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
@@ -24,8 +24,6 @@ import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.mock;
import java.io.File;
import java.lang.reflect.Method;
@@ -108,10 +106,12 @@ public class AmbariMetaInfoTest {
@Before
public void before() throws Exception {
injector = Guice.createInjector(new MockModule());
+
File stackRoot = new File("src/test/resources/stacks");
LOG.info("Stacks file " + stackRoot.getAbsolutePath());
metaInfo = new AmbariMetaInfo(stackRoot, new File("target/version"));
metaInfo.injector = injector;
+
try {
metaInfo.init();
} catch(Exception e) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
index 864eb08..d21df88 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
@@ -32,6 +32,7 @@ import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -43,6 +44,7 @@ import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.utilities.PredicateBuilder;
import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
import org.apache.ambari.server.state.Cluster;
@@ -54,13 +56,20 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.util.Modules;
+
/**
* AlertDefinition tests
*/
public class AlertDefinitionResourceProviderTest {
- AlertDefinitionDAO dao = null;
- AlertDefinitionHash definitionHash = null;
+ private AlertDefinitionDAO dao = null;
+ private AlertDefinitionHash definitionHash = null;
+ private Injector m_injector;
private static String DEFINITION_UUID = UUID.randomUUID().toString();
@@ -69,7 +78,10 @@ public class AlertDefinitionResourceProviderTest {
dao = createStrictMock(AlertDefinitionDAO.class);
definitionHash = createNiceMock(AlertDefinitionHash.class);
- AlertDefinitionResourceProvider.init(dao, definitionHash);
+ m_injector = Guice.createInjector(Modules.override(
+ new InMemoryDefaultTestModule()).with(new MockModule()));
+
+ AlertDefinitionResourceProvider.init(m_injector);
}
/**
@@ -185,8 +197,9 @@ public class AlertDefinitionResourceProviderTest {
expectLastCall();
// creating a single definition should invalidate hosts of the definition
- definitionHash.invalidateHosts(EasyMock.anyObject(AlertDefinitionEntity.class));
- expectLastCall().once();
+ expect(
+ definitionHash.invalidateHosts(EasyMock.anyObject(AlertDefinitionEntity.class))).andReturn(
+ new HashSet<String>()).once();
replay(amc, clusters, cluster, dao, definitionHash);
@@ -235,15 +248,18 @@ public class AlertDefinitionResourceProviderTest {
Cluster cluster = createMock(Cluster.class);
expect(amc.getClusters()).andReturn(clusters).atLeastOnce();
expect(clusters.getCluster((String) anyObject())).andReturn(cluster).atLeastOnce();
- expect(cluster.getClusterId()).andReturn(Long.valueOf(1)).anyTimes();
+ expect(clusters.getClusterById(EasyMock.anyInt())).andReturn(cluster).atLeastOnce();
+ expect(cluster.getClusterId()).andReturn(Long.valueOf(1)).atLeastOnce();
+ expect(cluster.getClusterName()).andReturn("c1").atLeastOnce();
Capture<AlertDefinitionEntity> entityCapture = new Capture<AlertDefinitionEntity>();
dao.create(capture(entityCapture));
expectLastCall();
// updating a single definition should invalidate hosts of the definition
- definitionHash.invalidateHosts(EasyMock.anyObject(AlertDefinitionEntity.class));
- expectLastCall().once();
+ expect(
+ definitionHash.invalidateHosts(EasyMock.anyObject(AlertDefinitionEntity.class))).andReturn(
+ new HashSet<String>()).atLeastOnce();
replay(amc, clusters, cluster, dao, definitionHash);
@@ -311,8 +327,9 @@ public class AlertDefinitionResourceProviderTest {
expectLastCall();
// deleting a single definition should invalidate hosts of the definition
- definitionHash.invalidateHosts(EasyMock.anyObject(AlertDefinitionEntity.class));
- expectLastCall().once();
+ expect(
+ definitionHash.invalidateHosts(EasyMock.anyObject(AlertDefinitionEntity.class))).andReturn(
+ new HashSet<String>()).atLeastOnce();
replay(amc, clusters, cluster, dao, definitionHash);
@@ -351,7 +368,6 @@ public class AlertDefinitionResourceProviderTest {
Assert.assertEquals(Long.valueOf(1), entity1.getDefinitionId());
verify(amc, clusters, cluster, dao);
-
}
/**
@@ -385,4 +401,21 @@ public class AlertDefinitionResourceProviderTest {
return Arrays.asList(entity);
}
+ /**
+ *
+ */
+ private class MockModule implements Module {
+ /**
+ *
+ */
+ @Override
+ public void configure(Binder binder) {
+ binder.bind(AlertDefinitionDAO.class).toInstance(dao);
+ binder.bind(AlertDefinitionHash.class).toInstance(definitionHash);
+ binder.bind(Clusters.class).toInstance(
+ EasyMock.createNiceMock(Clusters.class));
+ binder.bind(Cluster.class).toInstance(
+ EasyMock.createNiceMock(Cluster.class));
+ }
+ }
}
[31/50] [abbrv] git commit: AMBARI-6920. Admin Views: Sync LDAP does
not sync the LDAP groups. (mahadev)
Posted by jo...@apache.org.
AMBARI-6920. Admin Views: Sync LDAP does not sync the LDAP groups. (mahadev)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/51bebd3d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/51bebd3d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/51bebd3d
Branch: refs/heads/branch-alerts-dev
Commit: 51bebd3daa97478946bbef65ffd77ee62af19ba2
Parents: 818dc16
Author: Mahadev Konar <ma...@apache.org>
Authored: Tue Aug 19 12:15:57 2014 -0700
Committer: Mahadev Konar <ma...@apache.org>
Committed: Tue Aug 19 12:16:02 2014 -0700
----------------------------------------------------------------------
.../internal/PrivilegeResourceProvider.java | 3 +
.../authorization/AmbariLdapDataPopulator.java | 32 +--
.../AmbariLdapDataPopulatorTest.java | 261 +++++++++++++++++++
3 files changed, 277 insertions(+), 19 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/51bebd3d/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PrivilegeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PrivilegeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PrivilegeResourceProvider.java
index d8fce4d..f0bcaf8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PrivilegeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PrivilegeResourceProvider.java
@@ -342,6 +342,9 @@ public abstract class PrivilegeResourceProvider<T> extends AbstractResourceProvi
}
} else if (PrincipalTypeEntity.USER_PRINCIPAL_TYPE_NAME.equalsIgnoreCase(principalType)) {
UserEntity userEntity = userDAO.findLocalUserByName(principalName);
+ if (userEntity == null) {
+ userEntity = userDAO.findLdapUserByName(principalName);
+ }
if (userEntity != null) {
entity.setPrincipal(principalDAO.findById(userEntity.getPrincipal().getId()));
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/51bebd3d/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulator.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulator.java
index 7932833..b5f9341 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulator.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulator.java
@@ -65,7 +65,7 @@ public class AmbariLdapDataPopulator {
/**
* LDAP specific properties.
*/
- private LdapServerProperties ldapServerProperties;
+ protected LdapServerProperties ldapServerProperties;
/**
* LDAP template for making search queries.
@@ -86,14 +86,7 @@ public class AmbariLdapDataPopulator {
public boolean isLdapEnabled() {
try {
final LdapTemplate ldapTemplate = loadLdapTemplate();
- ldapTemplate.search(ldapServerProperties.getBaseDN(),
- "(objectclass=person)", new AttributesMapper() {
-
- public Object mapFromAttributes(Attributes attributes)
- throws NamingException {
- return "";
- }
- });
+ ldapTemplate.list(ldapServerProperties.getBaseDN());
return true;
} catch (Exception ex) {
LOG.error("Could not connect to LDAP server", ex);
@@ -217,7 +210,7 @@ public class AmbariLdapDataPopulator {
* @param groupName group name
* @throws AmbariException if group refresh failed
*/
- private void refreshGroupMembers(String groupName) throws AmbariException {
+ protected void refreshGroupMembers(String groupName) throws AmbariException {
final Set<String> externalMembers = getExternalLdapGroupMembers(groupName);
final Map<String, User> internalUsers = getInternalUsers();
final Map<String, User> internalMembers = getInternalMembers(groupName);
@@ -233,8 +226,9 @@ public class AmbariLdapDataPopulator {
internalMembers.remove(externalMember);
internalUsers.remove(externalMember);
} else {
- users.createUser(externalMember, "", true, false);
+ users.createUser(externalMember, "");
users.setUserLdap(externalMember);
+ users.addMemberToGroup(groupName, externalMember);
}
}
for (Entry<String, User> userToBeUnsynced: internalMembers.entrySet()) {
@@ -248,7 +242,7 @@ public class AmbariLdapDataPopulator {
*
* @throws AmbariException
*/
- private void cleanUpLdapUsersWithoutGroup() throws AmbariException {
+ protected void cleanUpLdapUsersWithoutGroup() throws AmbariException {
final List<User> allUsers = users.getAllUsers();
for (User user: allUsers) {
if (user.isLdapUser() && user.getGroups().isEmpty()) {
@@ -264,7 +258,7 @@ public class AmbariLdapDataPopulator {
*
* @return set of user names
*/
- private Set<String> getExternalLdapGroupNames() {
+ protected Set<String> getExternalLdapGroupNames() {
final Set<String> groups = new HashSet<String>();
final LdapTemplate ldapTemplate = loadLdapTemplate();
final EqualsFilter equalsFilter = new EqualsFilter("objectClass",
@@ -287,7 +281,7 @@ public class AmbariLdapDataPopulator {
*
* @return set of user names
*/
- private Set<String> getExternalLdapUserNames() {
+ protected Set<String> getExternalLdapUserNames() {
final Set<String> users = new HashSet<String>();
final LdapTemplate ldapTemplate = loadLdapTemplate();
final EqualsFilter equalsFilter = new EqualsFilter("objectClass",
@@ -311,7 +305,7 @@ public class AmbariLdapDataPopulator {
* @param groupName group name
* @return set of group names
*/
- private Set<String> getExternalLdapGroupMembers(String groupName) {
+ protected Set<String> getExternalLdapGroupMembers(String groupName) {
final Set<String> members = new HashSet<String>();
final LdapTemplate ldapTemplate = loadLdapTemplate();
final AndFilter andFilter = new AndFilter();
@@ -337,7 +331,7 @@ public class AmbariLdapDataPopulator {
*
* @return map of GroupName-Group pairs
*/
- private Map<String, Group> getInternalGroups() {
+ protected Map<String, Group> getInternalGroups() {
final List<Group> internalGroups = users.getAllGroups();
final Map<String, Group> internalGroupsMap = new HashMap<String, Group>();
for (Group group : internalGroups) {
@@ -351,7 +345,7 @@ public class AmbariLdapDataPopulator {
*
* @return map of UserName-User pairs
*/
- private Map<String, User> getInternalUsers() {
+ protected Map<String, User> getInternalUsers() {
final List<User> internalUsers = users.getAllUsers();
final Map<String, User> internalUsersMap = new HashMap<String, User>();
for (User user : internalUsers) {
@@ -366,7 +360,7 @@ public class AmbariLdapDataPopulator {
* @param groupName group name
* @return map of UserName-User pairs
*/
- private Map<String, User> getInternalMembers(String groupName) {
+ protected Map<String, User> getInternalMembers(String groupName) {
final Collection<User> internalMembers = users.getGroupMembers(groupName);
final Map<String, User> internalMembersMap = new HashMap<String, User>();
for (User user : internalMembers) {
@@ -380,7 +374,7 @@ public class AmbariLdapDataPopulator {
*
* @return LdapTemplate instance
*/
- private LdapTemplate loadLdapTemplate() {
+ protected LdapTemplate loadLdapTemplate() {
final LdapServerProperties properties = configuration
.getLdapServerProperties();
if (ldapTemplate == null || !properties.equals(ldapServerProperties)) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/51bebd3d/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulatorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulatorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulatorTest.java
new file mode 100644
index 0000000..e8f0525
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulatorTest.java
@@ -0,0 +1,261 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.security.authorization;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import junit.framework.Assert;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.orm.entities.GroupEntity;
+import org.apache.ambari.server.orm.entities.MemberEntity;
+import org.apache.ambari.server.orm.entities.PrincipalEntity;
+import org.apache.ambari.server.orm.entities.PrivilegeEntity;
+import org.apache.ambari.server.orm.entities.RoleEntity;
+import org.apache.ambari.server.orm.entities.UserEntity;
+import org.easymock.Capture;
+import org.easymock.EasyMock;
+import org.easymock.IAnswer;
+import org.junit.Test;
+import org.springframework.ldap.core.LdapTemplate;
+
+public class AmbariLdapDataPopulatorTest {
+ private static class AmbariLdapDataPopulatorTestInstance extends AmbariLdapDataPopulator {
+
+ public AmbariLdapDataPopulatorTestInstance(Configuration configuration,
+ Users users) {
+ super(configuration, users);
+ this.ldapServerProperties = EasyMock.createNiceMock(LdapServerProperties.class);
+ }
+
+ final LdapTemplate ldapTemplate = EasyMock.createNiceMock(LdapTemplate.class);
+
+ @Override
+ protected LdapTemplate loadLdapTemplate() {
+ return ldapTemplate;
+ }
+
+ public LdapServerProperties getLdapServerProperties() {
+ return this.ldapServerProperties;
+ }
+ }
+
+ @Test
+ public void testRefreshGroupMembers() throws AmbariException {
+ final Configuration configuration = EasyMock.createNiceMock(Configuration.class);
+ final Users users = EasyMock.createNiceMock(Users.class);
+
+ final GroupEntity ldapGroup = new GroupEntity();
+ ldapGroup.setGroupId(1);
+ ldapGroup.setGroupName("ldapGroup");
+ ldapGroup.setLdapGroup(true);
+ ldapGroup.setMemberEntities(new HashSet<MemberEntity>());
+
+ final User ldapUserWithoutGroup = createLdapUserWithoutGroup();
+ final User ldapUserWithGroup = createLdapUserWithGroup(ldapGroup);
+ final User localUserWithoutGroup = createLocalUserWithoutGroup();
+ final User localUserWithGroup = createLocalUserWithGroup(ldapGroup);
+
+ final AmbariLdapDataPopulator populator = new AmbariLdapDataPopulatorTestInstance(configuration, users) {
+ @Override
+ protected Set<String> getExternalLdapGroupMembers(String groupName) {
+ return new HashSet<String>() {
+ {
+ add(ldapUserWithGroup.getUserName());
+ add(ldapUserWithoutGroup.getUserName());
+ }
+ };
+ }
+
+ @Override
+ protected Map<String, User> getInternalUsers() {
+ return new HashMap<String, User>() {
+ {
+ put(localUserWithGroup.getUserName(), localUserWithGroup);
+ put(localUserWithoutGroup.getUserName(), localUserWithoutGroup);
+ }
+ };
+ }
+
+ @Override
+ protected Map<String, User> getInternalMembers(String groupName) {
+ return new HashMap<String, User>() {
+ {
+ put(localUserWithGroup.getUserName(), localUserWithGroup);
+ }
+ };
+ }
+ };
+
+ users.createUser(EasyMock.<String> anyObject(), EasyMock.<String> anyObject());
+ EasyMock.expectLastCall().times(2);
+
+ users.addMemberToGroup(EasyMock.<String> anyObject(), EasyMock.<String> anyObject());
+ EasyMock.expectLastCall().times(2);
+
+ EasyMock.replay(users);
+
+ populator.refreshGroupMembers(ldapGroup.getGroupName());
+
+ EasyMock.verify(users);
+ }
+
+ @Test
+ public void testIsLdapEnabled() {
+ final Configuration configuration = EasyMock.createNiceMock(Configuration.class);
+ final Users users = EasyMock.createNiceMock(Users.class);
+
+ final AmbariLdapDataPopulator populator = new AmbariLdapDataPopulatorTestInstance(configuration, users);
+
+ EasyMock.expect(populator.loadLdapTemplate().list(EasyMock. <String>anyObject())).andReturn(Collections.emptyList()).once();
+ EasyMock.replay(populator.loadLdapTemplate());
+
+ populator.isLdapEnabled();
+ EasyMock.verify(populator.loadLdapTemplate());
+ }
+
+ @Test
+ public void testIsLdapEnabled_reallyEnabled() {
+ final Configuration configuration = EasyMock.createNiceMock(Configuration.class);
+ final Users users = EasyMock.createNiceMock(Users.class);
+
+ final AmbariLdapDataPopulator populator = new AmbariLdapDataPopulatorTestInstance(configuration, users);
+
+ // A successful list() call (even returning no entries) must yield "enabled".
+ EasyMock.expect(populator.loadLdapTemplate().list(EasyMock. <String>anyObject())).andReturn(Collections.emptyList()).once();
+ EasyMock.replay(populator.loadLdapTemplate());
+
+ Assert.assertTrue(populator.isLdapEnabled());
+ EasyMock.verify(populator.loadLdapTemplate());
+ }
+
+ @Test
+ public void testIsLdapEnabled_reallyDisabled() {
+ final Configuration configuration = EasyMock.createNiceMock(Configuration.class);
+ final Users users = EasyMock.createNiceMock(Users.class);
+
+ final AmbariLdapDataPopulator populator = new AmbariLdapDataPopulatorTestInstance(configuration, users);
+
+ // Any exception thrown by the LDAP search must be swallowed by isLdapEnabled()
+ // and reported as "disabled" rather than propagated to the caller.
+ EasyMock.expect(populator.loadLdapTemplate().list(EasyMock. <String>anyObject())).andThrow(new NullPointerException()).once();
+ EasyMock.replay(populator.loadLdapTemplate());
+
+ Assert.assertFalse(populator.isLdapEnabled());
+ EasyMock.verify(populator.loadLdapTemplate());
+ }
+
+ @Test
+ @SuppressWarnings("serial")
+ public void testCleanUpLdapUsersWithoutGroup() throws AmbariException {
+ final Configuration configuration = EasyMock.createNiceMock(Configuration.class);
+ final Users users = EasyMock.createNiceMock(Users.class);
+
+ // One LDAP-synced group with no members yet; memberships are wired up below
+ // by createLdapUserWithGroup/createLocalUserWithGroup.
+ final GroupEntity ldapGroup = new GroupEntity();
+ ldapGroup.setGroupId(1);
+ ldapGroup.setGroupName("ldapGroup");
+ ldapGroup.setLdapGroup(true);
+ ldapGroup.setMemberEntities(new HashSet<MemberEntity>());
+
+ // Four users covering the {ldap,local} x {grouped,groupless} matrix; only the
+ // LDAP user without a group is eligible for cleanup.
+ final User ldapUserWithoutGroup = createLdapUserWithoutGroup();
+ final User ldapUserWithGroup = createLdapUserWithGroup(ldapGroup);
+ final User localUserWithoutGroup = createLocalUserWithoutGroup();
+ final User localUserWithGroup = createLocalUserWithGroup(ldapGroup);
+
+ final List<User> allUsers = new ArrayList<User>() {
+ {
+ add(ldapUserWithoutGroup);
+ add(ldapUserWithGroup);
+ add(localUserWithoutGroup);
+ add(localUserWithGroup);
+ }
+ };
+ // Hand the populator a copy so the local allUsers list can track removals.
+ EasyMock.expect(users.getAllUsers()).andReturn(new ArrayList<User>(allUsers));
+
+ // Record every removeUser() call and mirror it on allUsers via an IAnswer,
+ // so assertions below can inspect both the removed and the surviving users.
+ final List<User> removedUsers = new ArrayList<User>();
+ final Capture<User> userCapture = new Capture<User>();
+ users.removeUser(EasyMock.capture(userCapture));
+ EasyMock.expectLastCall().andAnswer(new IAnswer<Void>() {
+ @Override
+ public Void answer() throws Throwable {
+ removedUsers.add(userCapture.getValue());
+ allUsers.remove(userCapture.getValue());
+ return null;
+ }
+ });
+
+ EasyMock.replay(users);
+
+ final AmbariLdapDataPopulator populator = new AmbariLdapDataPopulatorTestInstance(configuration, users);
+ populator.cleanUpLdapUsersWithoutGroup();
+
+ // Exactly the groupless LDAP user is removed; everyone else survives.
+ // NOTE(review): JUnit's convention is assertEquals(expected, actual) — the
+ // arguments here are reversed, which would garble failure messages.
+ Assert.assertEquals(removedUsers.size(), 1);
+ Assert.assertEquals(allUsers.size(), 3);
+ Assert.assertTrue(allUsers.contains(ldapUserWithGroup));
+ Assert.assertTrue(allUsers.contains(localUserWithoutGroup));
+ Assert.assertTrue(allUsers.contains(localUserWithGroup));
+ Assert.assertEquals(removedUsers.get(0), ldapUserWithoutGroup);
+
+ EasyMock.verify(users);
+ }
+
+ // Monotonically increasing id for fixture users. NOTE(review): static state
+ // persists across tests in the same JVM; ids are unique but not predictable
+ // per test — do not assert on absolute id values.
+ private static int userIdCounter = 1;
+
+ // Builds a detached UserEntity graph (no persistence) wrapped in a User DTO.
+ // name      display/login name for the fixture user
+ // ldapUser  whether the user is flagged as LDAP-synced
+ // group     optional group; when non-null a bidirectional membership is wired
+ private User createUser(String name, boolean ldapUser, GroupEntity group) {
+ final UserEntity userEntity = new UserEntity();
+ userEntity.setUserId(userIdCounter++);
+ userEntity.setUserName(name);
+ userEntity.setCreateTime(new Date());
+ userEntity.setLdapUser(ldapUser);
+ userEntity.setActive(true);
+ userEntity.setMemberEntities(new HashSet<MemberEntity>());
+ userEntity.setRoleEntities(new HashSet<RoleEntity>());
+ final PrincipalEntity principalEntity = new PrincipalEntity();
+ principalEntity.setPrivileges(new HashSet<PrivilegeEntity>());
+ userEntity.setPrincipal(principalEntity);
+ if (group != null) {
+ // Link both sides of the membership so either entity can navigate it.
+ final MemberEntity member = new MemberEntity();
+ member.setUser(userEntity);
+ member.setGroup(group);
+ group.getMemberEntities().add(member);
+ userEntity.getMemberEntities().add(member);
+ }
+ return new User(userEntity);
+ }
+
+ // Convenience factories for the four {ldap,local} x {grouped,groupless}
+ // fixture variants used by the tests above. Names double as identifiers
+ // in assertions, so they must stay distinct.
+ private User createLdapUserWithoutGroup() {
+ return createUser("LdapUserWithoutGroup", true, null);
+ }
+
+ private User createLocalUserWithoutGroup() {
+ return createUser("LocalUserWithoutGroup", false, null);
+ }
+
+ private User createLdapUserWithGroup(GroupEntity group) {
+ return createUser("LdapUserWithGroup", true, group);
+ }
+
+ private User createLocalUserWithGroup(GroupEntity group) {
+ return createUser("LocalUserWithGroup", false, group);
+ }
+}
[39/50] [abbrv] git commit: AMBARI-6938 Incorrect behavior of 'Abort
operation' button after abort request is sent. (Max Shepel via ababiichuk)
Posted by jo...@apache.org.
AMBARI-6938 Incorrect behavior of 'Abort operation' button after abort request is sent. (Max Shepel via ababiichuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7581b9aa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7581b9aa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7581b9aa
Branch: refs/heads/branch-alerts-dev
Commit: 7581b9aa69b22688db70c99e4bff842a58423fef
Parents: 35dbbe9
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Wed Aug 20 14:56:34 2014 +0300
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Wed Aug 20 14:56:34 2014 +0300
----------------------------------------------------------------------
.../templates/common/host_progress_popup.hbs | 2 +-
ambari-web/app/utils/host_progress_popup.js | 43 +++++------
.../test/utils/host_progress_popup_test.js | 76 ++++++++++++++++----
3 files changed, 80 insertions(+), 41 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/7581b9aa/ambari-web/app/templates/common/host_progress_popup.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/host_progress_popup.hbs b/ambari-web/app/templates/common/host_progress_popup.hbs
index d60dd30..a80cd65 100644
--- a/ambari-web/app/templates/common/host_progress_popup.hbs
+++ b/ambari-web/app/templates/common/host_progress_popup.hbs
@@ -49,7 +49,7 @@
{{servicesInfo.name}}
</a>
{{#if App.supports.abortRequests}}
- {{#if servicesInfo.abortable}}
+ {{#if servicesInfo.isAbortable}}
{{view abortIcon servicesInfoBinding="servicesInfo"}}
{{/if}}
{{/if}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7581b9aa/ambari-web/app/utils/host_progress_popup.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/host_progress_popup.js b/ambari-web/app/utils/host_progress_popup.js
index cc687df..724f131 100644
--- a/ambari-web/app/utils/host_progress_popup.js
+++ b/ambari-web/app/utils/host_progress_popup.js
@@ -68,19 +68,9 @@ App.HostPopup = Em.Object.create({
*/
isPopup: null,
- /**
- * List of aborted requests
- * @type {Array}
- */
- abortedRequests: [],
-
abortIcon: Em.View.extend({
tagName: 'i',
- classNames: ['abortable', 'abort-icon', 'icon-remove-circle'],
- classNameBindings: ['abortClassName'],
- abortClassName: function () {
- return this.get('servicesInfo.abortable') ? this.get('servicesInfo.abortClassName') : 'hidden';
- }.property('servicesInfo'),
+ classNames: ['abort-icon', 'icon-remove-circle'],
click: function () {
this.get('controller').abortRequest(this.get('servicesInfo'));
return false;
@@ -107,20 +97,30 @@ App.HostPopup = Em.Object.create({
}),
/**
+ * Determines if background operation can be aborted depending on its status
+ * @param status
+ * @returns {boolean}
+ */
+ isAbortableByStatus: function (status) {
+ var statuses = this.get('statusesStyleMap');
+ return !Em.keys(statuses).contains(status) || status == 'IN_PROGRESS';
+ },
+
+ /**
* Send request to abort operation
*/
abortRequest: function (serviceInfo) {
var requestName = serviceInfo.get('name');
var self = this;
App.showConfirmationPopup(function () {
- var requestId = serviceInfo.get('id');
- self.get('abortedRequests').push(requestId);
+ serviceInfo.set('isAbortable', false);
App.ajax.send({
name: 'background_operations.abort_request',
sender: self,
data: {
- requestId: requestId,
- requestName: requestName
+ requestId: serviceInfo.get('id'),
+ requestName: requestName,
+ serviceInfo: serviceInfo
},
success: 'abortRequestSuccessCallback',
error: 'abortRequestErrorCallback'
@@ -146,8 +146,7 @@ App.HostPopup = Em.Object.create({
* Method called on unsuccessful sending request to abort operation
*/
abortRequestErrorCallback: function (xhr, textStatus, error, opt, data) {
- var abortedRequests = this.get('controller.abortedRequests');
- this.set('controller.abortedRequests', abortedRequests.without(data.requestId));
+ data.serviceInfo.set('isAbortable', this.isAbortableByStatus(data.serviceInfo.status));
App.ajax.defaultErrorHandler(xhr, opt.url, 'PUT', xhr.status);
},
/**
@@ -359,15 +358,7 @@ App.HostPopup = Em.Object.create({
servicesInfo.insertAt(index, updatedService);
}
if (App.get('supports.abortRequests')) {
- var abortable = !Em.keys(statuses).contains(service.status) || service.status == 'IN_PROGRESS';
- if (!abortable) {
- var abortedRequests = this.get('abortedRequests');
- this.set('abortedRequests', abortedRequests.without(id));
- }
- updatedService.setProperties({
- abortable: abortable,
- abortClassName: 'abort' + id
- });
+ updatedService.set('isAbortable', this.isAbortableByStatus(service.status));
}
}, this);
this.removeOldServices(servicesInfo, currentServices);
http://git-wip-us.apache.org/repos/asf/ambari/blob/7581b9aa/ambari-web/test/utils/host_progress_popup_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/host_progress_popup_test.js b/ambari-web/test/utils/host_progress_popup_test.js
index 1198099..3a8a1c1 100644
--- a/ambari-web/test/utils/host_progress_popup_test.js
+++ b/ambari-web/test/utils/host_progress_popup_test.js
@@ -231,6 +231,33 @@ describe('App.HostPopup', function () {
}
];
+ var statusCases = [
+ {
+ status: 'FAILED',
+ result: false
+ },
+ {
+ status: 'ABORTED',
+ result: false
+ },
+ {
+ status: 'TIMEDOUT',
+ result: false
+ },
+ {
+ status: 'IN_PROGRESS',
+ result: true
+ },
+ {
+ status: 'COMPLETED',
+ result: false
+ },
+ {
+ status: 'PENDING',
+ result: true
+ }
+ ];
+
describe('#setSelectCount', function () {
var itemsForStatusTest = [
{
@@ -334,8 +361,15 @@ describe('App.HostPopup', function () {
});
});
+ describe('#isAbortableByStatus', function () {
+ statusCases.forEach(function (item) {
+ it('should return ' + item.result + ' for ' + item.status, function () {
+ expect(App.HostPopup.isAbortableByStatus(item.status)).to.equal(item.result);
+ });
+ });
+ });
+
describe('#abortRequest', function () {
- var popup;
beforeEach(function () {
sinon.stub(App.ajax, 'send', Em.K);
sinon.spy(App, 'showConfirmationPopup');
@@ -361,7 +395,8 @@ describe('App.HostPopup', function () {
});
it('should open popup', function () {
App.HostPopup.abortRequestSuccessCallback(null, null, {
- requestName: 'name'
+ requestName: 'name',
+ serviceInfo: Em.Object.create()
});
expect(App.ModalPopup.show.calledOnce).to.be.true;
});
@@ -375,9 +410,12 @@ describe('App.HostPopup', function () {
return Em.get(App, k);
});
sinon.spy(App.ModalPopup, 'show');
- popup.set('controller', Em.Object.create({
- abortedRequests: [0]
- }));
+ });
+ afterEach(function () {
+ App.ModalPopup.show.restore();
+ App.ajax.get.restore();
+ });
+ it('should open popup', function () {
popup.abortRequestErrorCallback({
responseText: {
message: 'message'
@@ -386,18 +424,28 @@ describe('App.HostPopup', function () {
}, 'status', 'error', {
url: 'url'
}, {
- requestId: 0
+ requestId: 0,
+ serviceInfo: Em.Object.create()
});
- });
- afterEach(function () {
- App.ModalPopup.show.restore();
- App.ajax.get.restore();
- });
- it('should open popup', function () {
expect(App.ModalPopup.show.calledOnce).to.be.true;
});
- it('should remove current request id from abortedRequests', function () {
- expect(App.HostPopup.get('abortedRequests')).to.be.empty;
+ statusCases.forEach(function (item) {
+ it('should set serviceInfo.isAbortable to' + item.result + ' if status is ' + item.status, function () {
+ popup.abortRequestErrorCallback({
+ responseText: {
+ message: 'message'
+ },
+ status: 404
+ }, 'status', 'error', {
+ url: 'url'
+ }, {
+ requestId: 0,
+ serviceInfo: Em.Object.create({
+ status: item.status
+ })
+ });
+ expect(App.HostPopup.isAbortableByStatus(item.status)).to.equal(item.result);
+ });
});
});
[16/50] [abbrv] git commit: AMBARI-6910 IE doesn't handle download
configs errors
Posted by jo...@apache.org.
AMBARI-6910 IE doesn't handle download configs errors
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6cfdd1cf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6cfdd1cf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6cfdd1cf
Branch: refs/heads/branch-alerts-dev
Commit: 6cfdd1cfdc43e9b1c4a69106693ebd5b0e31aaac
Parents: 388c52e
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Tue Aug 19 14:44:03 2014 +0300
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Tue Aug 19 14:44:03 2014 +0300
----------------------------------------------------------------------
ambari-web/app/messages.js | 4 ++-
ambari-web/app/utils/components.js | 55 +++++++++++++++++++++------------
2 files changed, 39 insertions(+), 20 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/6cfdd1cf/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 5bd9a87..0a9fafd 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1187,7 +1187,9 @@ Em.I18n.translations = {
'services.service.actions.downloadClientConfigs':'Download Client Configs',
'services.service.actions.downloadClientConfigs.fail.noConfigFile':'No configuration files defined for the component',
'services.service.actions.downloadClientConfigs.fail.popup.header':'{0} Configs',
- 'services.service.actions.downloadClientConfigs.fail.popup.body':'Generation of {0} configurations file has failed with {1} error: <br /><pre><span class="text-error">{2}</span></pre>Do you want to try again?',
+ 'services.service.actions.downloadClientConfigs.fail.popup.body.noErrorMessage':'Generation of {0} configurations file has failed. ',
+ 'services.service.actions.downloadClientConfigs.fail.popup.body.errorMessage':'Generation of {0} configurations file has failed with {1} error: <br /><pre><span class="text-error">{2}</span></pre>',
+ 'services.service.actions.downloadClientConfigs.fail.popup.body.question':'Do you want to try again?',
'services.service.actions.run.rebalancer':'Run Rebalancer',
'services.service.actions.run.rebalanceHdfsNodes':'Rebalance HDFS',
'services.service.actions.run.rebalanceHdfsNodes.title':'HDFS Rebalance',
http://git-wip-us.apache.org/repos/asf/ambari/blob/6cfdd1cf/ambari-web/app/utils/components.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/components.js b/ambari-web/app/utils/components.js
index 3fb8d87..e72cc04 100644
--- a/ambari-web/app/utils/components.js
+++ b/ambari-web/app/utils/components.js
@@ -95,28 +95,45 @@ module.exports = {
var url = App.get('apiPrefix') + '/clusters/' + App.router.getClusterName() + '/' +
(isForHost ? 'hosts/' + data.hostName + '/host_components/' : 'services/' + data.serviceName + '/components/') +
data.componentName + '?format=client_config_tar';
- var self = this;
- $.fileDownload(url).fail(function (error) {
- var errorObj = JSON.parse($(error).text());
- var isNoConfigs = errorObj.message.contains(Em.I18n.t('services.service.actions.downloadClientConfigs.fail.noConfigFile'));
- var errorMessage = isNoConfigs ? Em.I18n.t('services.service.actions.downloadClientConfigs.fail.noConfigFile') :
- Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.body').format(data.displayName, errorObj.status, errorObj.message);
- App.ModalPopup.show({
- header: Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.header').format(data.displayName),
- bodyClass: Ember.View.extend({
- template: Em.Handlebars.compile(errorMessage)
- }),
- secondary: isNoConfigs ? false : Em.I18n.t('common.cancel'),
- onPrimary: function () {
- this.hide();
- if (!isNoConfigs) {
- self.downloadClientConfigs({
- context: Em.Object.create(data)
- })
+ try {
+ var self = this;
+ $.fileDownload(url).fail(function (error) {
+ var errorMessage = '';
+ var isNoConfigs = false;
+ if (error && $(error).text()) {
+ var errorObj = JSON.parse($(error).text());
+ if (errorObj && errorObj.message && errorObj.status) {
+ isNoConfigs = errorObj.message.indexOf(Em.I18n.t('services.service.actions.downloadClientConfigs.fail.noConfigFile')) !== -1;
+ errorMessage += isNoConfigs ? Em.I18n.t('services.service.actions.downloadClientConfigs.fail.noConfigFile') :
+ Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.body.errorMessage').format(data.displayName, errorObj.status, errorObj.message);
+ } else {
+ errorMessage += Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.body.noErrorMessage').format(data.displayName);
}
+ errorMessage += Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.body.question');
+ } else {
+ errorMessage += Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.body.noErrorMessage').format(data.displayName) +
+ Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.body.question');
}
+ App.ModalPopup.show({
+ header: Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.header').format(data.displayName),
+ bodyClass: Ember.View.extend({
+ template: Em.Handlebars.compile(errorMessage)
+ }),
+ secondary: isNoConfigs ? false : Em.I18n.t('common.cancel'),
+ onPrimary: function () {
+ this.hide();
+ if (!isNoConfigs) {
+ self.downloadClientConfigs({
+ context: Em.Object.create(data)
+ })
+ }
+ }
+ });
});
- });
+ } catch (err) {
+ var newWindow = window.open(url);
+ newWindow.focus();
+ }
}
};
\ No newline at end of file
[18/50] [abbrv] git commit: AMBARI-6913. A lot of Ganglia metrics are
absent on UI on Centos 5.9 (aonishuk)
Posted by jo...@apache.org.
AMBARI-6913. A lot of Ganglia metrics are absent on UI on Centos 5.9 (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0116db8d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0116db8d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0116db8d
Branch: refs/heads/branch-alerts-dev
Commit: 0116db8d5ebcfcd919bc908f51bc802dd0c8f018
Parents: 26c1edc
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Aug 19 16:03:34 2014 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Aug 19 16:03:34 2014 +0300
----------------------------------------------------------------------
.../2.0.6/services/GANGLIA/package/templates/rrd.py.j2 | 10 +++++++++-
1 file changed, 9 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/0116db8d/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/templates/rrd.py.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/templates/rrd.py.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/templates/rrd.py.j2
index 055c810..65d70e2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/templates/rrd.py.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/GANGLIA/package/templates/rrd.py.j2
@@ -18,6 +18,9 @@ See the License for the specific language governing permissions and
limitations under the License.
'''
+# NOTE: This script is executed by Python 2.4 on Centos 5.
+# Make sure your changes are compatible.
+
import cgi
import glob
import os
@@ -109,7 +112,12 @@ def collectStatMetrics(clusterName, hostName, metricName, files, cf, start, end,
if timestamp is None and stepsize is None and concreteMetricName is None:
timestamp = rrdMetric[0][0]
stepsize = rrdMetric[0][2]
- suffix = metricStat if not isRate else '_rate.' + metricStat
+
+ if not isRate:
+ suffix = metricStat
+ else:
+ suffix = '_rate.' + metricStat
+
concreteMetricName = file.split(os.sep).pop().replace('rrd', suffix)
metricValues = rrdMetric[2]
[29/50] [abbrv] git commit: AMBARI-6918. After putting RM in
MaintMode (and stopping component), service shows stopped (dlysnichenko)
Posted by jo...@apache.org.
AMBARI-6918. After putting RM in MaintMode (and stopping component), service shows stopped (dlysnichenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4644a826
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4644a826
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4644a826
Branch: refs/heads/branch-alerts-dev
Commit: 4644a826ba2b9a13a0815c356faaa9529e1922bc
Parents: 5aaa32c
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Tue Aug 19 17:47:16 2014 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Tue Aug 19 22:03:28 2014 +0300
----------------------------------------------------------------------
.../internal/ServiceResourceProvider.java | 52 +++++----
.../internal/ServiceResourceProviderTest.java | 113 +++++++++++++++++++
2 files changed, 146 insertions(+), 19 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/4644a826/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
index c77b8d6..f8a362d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
@@ -992,34 +992,48 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
if (componentInfo != null) {
State state = getHostComponentState(hostComponentResponse);
+ // Components in MM should not affect service status,
+ // so we tend to ignore them
+ boolean isInMaintenance = ! MaintenanceState.OFF.toString().
+ equals(hostComponentResponse.getMaintenanceState());
- if (state.equals(State.DISABLED)) {
+ if (state.equals(State.DISABLED) || isInMaintenance) {
hasDisabled = true;
- } else {
- if (componentInfo.isMaster()) {
+ }
+
+ if (componentInfo.isMaster()) {
+ if (state.equals(State.STARTED) || ! isInMaintenance) {
+ // We rely on master's state to determine service state
hasMaster = true;
- if(!state.equals(State.STARTED) &&
- ( masterState == null || state.ordinal() > masterState.ordinal())) {
- masterState = state;
- }
- } else if (componentInfo.isClient()) {
- hasClient = true;
- if (!state.equals(State.INSTALLED) &&
- (clientState == null || state.ordinal() > clientState.ordinal())) {
- clientState = state;
- }
- } else {
- hasOther = true;
- if (otherState == null || state.ordinal() > otherState.ordinal()) {
- otherState = state;
- }
+ }
+
+ if (! state.equals(State.STARTED) &&
+ ! isInMaintenance && // Ignore status of MM component
+ ( masterState == null || state.ordinal() > masterState.ordinal())) {
+ masterState = state;
+ }
+ } else if (componentInfo.isClient()) {
+ hasClient = true;
+ if (!state.equals(State.INSTALLED) &&
+ (clientState == null || state.ordinal() > clientState.ordinal())) {
+ clientState = state;
+ }
+ } else {
+ if (state.equals(State.STARTED) || ! isInMaintenance) {
+ // We rely on slaves's state to determine service state
+ hasOther = true;
+ }
+ if (! state.equals(State.STARTED) &&
+ ! isInMaintenance && // Ignore status of MM component
+ ( otherState == null || state.ordinal() > otherState.ordinal())) {
+ otherState = state;
}
}
}
}
return hasMaster ? masterState == null ? State.STARTED : masterState :
- hasOther ? otherState :
+ hasOther ? otherState == null ? State.STARTED : otherState :
hasClient ? clientState == null ? State.INSTALLED : clientState :
hasDisabled ? State.DISABLED : State.UNKNOWN;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/4644a826/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
index f37dea8..7dbc38b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
@@ -23,6 +23,7 @@ import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.isNull;
@@ -61,6 +62,7 @@ import org.apache.ambari.server.metadata.RoleCommandOrder;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.MaintenanceState;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.ServiceComponentHost;
@@ -674,8 +676,11 @@ public class ServiceResourceProviderTest {
ComponentInfo componentInfo = createNiceMock(ComponentInfo.class);
ServiceComponentHostResponse shr1 = new ServiceComponentHostResponse("C1", "MAPREDUCE", "JOBTRACKER", "Host100", "UNKNOWN", "", null, null, null);
+ shr1.setMaintenanceState(MaintenanceState.OFF.toString());
ServiceComponentHostResponse shr2 = new ServiceComponentHostResponse("C1", "MAPREDUCE", "MAPREDUCE_CLIENT", "Host100", "STARTED", "", null, null, null);
+ shr2.setMaintenanceState(MaintenanceState.OFF.toString());
ServiceComponentHostResponse shr3 = new ServiceComponentHostResponse("C1", "MAPREDUCE", "TASKTRACKER", "Host100", "STARTED", "", null, null, null);
+ shr3.setMaintenanceState(MaintenanceState.OFF.toString());
Set<ServiceComponentHostResponse> responses = new LinkedHashSet<ServiceComponentHostResponse>();
responses.add(shr1);
@@ -720,8 +725,11 @@ public class ServiceResourceProviderTest {
ComponentInfo componentInfo = createNiceMock(ComponentInfo.class);
ServiceComponentHostResponse shr1 = new ServiceComponentHostResponse("C1", "MAPREDUCE", "JOBTRACKER", "Host100", "STARTING", "", null, null, null);
+ shr1.setMaintenanceState(MaintenanceState.OFF.toString());
ServiceComponentHostResponse shr2 = new ServiceComponentHostResponse("C1", "MAPREDUCE", "MAPREDUCE_CLIENT", "Host100", "STARTED", "", null, null, null);
+ shr2.setMaintenanceState(MaintenanceState.OFF.toString());
ServiceComponentHostResponse shr3 = new ServiceComponentHostResponse("C1", "MAPREDUCE", "TASKTRACKER", "Host100", "STARTED", "", null, null, null);
+ shr3.setMaintenanceState(MaintenanceState.OFF.toString());
Set<ServiceComponentHostResponse> responses = new LinkedHashSet<ServiceComponentHostResponse>();
responses.add(shr1);
@@ -766,8 +774,11 @@ public class ServiceResourceProviderTest {
ComponentInfo componentInfo = createNiceMock(ComponentInfo.class);
ServiceComponentHostResponse shr1 = new ServiceComponentHostResponse("C1", "MAPREDUCE", "JOBTRACKER", "Host100", "INSTALLED", "", null, null, null);
+ shr1.setMaintenanceState(MaintenanceState.OFF.toString());
ServiceComponentHostResponse shr2 = new ServiceComponentHostResponse("C1", "MAPREDUCE", "MAPREDUCE_CLIENT", "Host100", "STARTED", "", null, null, null);
+ shr2.setMaintenanceState(MaintenanceState.OFF.toString());
ServiceComponentHostResponse shr3 = new ServiceComponentHostResponse("C1", "MAPREDUCE", "TASKTRACKER", "Host100", "STARTED", "", null, null, null);
+ shr3.setMaintenanceState(MaintenanceState.OFF.toString());
Set<ServiceComponentHostResponse> responses = new LinkedHashSet<ServiceComponentHostResponse>();
responses.add(shr1);
@@ -1269,6 +1280,108 @@ public class ServiceResourceProviderTest {
verify(managementController, clusters, cluster);
}
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testDefaultServiceState_Master_In_MM() throws Exception{
+ AmbariManagementController managementController = createMock(AmbariManagementController.class);
+ Clusters clusters = createNiceMock(Clusters.class);
+ Cluster cluster = createNiceMock(Cluster.class);
+ AmbariMetaInfo ambariMetaInfo = createNiceMock(AmbariMetaInfo.class);
+ StackId stackId = createNiceMock(StackId.class);
+ ComponentInfo componentInfo = createStrictMock(ComponentInfo.class);
+
+ ServiceComponentHostResponse shr1 = new ServiceComponentHostResponse("C1", "YARN", "RESOURCEMANAGER", "Host100", "INSTALLED", "", null, null, null);
+ shr1.setMaintenanceState(MaintenanceState.ON.toString());
+ ServiceComponentHostResponse shr2 = new ServiceComponentHostResponse("C1", "YARN", "RESOURCEMANAGER", "Host101", "STARTED", "", null, null, null);
+ shr2.setMaintenanceState(MaintenanceState.OFF.toString());
+ ServiceComponentHostResponse shr3 = new ServiceComponentHostResponse("C1", "YARN", "NODEMANAGER", "Host100", "STARTED", "", null, null, null);
+ shr3.setMaintenanceState(MaintenanceState.OFF.toString());
+
+ Set<ServiceComponentHostResponse> responses = new LinkedHashSet<ServiceComponentHostResponse>();
+ responses.add(shr1);
+ responses.add(shr2);
+ responses.add(shr3);
+
+ // set expectations
+ expect(managementController.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
+ expect(managementController.getClusters()).andReturn(clusters).anyTimes();
+ expect(clusters.getCluster("C1")).andReturn(cluster).anyTimes();
+ expect(managementController.getHostComponents((Set<ServiceComponentHostRequest>) anyObject())).andReturn(responses).anyTimes();
+ expect(cluster.getDesiredStackVersion()).andReturn(stackId);
+
+ expect(stackId.getStackName()).andReturn("S1").anyTimes();
+ expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
+
+
+ expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ (String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
+
+ expect(componentInfo.isMaster()).andReturn(true).times(2);
+ expect(componentInfo.isMaster()).andReturn(false);
+
+ expect(componentInfo.isClient()).andReturn(false);
+
+ // replay
+ replay(managementController, clusters, cluster, ambariMetaInfo, stackId, componentInfo);
+
+ ServiceResourceProvider.ServiceState serviceState = new ServiceResourceProvider.DefaultServiceState();
+
+ State state = serviceState.getState(managementController, "C1", "YARN");
+ Assert.assertEquals(State.STARTED, state);
+
+ // verify
+ verify(managementController, clusters, cluster, ambariMetaInfo, stackId, componentInfo);
+ }
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testDefaultServiceState_Slave_In_MM() throws Exception{
+ AmbariManagementController managementController = createMock(AmbariManagementController.class);
+ Clusters clusters = createNiceMock(Clusters.class);
+ Cluster cluster = createNiceMock(Cluster.class);
+ AmbariMetaInfo ambariMetaInfo = createNiceMock(AmbariMetaInfo.class);
+ StackId stackId = createNiceMock(StackId.class);
+ ComponentInfo componentInfo = createNiceMock(ComponentInfo.class);
+
+ ServiceComponentHostResponse shr1 = new ServiceComponentHostResponse("C1", "YARN", "NODEMANAGER", "Host100", "INSTALLED", "", null, null, null);
+ shr1.setMaintenanceState(MaintenanceState.ON.toString());
+ ServiceComponentHostResponse shr2 = new ServiceComponentHostResponse("C1", "YARN", "NODEMANAGER", "Host101", "STARTED", "", null, null, null);
+ shr2.setMaintenanceState(MaintenanceState.OFF.toString());
+
+ Set<ServiceComponentHostResponse> responses = new LinkedHashSet<ServiceComponentHostResponse>();
+ responses.add(shr1);
+ responses.add(shr2);
+
+ // set expectations
+ expect(managementController.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
+ expect(managementController.getClusters()).andReturn(clusters).anyTimes();
+ expect(clusters.getCluster("C1")).andReturn(cluster).anyTimes();
+ expect(managementController.getHostComponents((Set<ServiceComponentHostRequest>) anyObject())).andReturn(responses).anyTimes();
+ expect(cluster.getDesiredStackVersion()).andReturn(stackId);
+
+ expect(stackId.getStackName()).andReturn("S1").anyTimes();
+ expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
+
+
+ expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ (String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
+
+ expect(componentInfo.isMaster()).andReturn(false).anyTimes();
+
+ expect(componentInfo.isClient()).andReturn(false).anyTimes();
+
+ // replay
+ replay(managementController, clusters, cluster, ambariMetaInfo, stackId, componentInfo);
+
+ ServiceResourceProvider.ServiceState serviceState = new ServiceResourceProvider.DefaultServiceState();
+
+ State state = serviceState.getState(managementController, "C1", "YARN");
+ Assert.assertEquals(State.STARTED, state);
+
+ // verify
+ verify(managementController, clusters, cluster, ambariMetaInfo, stackId, componentInfo);
+ }
+
/**
* This factory method creates default MaintenanceStateHelper mock.
[38/50] [abbrv] git commit: AMBARI-6937. Move Wizard: infinite
spinner on step2. (akovalenko)
Posted by jo...@apache.org.
AMBARI-6937. Move Wizard: infinite spinner on step2. (akovalenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/35dbbe98
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/35dbbe98
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/35dbbe98
Branch: refs/heads/branch-alerts-dev
Commit: 35dbbe98daab67725dcc1f36039b3c116a81476c
Parents: 127eec2
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Wed Aug 20 14:53:12 2014 +0300
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Wed Aug 20 14:53:12 2014 +0300
----------------------------------------------------------------------
.../app/controllers/main/service/reassign/step2_controller.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/35dbbe98/ambari-web/app/controllers/main/service/reassign/step2_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/reassign/step2_controller.js b/ambari-web/app/controllers/main/service/reassign/step2_controller.js
index 0d1bd19..f837408 100644
--- a/ambari-web/app/controllers/main/service/reassign/step2_controller.js
+++ b/ambari-web/app/controllers/main/service/reassign/step2_controller.js
@@ -106,7 +106,7 @@ App.ReassignMasterWizardStep2Controller = App.WizardStep5Controller.extend({
return false;
},
- getIsSubmitDisabled: function () {
+ updateIsSubmitDisabled: function () {
var isSubmitDisabled = this._super();
if (!isSubmitDisabled) {
var reassigned = 0;
[21/50] [abbrv] git commit: AMBARI-6919. Abort request text popup
edits. (Max Shepel via akovalenko)
Posted by jo...@apache.org.
AMBARI-6919. Abort request text popup edits. (Max Shepel via akovalenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/27456750
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/27456750
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/27456750
Branch: refs/heads/branch-alerts-dev
Commit: 2745675078ad3ff895b069b1b82179c769442ddb
Parents: 1bd86fb
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Tue Aug 19 19:40:02 2014 +0300
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Tue Aug 19 19:40:02 2014 +0300
----------------------------------------------------------------------
ambari-web/app/messages.js | 2 +-
ambari-web/app/utils/host_progress_popup.js | 8 +++++++-
2 files changed, 8 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/27456750/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 0a9fafd..6e118ba 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -262,7 +262,7 @@ Em.I18n.translations = {
'hostPopup.bgop.abortRequest.confirmation.body': 'Are you sure you want to abort \'{0}\' operation?',
'hostPopup.bgop.abortRequest.reason': 'Aborted by user',
'hostPopup.bgop.abortRequest.modal.header': 'Abort request sent',
- 'hostPopup.bgop.abortRequest.modal.body': 'The request to abort \'{0}\' operation is sent to server. Note that some tasks that are already running may have enough time to finish as completed or failed ones before abort request is applied.',
+ 'hostPopup.bgop.abortRequest.modal.body': 'The abort request for <strong>{0}</strong> has been sent to the server. Note: some tasks that are already running may have time to complete or fail before the abort request is applied.',
'hostPopup.bgop.sourceRequestSchedule.running': 'Future operations of this batch request can be aborted',
'hostPopup.bgop.sourceRequestSchedule.aborted': 'Future operations of this batch request have been aborted',
'hostPopup.bgop.abort.rollingRestart': 'Abort Rolling Restart',
http://git-wip-us.apache.org/repos/asf/ambari/blob/27456750/ambari-web/app/utils/host_progress_popup.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/host_progress_popup.js b/ambari-web/app/utils/host_progress_popup.js
index 4d27132..cc687df 100644
--- a/ambari-web/app/utils/host_progress_popup.js
+++ b/ambari-web/app/utils/host_progress_popup.js
@@ -133,7 +133,13 @@ App.HostPopup = Em.Object.create({
* Method called on successful sending request to abort operation
*/
abortRequestSuccessCallback: function (response, request, data) {
- App.showAlertPopup(Em.I18n.t('hostPopup.bgop.abortRequest.modal.header'), Em.I18n.t('hostPopup.bgop.abortRequest.modal.body').format(data.requestName));
+ App.ModalPopup.show({
+ header: Em.I18n.t('hostPopup.bgop.abortRequest.modal.header'),
+ bodyClass: Em.View.extend({
+ template: Em.Handlebars.compile(Em.I18n.t('hostPopup.bgop.abortRequest.modal.body').format(data.requestName))
+ }),
+ secondary: null
+ });
},
/**
[47/50] [abbrv] git commit: AMBARI-6887. Alerts: groundwork for alert
collection (ncole)
Posted by jo...@apache.org.
AMBARI-6887. Alerts: groundwork for alert collection (ncole)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/14e79ed1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/14e79ed1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/14e79ed1
Branch: refs/heads/branch-alerts-dev
Commit: 14e79ed1fbd6b2891e1b66163c61eb360a4c6c38
Parents: 8e48128
Author: Nate Cole <nc...@hortonworks.com>
Authored: Sun Aug 17 19:26:50 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Aug 20 10:50:58 2014 -0400
----------------------------------------------------------------------
ambari-agent/pom.xml | 1 +
.../ambari_agent/AlertSchedulerHandler.py | 127 ++++
.../src/main/python/ambari_agent/Controller.py | 12 +-
.../main/python/ambari_agent/alerts/__init__.py | 18 +
.../python/ambari_agent/alerts/base_alert.py | 72 +++
.../python/ambari_agent/alerts/port_alert.py | 96 +++
.../python/ambari_agent/apscheduler/__init__.py | 3 +
.../python/ambari_agent/apscheduler/events.py | 64 ++
.../main/python/ambari_agent/apscheduler/job.py | 137 +++++
.../apscheduler/jobstores/__init__.py | 0
.../ambari_agent/apscheduler/jobstores/base.py | 25 +
.../apscheduler/jobstores/mongodb_store.py | 84 +++
.../apscheduler/jobstores/ram_store.py | 25 +
.../apscheduler/jobstores/redis_store.py | 91 +++
.../apscheduler/jobstores/shelve_store.py | 74 +++
.../apscheduler/jobstores/sqlalchemy_store.py | 91 +++
.../ambari_agent/apscheduler/scheduler.py | 607 +++++++++++++++++++
.../ambari_agent/apscheduler/threadpool.py | 133 ++++
.../apscheduler/triggers/__init__.py | 3 +
.../apscheduler/triggers/cron/__init__.py | 144 +++++
.../apscheduler/triggers/cron/expressions.py | 194 ++++++
.../apscheduler/triggers/cron/fields.py | 100 +++
.../apscheduler/triggers/interval.py | 39 ++
.../ambari_agent/apscheduler/triggers/simple.py | 17 +
.../python/ambari_agent/apscheduler/util.py | 230 +++++++
.../src/test/python/ambari_agent/TestAlerts.py | 73 +++
.../dummy_files/alert_definitions.json | 46 ++
27 files changed, 2505 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index ebf30fa..1c3083b 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -628,6 +628,7 @@
<exclude>src/test/python/ambari_agent/dummy_files/*</exclude>
<exclude>src/test/python/ambari_agent/dummy*.txt</exclude>
<exclude>src/main/python/ambari_agent/imports.txt</exclude>
+ <exclude>src/main/python/ambari_agent/apscheduler/**</exclude>
<exclude>**/*.erb</exclude>
<exclude>**/*.json</exclude>
<exclude>**/*.pydevproject</exclude>
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py b/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
new file mode 100644
index 0000000..cd0605f
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/AlertSchedulerHandler.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+'''
+http://apscheduler.readthedocs.org/en/v2.1.2
+'''
+from apscheduler.scheduler import Scheduler
+from alerts.port_alert import PortAlert
+import json
+import logging
+import sys
+import time
+
+logger = logging.getLogger()
+
+class AlertSchedulerHandler():
+
+ def __init__(self, filename, in_minutes=True):
+ self.filename = filename
+
+ config = {
+ 'threadpool.core_threads': 3,
+ 'coalesce': True,
+ 'standalone': False
+ }
+
+ self.scheduler = Scheduler(config)
+
+ alert_callables = self.__load_alerts()
+
+ for _callable in alert_callables:
+ if in_minutes:
+ self.scheduler.add_interval_job(self.__make_function(_callable),
+ minutes=_callable.interval())
+ else:
+ self.scheduler.add_interval_job(self.__make_function(_callable),
+ seconds=_callable.interval())
+
+ def __make_function(self, alert_def):
+ return lambda: alert_def.collect()
+
+ def start(self):
+ if not self.scheduler is None:
+ self.scheduler.start()
+
+ def stop(self):
+ if not self.scheduler is None:
+ self.scheduler.shutdown(wait=False)
+ self.scheduler = None
+
+ def __load_alerts(self):
+ definitions = []
+ try:
+ # FIXME make location configurable
+ with open(self.filename) as fp:
+ cluster_defs = json.load(fp)
+ for deflist in cluster_defs.values():
+ for definition in deflist:
+ obj = self.__json_to_callable(definition)
+ if obj is not None:
+ definitions.append(obj)
+ except:
+ import traceback
+ traceback.print_exc()
+ pass
+ return definitions
+
+ def __json_to_callable(self, json_definition):
+ source = json_definition['source']
+ source_type = source.get('type', '')
+
+ alert = None
+
+ if source_type == 'METRIC':
+ pass
+ elif source_type == 'PORT':
+ alert = PortAlert(json_definition, source)
+ elif type == 'SCRIPT':
+ pass
+
+ return alert
+
+ def __json_to_meta(self, json_definition):
+ pass
+
+def main():
+ args = list(sys.argv)
+ del args[0]
+
+ try:
+ logger.setLevel(logger.debug)
+ except TypeError:
+ logger.setLevel(12)
+
+ ash = AlertSchedulerHandler(args[0], False)
+ ash.start()
+
+ i = 0
+ try:
+ while i < 10:
+ time.sleep(1)
+ i += 1
+ except KeyboardInterrupt:
+ pass
+ ash.stop()
+
+if __name__ == "__main__":
+ main()
+
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/Controller.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/Controller.py b/ambari-agent/src/main/python/ambari_agent/Controller.py
index 87af939..3be54c2 100644
--- a/ambari-agent/src/main/python/ambari_agent/Controller.py
+++ b/ambari-agent/src/main/python/ambari_agent/Controller.py
@@ -39,7 +39,7 @@ import security
from NetUtil import NetUtil
import ssl
from LiveStatus import LiveStatus
-
+from AlertSchedulerHandler import AlertSchedulerHandler
logger = logging.getLogger()
@@ -73,6 +73,14 @@ class Controller(threading.Thread):
self.heartbeat_wait_event = threading.Event()
# List of callbacks that are called at agent registration
self.registration_listeners = []
+
+ # pull config directory out of config
+ cache_dir = config.get('agent', 'cache_dir')
+ if cache_dir is None:
+ cache_dir = '/var/lib/ambari-agent/cache'
+
+ self.alert_scheduler_handler = AlertSchedulerHandler(
+ os.path.join(cache_dir, 'alerts', 'alert_definitions.json'))
def __del__(self):
@@ -317,6 +325,8 @@ class Controller(threading.Thread):
message = registerResponse['response']
logger.info("Registration response from %s was %s", self.serverHostname, message)
+ self.alert_scheduler_handler.start()
+
if self.isRegistered:
# Clearing command queue to stop executing "stale" commands
# after registration
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/alerts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/__init__.py b/ambari-agent/src/main/python/ambari_agent/alerts/__init__.py
new file mode 100644
index 0000000..0a0e1ca
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/__init__.py
@@ -0,0 +1,18 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py b/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py
new file mode 100644
index 0000000..e102d56
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/base_alert.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import logging
+
+logger = logging.getLogger()
+
+class BaseAlert(object):
+ RESULT_OK = 'OK'
+ RESULT_WARNING = 'WARNING'
+ RESULT_CRITICAL = 'CRITICAL'
+ RESULT_UNKNOWN = 'UNKNOWN'
+
+ def __init__(self, alert_meta, alert_source_meta):
+ self.alert_meta = alert_meta
+ self.alert_source_meta = alert_source_meta
+
+ def interval(self):
+ if not self.alert_meta.has_key('interval'):
+ return 1
+ else:
+ return self.alert_meta['interval']
+
+ def collect(self):
+ res = (BaseAlert.RESULT_UNKNOWN, [])
+ try:
+ res = self._collect()
+ except Exception as e:
+ res = (BaseAlert.RESULT_CRITICAL, [str(e)])
+
+ res_base_text = self.alert_source_meta['reporting'][res[0].lower()]['text']
+
+ data = {}
+ data['name'] = self._find_value('name')
+ data['state'] = res[0]
+ data['text'] = res_base_text.format(*res[1])
+ # data['cluster'] = self._find_value('cluster') # not sure how to get this yet
+ data['service'] = self._find_value('service')
+ data['component'] = self._find_value('component')
+
+ print str(data)
+
+ def _find_value(self, meta_key):
+ if self.alert_meta.has_key(meta_key):
+ return self.alert_meta[meta_key]
+ else:
+ return None
+
+ def _collect(self):
+ '''
+ Low level function to collect alert data. The result is a tuple as:
+ res[0] = the result code
+ res[1] = the list of arguments supplied to the reporting text for the result code
+ '''
+ raise NotImplementedError
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py b/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
new file mode 100644
index 0000000..165f890
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import logging
+import re
+import socket
+import time
+import traceback
+from alerts.base_alert import BaseAlert
+from resource_management.libraries.functions.get_port_from_url import get_port_from_url
+
+logger = logging.getLogger()
+
+class PortAlert(BaseAlert):
+
+ def __init__(self, alert_meta, alert_source_meta):
+ super(PortAlert, self).__init__(alert_meta, alert_source_meta)
+
+ default_port = alert_source_meta['default_port']
+ uri = alert_source_meta['uri']
+
+ self.port = default_port
+ self.host = get_host_from_url(uri)
+
+ try:
+ self.port = int(get_port_from_url(uri))
+ except:
+ traceback.print_exc()
+ pass
+
+
+ def _collect(self):
+ s = None
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ s.settimeout(1.5)
+ t = time.time()
+ s.connect((self.host, self.port))
+ millis = time.time() - t
+ return (self.RESULT_OK, [millis/1000, self.port])
+ finally:
+ if s is not None:
+ try:
+ s.close()
+ except:
+ pass
+
+'''
+See RFC3986, Appendix B
+Tested on the following cases:
+ "192.168.54.1"
+ "192.168.54.2:7661
+ "hdfs://192.168.54.3/foo/bar"
+ "ftp://192.168.54.4:7842/foo/bar"
+'''
+def get_host_from_url(uri):
+ # RFC3986, Appendix B
+ parts = re.findall('^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?' , uri)
+
+ # index of parts
+ # scheme = 1
+ # authority = 3
+ # path = 4
+ # query = 6
+ # fragment = 8
+
+ host_and_port = uri
+ if 0 == len(parts[0][1]):
+ host_and_port = parts[0][4]
+ elif 0 == len(parts[0][2]):
+ host_and_port = parts[0][1]
+ elif parts[0][2].startswith("//"):
+ host_and_port = parts[0][3]
+
+ if -1 == host_and_port.find(':'):
+ return host_and_port
+ else:
+ return host_and_port.split(':')[0]
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/__init__.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/__init__.py
new file mode 100644
index 0000000..71cc53d
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/__init__.py
@@ -0,0 +1,3 @@
+version_info = (2, 1, 2)
+version = '.'.join(str(n) for n in version_info[:3])
+release = '.'.join(str(n) for n in version_info)
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/events.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/events.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/events.py
new file mode 100644
index 0000000..80bde8e
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/events.py
@@ -0,0 +1,64 @@
+__all__ = ('EVENT_SCHEDULER_START', 'EVENT_SCHEDULER_SHUTDOWN',
+ 'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED',
+ 'EVENT_JOBSTORE_JOB_ADDED', 'EVENT_JOBSTORE_JOB_REMOVED',
+ 'EVENT_JOB_EXECUTED', 'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED',
+ 'EVENT_ALL', 'SchedulerEvent', 'JobStoreEvent', 'JobEvent')
+
+
+EVENT_SCHEDULER_START = 1 # The scheduler was started
+EVENT_SCHEDULER_SHUTDOWN = 2 # The scheduler was shut down
+EVENT_JOBSTORE_ADDED = 4 # A job store was added to the scheduler
+EVENT_JOBSTORE_REMOVED = 8 # A job store was removed from the scheduler
+EVENT_JOBSTORE_JOB_ADDED = 16 # A job was added to a job store
+EVENT_JOBSTORE_JOB_REMOVED = 32 # A job was removed from a job store
+EVENT_JOB_EXECUTED = 64 # A job was executed successfully
+EVENT_JOB_ERROR = 128 # A job raised an exception during execution
+EVENT_JOB_MISSED = 256 # A job's execution was missed
+EVENT_ALL = (EVENT_SCHEDULER_START | EVENT_SCHEDULER_SHUTDOWN |
+ EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED |
+ EVENT_JOBSTORE_JOB_ADDED | EVENT_JOBSTORE_JOB_REMOVED |
+ EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED)
+
+
+class SchedulerEvent(object):
+ """
+ An event that concerns the scheduler itself.
+
+ :var code: the type code of this event
+ """
+ def __init__(self, code):
+ self.code = code
+
+
+class JobStoreEvent(SchedulerEvent):
+ """
+ An event that concerns job stores.
+
+ :var alias: the alias of the job store involved
+ :var job: the new job if a job was added
+ """
+ def __init__(self, code, alias, job=None):
+ SchedulerEvent.__init__(self, code)
+ self.alias = alias
+ if job:
+ self.job = job
+
+
+class JobEvent(SchedulerEvent):
+ """
+ An event that concerns the execution of individual jobs.
+
+ :var job: the job instance in question
+ :var scheduled_run_time: the time when the job was scheduled to be run
+ :var retval: the return value of the successfully executed job
+ :var exception: the exception raised by the job
+ :var traceback: the traceback object associated with the exception
+ """
+ def __init__(self, code, job, scheduled_run_time, retval=None,
+ exception=None, traceback=None):
+ SchedulerEvent.__init__(self, code)
+ self.job = job
+ self.scheduled_run_time = scheduled_run_time
+ self.retval = retval
+ self.exception = exception
+ self.traceback = traceback
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/job.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/job.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/job.py
new file mode 100644
index 0000000..cfc09a2
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/job.py
@@ -0,0 +1,137 @@
+"""
+Jobs represent scheduled tasks.
+"""
+
+from threading import Lock
+from datetime import timedelta
+
+from apscheduler.util import to_unicode, ref_to_obj, get_callable_name,\
+ obj_to_ref
+
+
+class MaxInstancesReachedError(Exception):
+ pass
+
+
+class Job(object):
+ """
+ Encapsulates the actual Job along with its metadata. Job instances
+ are created by the scheduler when adding jobs, and should not be
+ directly instantiated. These options can be set when adding jobs
+ to the scheduler (see :ref:`job_options`).
+
+ :var trigger: trigger that determines the execution times
+ :var func: callable to call when the trigger is triggered
+ :var args: list of positional arguments to call func with
+ :var kwargs: dict of keyword arguments to call func with
+ :var name: name of the job
+ :var misfire_grace_time: seconds after the designated run time that
+ the job is still allowed to be run
+ :var coalesce: run once instead of many times if the scheduler determines
+ that the job should be run more than once in succession
+ :var max_runs: maximum number of times this job is allowed to be
+ triggered
+ :var max_instances: maximum number of concurrently running
+ instances allowed for this job
+ :var runs: number of times this job has been triggered
+ :var instances: number of concurrently running instances of this job
+ """
+ id = None
+ next_run_time = None
+
+ def __init__(self, trigger, func, args, kwargs, misfire_grace_time,
+ coalesce, name=None, max_runs=None, max_instances=1):
+ if not trigger:
+ raise ValueError('The trigger must not be None')
+ if not hasattr(func, '__call__'):
+ raise TypeError('func must be callable')
+ if not hasattr(args, '__getitem__'):
+ raise TypeError('args must be a list-like object')
+ if not hasattr(kwargs, '__getitem__'):
+ raise TypeError('kwargs must be a dict-like object')
+ if misfire_grace_time <= 0:
+ raise ValueError('misfire_grace_time must be a positive value')
+ if max_runs is not None and max_runs <= 0:
+ raise ValueError('max_runs must be a positive value')
+ if max_instances <= 0:
+ raise ValueError('max_instances must be a positive value')
+
+ self._lock = Lock()
+
+ self.trigger = trigger
+ self.func = func
+ self.args = args
+ self.kwargs = kwargs
+ self.name = to_unicode(name or get_callable_name(func))
+ self.misfire_grace_time = misfire_grace_time
+ self.coalesce = coalesce
+ self.max_runs = max_runs
+ self.max_instances = max_instances
+ self.runs = 0
+ self.instances = 0
+
+ def compute_next_run_time(self, now):
+ if self.runs == self.max_runs:
+ self.next_run_time = None
+ else:
+ self.next_run_time = self.trigger.get_next_fire_time(now)
+
+ return self.next_run_time
+
+ def get_run_times(self, now):
+ """
+ Computes the scheduled run times between ``next_run_time`` and ``now``.
+ """
+ run_times = []
+ run_time = self.next_run_time
+ increment = timedelta(microseconds=1)
+ while ((not self.max_runs or self.runs < self.max_runs) and
+ run_time and run_time <= now):
+ run_times.append(run_time)
+ run_time = self.trigger.get_next_fire_time(run_time + increment)
+
+ return run_times
+
+ def add_instance(self):
+ self._lock.acquire()
+ try:
+ if self.instances == self.max_instances:
+ raise MaxInstancesReachedError
+ self.instances += 1
+ finally:
+ self._lock.release()
+
+ def remove_instance(self):
+ self._lock.acquire()
+ try:
+ assert self.instances > 0, 'Already at 0 instances'
+ self.instances -= 1
+ finally:
+ self._lock.release()
+
+ def __getstate__(self):
+ # Prevents the unwanted pickling of transient or unpicklable variables
+ state = self.__dict__.copy()
+ state.pop('instances', None)
+ state.pop('func', None)
+ state.pop('_lock', None)
+ state['func_ref'] = obj_to_ref(self.func)
+ return state
+
+ def __setstate__(self, state):
+ state['instances'] = 0
+ state['func'] = ref_to_obj(state.pop('func_ref'))
+ state['_lock'] = Lock()
+ self.__dict__ = state
+
+ def __eq__(self, other):
+ if isinstance(other, Job):
+ return self.id is not None and other.id == self.id or self is other
+ return NotImplemented
+
+ def __repr__(self):
+ return '<Job (name=%s, trigger=%s)>' % (self.name, repr(self.trigger))
+
+ def __str__(self):
+ return '%s (trigger: %s, next run at: %s)' % (
+ self.name, str(self.trigger), str(self.next_run_time))
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/__init__.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/__init__.py
new file mode 100644
index 0000000..e69de29
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/base.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/base.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/base.py
new file mode 100644
index 0000000..f0a16dd
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/base.py
@@ -0,0 +1,25 @@
+"""
+Abstract base class that provides the interface needed by all job stores.
+Job store methods are also documented here.
+"""
+
+
+class JobStore(object):
+ def add_job(self, job):
+ """Adds the given job from this store."""
+ raise NotImplementedError
+
+ def update_job(self, job):
+ """Persists the running state of the given job."""
+ raise NotImplementedError
+
+ def remove_job(self, job):
+ """Removes the given jobs from this store."""
+ raise NotImplementedError
+
+ def load_jobs(self):
+ """Loads jobs from this store into memory."""
+ raise NotImplementedError
+
+ def close(self):
+ """Frees any resources still bound to this job store."""
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/mongodb_store.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/mongodb_store.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/mongodb_store.py
new file mode 100644
index 0000000..3f522c2
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/mongodb_store.py
@@ -0,0 +1,84 @@
+"""
+Stores jobs in a MongoDB database.
+"""
+import logging
+
+from apscheduler.jobstores.base import JobStore
+from apscheduler.job import Job
+
+try:
+ import cPickle as pickle
+except ImportError: # pragma: nocover
+ import pickle
+
+try:
+ from bson.binary import Binary
+ from pymongo.connection import Connection
+except ImportError: # pragma: nocover
+ raise ImportError('MongoDBJobStore requires PyMongo installed')
+
+logger = logging.getLogger(__name__)
+
+
+class MongoDBJobStore(JobStore):
+ def __init__(self, database='apscheduler', collection='jobs',
+ connection=None, pickle_protocol=pickle.HIGHEST_PROTOCOL,
+ **connect_args):
+ self.jobs = []
+ self.pickle_protocol = pickle_protocol
+
+ if not database:
+ raise ValueError('The "database" parameter must not be empty')
+ if not collection:
+ raise ValueError('The "collection" parameter must not be empty')
+
+ if connection:
+ self.connection = connection
+ else:
+ self.connection = Connection(**connect_args)
+
+ self.collection = self.connection[database][collection]
+
+ def add_job(self, job):
+ job_dict = job.__getstate__()
+ job_dict['trigger'] = Binary(pickle.dumps(job.trigger,
+ self.pickle_protocol))
+ job_dict['args'] = Binary(pickle.dumps(job.args,
+ self.pickle_protocol))
+ job_dict['kwargs'] = Binary(pickle.dumps(job.kwargs,
+ self.pickle_protocol))
+ job.id = self.collection.insert(job_dict)
+ self.jobs.append(job)
+
+ def remove_job(self, job):
+ self.collection.remove(job.id)
+ self.jobs.remove(job)
+
+ def load_jobs(self):
+ jobs = []
+ for job_dict in self.collection.find():
+ try:
+ job = Job.__new__(Job)
+ job_dict['id'] = job_dict.pop('_id')
+ job_dict['trigger'] = pickle.loads(job_dict['trigger'])
+ job_dict['args'] = pickle.loads(job_dict['args'])
+ job_dict['kwargs'] = pickle.loads(job_dict['kwargs'])
+ job.__setstate__(job_dict)
+ jobs.append(job)
+ except Exception:
+ job_name = job_dict.get('name', '(unknown)')
+ logger.exception('Unable to restore job "%s"', job_name)
+ self.jobs = jobs
+
+ def update_job(self, job):
+ spec = {'_id': job.id}
+ document = {'$set': {'next_run_time': job.next_run_time},
+ '$inc': {'runs': 1}}
+ self.collection.update(spec, document)
+
+ def close(self):
+ self.connection.disconnect()
+
+ def __repr__(self):
+ connection = self.collection.database.connection
+ return '<%s (connection=%s)>' % (self.__class__.__name__, connection)
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/ram_store.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/ram_store.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/ram_store.py
new file mode 100644
index 0000000..60458fb
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/ram_store.py
@@ -0,0 +1,25 @@
+"""
+Stores jobs in an array in RAM. Provides no persistence support.
+"""
+
+from apscheduler.jobstores.base import JobStore
+
+
+class RAMJobStore(JobStore):
+    """In-memory job store; jobs live only for the scheduler's lifetime."""
+
+    def __init__(self):
+        self.jobs = []
+
+    def add_job(self, job):
+        self.jobs.append(job)
+
+    def update_job(self, job):
+        # Nothing to persist -- the Job object in self.jobs is the only copy.
+        pass
+
+    def remove_job(self, job):
+        self.jobs.remove(job)
+
+    def load_jobs(self):
+        # No backing storage to load from.
+        pass
+
+    def __repr__(self):
+        return '<%s>' % (self.__class__.__name__)
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/redis_store.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/redis_store.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/redis_store.py
new file mode 100644
index 0000000..5eabf4b
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/redis_store.py
@@ -0,0 +1,91 @@
+"""
+Stores jobs in a Redis database.
+"""
+from uuid import uuid4
+from datetime import datetime
+import logging
+
+from apscheduler.jobstores.base import JobStore
+from apscheduler.job import Job
+
+try:
+ import cPickle as pickle
+except ImportError: # pragma: nocover
+ import pickle
+
+try:
+ from redis import StrictRedis
+except ImportError: # pragma: nocover
+ raise ImportError('RedisJobStore requires redis installed')
+
+try:
+ long = long
+except NameError:
+ long = int
+
+logger = logging.getLogger(__name__)
+
+
+class RedisJobStore(JobStore):
+    def __init__(self, db=0, key_prefix='jobs.',
+                 pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
+        """
+        Job store backed by Redis: one hash per job, stored under
+        ``key_prefix + job.id``.  Extra keyword arguments are passed
+        through to :class:`redis.StrictRedis`.
+        """
+        self.jobs = []
+        self.pickle_protocol = pickle_protocol
+        self.key_prefix = key_prefix
+
+        if db is None:
+            raise ValueError('The "db" parameter must not be empty')
+        if not key_prefix:
+            raise ValueError('The "key_prefix" parameter must not be empty')
+
+        self.redis = StrictRedis(db=db, **connect_args)
+
+    def add_job(self, job):
+        # Redis has no autoincrement ids, so a UUID is assigned client-side.
+        job.id = str(uuid4())
+        job_state = job.__getstate__()
+        # next_run_time is also stored as a separate human-readable hash
+        # field (the dict literal pickles job_state before popping it).
+        job_dict = {
+            'job_state': pickle.dumps(job_state, self.pickle_protocol),
+            'runs': '0',
+            'next_run_time': job_state.pop('next_run_time').isoformat()}
+        self.redis.hmset(self.key_prefix + job.id, job_dict)
+        self.jobs.append(job)
+
+    def remove_job(self, job):
+        self.redis.delete(self.key_prefix + job.id)
+        self.jobs.remove(job)
+
+    def load_jobs(self):
+        jobs = []
+        keys = self.redis.keys(self.key_prefix + '*')
+        # Fetch all job hashes in a single round trip via a pipeline.
+        pipeline = self.redis.pipeline()
+        for key in keys:
+            pipeline.hgetall(key)
+        results = pipeline.execute()
+
+        for job_dict in results:
+            job_state = {}
+            try:
+                job = Job.__new__(Job)
+                # Hash fields come back as bytes on Python 3, hence the
+                # encode()/decode() dance around field names and values.
+                job_state = pickle.loads(job_dict['job_state'.encode()])
+                job_state['runs'] = long(job_dict['runs'.encode()])
+                dateval = job_dict['next_run_time'.encode()].decode()
+                # NOTE(review): this format has no microseconds, but
+                # isoformat() emits them when present -- confirm
+                # next_run_time is always whole seconds.
+                job_state['next_run_time'] = datetime.strptime(
+                    dateval, '%Y-%m-%dT%H:%M:%S')
+                job.__setstate__(job_state)
+                jobs.append(job)
+            except Exception:
+                job_name = job_state.get('name', '(unknown)')
+                logger.exception('Unable to restore job "%s"', job_name)
+        self.jobs = jobs
+
+    def update_job(self, job):
+        # Rewrite only the mutable fields; the pickled job_state is left
+        # untouched.
+        attrs = {
+            'next_run_time': job.next_run_time.isoformat(),
+            'runs': job.runs}
+        self.redis.hmset(self.key_prefix + job.id, attrs)
+
+    def close(self):
+        self.redis.connection_pool.disconnect()
+
+    def __repr__(self):
+        return '<%s>' % self.__class__.__name__
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/shelve_store.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/shelve_store.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/shelve_store.py
new file mode 100644
index 0000000..d1be58f
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/shelve_store.py
@@ -0,0 +1,74 @@
+"""
+Stores jobs in a file governed by the :mod:`shelve` module.
+"""
+
+import shelve
+import pickle
+import random
+import logging
+
+from apscheduler.jobstores.base import JobStore
+from apscheduler.job import Job
+from apscheduler.util import itervalues
+
+logger = logging.getLogger(__name__)
+
+
+class ShelveJobStore(JobStore):
+    # Upper bound for randomly generated job ids.
+    MAX_ID = 1000000
+
+    def __init__(self, path, pickle_protocol=pickle.HIGHEST_PROTOCOL):
+        self.jobs = []
+        self.path = path
+        self.pickle_protocol = pickle_protocol
+        self._open_store()
+
+    def _open_store(self):
+        # 'c' opens the shelf read/write, creating the file if necessary.
+        self.store = shelve.open(self.path, 'c', self.pickle_protocol)
+
+    def _generate_id(self):
+        # Draw a random id and return it if unused.
+        # NOTE(review): on a collision the while condition ('not id') is
+        # false and the function falls through, implicitly returning None
+        # -- a latent bug, unlikely at 1 in MAX_ID odds. Also 'id' shadows
+        # the builtin.
+        id = None
+        while not id:
+            id = str(random.randint(1, self.MAX_ID))
+            if not id in self.store:
+                return id
+
+    def add_job(self, job):
+        job.id = self._generate_id()
+        self.store[job.id] = job.__getstate__()
+        # Close and reopen to force the shelf to flush to disk.
+        self.store.close()
+        self._open_store()
+        self.jobs.append(job)
+
+    def update_job(self, job):
+        # Read-modify-write: shelve does not track in-place mutation of
+        # stored values, so the dict must be reassigned.
+        job_dict = self.store[job.id]
+        job_dict['next_run_time'] = job.next_run_time
+        job_dict['runs'] = job.runs
+        self.store[job.id] = job_dict
+        self.store.close()
+        self._open_store()
+
+    def remove_job(self, job):
+        del self.store[job.id]
+        self.store.close()
+        self._open_store()
+        self.jobs.remove(job)
+
+    def load_jobs(self):
+        # Restore all persisted jobs, skipping (and logging) any whose
+        # state can no longer be applied.
+        jobs = []
+        for job_dict in itervalues(self.store):
+            try:
+                job = Job.__new__(Job)
+                job.__setstate__(job_dict)
+                jobs.append(job)
+            except Exception:
+                job_name = job_dict.get('name', '(unknown)')
+                logger.exception('Unable to restore job "%s"', job_name)
+
+        self.jobs = jobs
+
+    def close(self):
+        self.store.close()
+
+    def __repr__(self):
+        return '<%s (path=%s)>' % (self.__class__.__name__, self.path)
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/sqlalchemy_store.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/sqlalchemy_store.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/sqlalchemy_store.py
new file mode 100644
index 0000000..5b64a35
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/jobstores/sqlalchemy_store.py
@@ -0,0 +1,91 @@
+"""
+Stores jobs in a database table using SQLAlchemy.
+"""
+import pickle
+import logging
+
+import sqlalchemy
+
+from apscheduler.jobstores.base import JobStore
+from apscheduler.job import Job
+
+try:
+ from sqlalchemy import *
+except ImportError: # pragma: nocover
+ raise ImportError('SQLAlchemyJobStore requires SQLAlchemy installed')
+
+logger = logging.getLogger(__name__)
+
+
+class SQLAlchemyJobStore(JobStore):
+    def __init__(self, url=None, engine=None, tablename='apscheduler_jobs',
+                 metadata=None, pickle_protocol=pickle.HIGHEST_PROTOCOL):
+        """
+        Job store backed by a relational table; exactly one of ``url`` or
+        ``engine`` must be supplied.
+        """
+        self.jobs = []
+        self.pickle_protocol = pickle_protocol
+
+        if engine:
+            self.engine = engine
+        elif url:
+            self.engine = create_engine(url)
+        else:
+            raise ValueError('Need either "engine" or "url" defined')
+
+        # SQLAlchemy < 0.7 requires mutable=False for PickleType.
+        # NOTE(review): lexicographic version comparison breaks at '0.10';
+        # confirm the supported SQLAlchemy range.
+        if sqlalchemy.__version__ < '0.7':
+            pickle_coltype = PickleType(pickle_protocol, mutable=False)
+        else:
+            pickle_coltype = PickleType(pickle_protocol)
+        self.jobs_t = Table(
+            tablename, metadata or MetaData(),
+            Column('id', Integer,
+                   Sequence(tablename + '_id_seq', optional=True),
+                   primary_key=True),
+            Column('trigger', pickle_coltype, nullable=False),
+            Column('func_ref', String(1024), nullable=False),
+            Column('args', pickle_coltype, nullable=False),
+            Column('kwargs', pickle_coltype, nullable=False),
+            Column('name', Unicode(1024)),
+            Column('misfire_grace_time', Integer, nullable=False),
+            Column('coalesce', Boolean, nullable=False),
+            Column('max_runs', Integer),
+            Column('max_instances', Integer),
+            Column('next_run_time', DateTime, nullable=False),
+            Column('runs', BigInteger))
+
+        # Create the table if it does not exist yet (checkfirst=True).
+        self.jobs_t.create(self.engine, True)
+
+    def add_job(self, job):
+        # Insert the job's state and record the database-generated
+        # primary key on the job.
+        job_dict = job.__getstate__()
+        result = self.engine.execute(self.jobs_t.insert().values(**job_dict))
+        job.id = result.inserted_primary_key[0]
+        self.jobs.append(job)
+
+    def remove_job(self, job):
+        delete = self.jobs_t.delete().where(self.jobs_t.c.id == job.id)
+        self.engine.execute(delete)
+        self.jobs.remove(job)
+
+    def load_jobs(self):
+        # Restore all rows as Job objects; rows whose state cannot be
+        # applied are skipped with a logged traceback.
+        jobs = []
+        for row in self.engine.execute(select([self.jobs_t])):
+            try:
+                job = Job.__new__(Job)
+                job_dict = dict(row.items())
+                job.__setstate__(job_dict)
+                jobs.append(job)
+            except Exception:
+                job_name = job_dict.get('name', '(unknown)')
+                logger.exception('Unable to restore job "%s"', job_name)
+        self.jobs = jobs
+
+    def update_job(self, job):
+        # Persist only the fields that change after a run.
+        job_dict = job.__getstate__()
+        update = self.jobs_t.update().where(self.jobs_t.c.id == job.id).\
+            values(next_run_time=job_dict['next_run_time'],
+                   runs=job_dict['runs'])
+        self.engine.execute(update)
+
+    def close(self):
+        self.engine.dispose()
+
+    def __repr__(self):
+        return '<%s (url=%s)>' % (self.__class__.__name__, self.engine.url)
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/scheduler.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/scheduler.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/scheduler.py
new file mode 100644
index 0000000..319037a
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/scheduler.py
@@ -0,0 +1,607 @@
+"""
+This module is the main part of the library. It houses the Scheduler class
+and related exceptions.
+"""
+
+from threading import Thread, Event, Lock
+from datetime import datetime, timedelta
+from logging import getLogger
+import os
+import sys
+
+from apscheduler.util import *
+from apscheduler.triggers import SimpleTrigger, IntervalTrigger, CronTrigger
+from apscheduler.jobstores.ram_store import RAMJobStore
+from apscheduler.job import Job, MaxInstancesReachedError
+from apscheduler.events import *
+from apscheduler.threadpool import ThreadPool
+
+logger = getLogger(__name__)
+
+
+class SchedulerAlreadyRunningError(Exception):
+    """
+    Raised when attempting to start or configure the scheduler when it's
+    already running.
+    """
+
+    def __str__(self):
+        # Fixed message; the exception carries no extra state.
+        return 'Scheduler is already running'
+
+
+class Scheduler(object):
+    """
+    This class is responsible for scheduling jobs and triggering
+    their execution.
+    """
+
+    _stopped = True
+    _thread = None
+
+    def __init__(self, gconfig={}, **options):
+        # NOTE(review): mutable default argument -- acceptable only
+        # because configure() never mutates gconfig; confirm before
+        # changing.
+        self._wakeup = Event()
+        self._jobstores = {}
+        self._jobstores_lock = Lock()
+        self._listeners = []
+        self._listeners_lock = Lock()
+        self._pending_jobs = []
+        self.configure(gconfig, **options)
+
+    def configure(self, gconfig={}, **options):
+        """
+        Reconfigures the scheduler with the given options. Can only be done
+        when the scheduler isn't running.
+        """
+        if self.running:
+            raise SchedulerAlreadyRunningError
+
+        # Set general options
+        config = combine_opts(gconfig, 'apscheduler.', options)
+        self.misfire_grace_time = int(config.pop('misfire_grace_time', 1))
+        self.coalesce = asbool(config.pop('coalesce', True))
+        self.daemonic = asbool(config.pop('daemonic', True))
+        self.standalone = asbool(config.pop('standalone', False))
+
+        # Configure the thread pool
+        if 'threadpool' in config:
+            self._threadpool = maybe_ref(config['threadpool'])
+        else:
+            threadpool_opts = combine_opts(config, 'threadpool.')
+            self._threadpool = ThreadPool(**threadpool_opts)
+
+        # Configure job stores: group 'jobstore.<alias>.<option>' keys by
+        # alias, instantiate the class named by each alias's 'class' option.
+        jobstore_opts = combine_opts(config, 'jobstore.')
+        jobstores = {}
+        for key, value in jobstore_opts.items():
+            store_name, option = key.split('.', 1)
+            opts_dict = jobstores.setdefault(store_name, {})
+            opts_dict[option] = value
+
+        for alias, opts in jobstores.items():
+            classname = opts.pop('class')
+            cls = maybe_ref(classname)
+            jobstore = cls(**opts)
+            self.add_jobstore(jobstore, alias, True)
+
+    def start(self):
+        """
+        Starts the scheduler in a new thread.
+
+        In threaded mode (the default), this method will return immediately
+        after starting the scheduler thread.
+
+        In standalone mode, this method will block until there are no more
+        scheduled jobs.
+        """
+        if self.running:
+            raise SchedulerAlreadyRunningError
+
+        # Create a RAMJobStore as the default if there is no default job store
+        if not 'default' in self._jobstores:
+            self.add_jobstore(RAMJobStore(), 'default', True)
+
+        # Schedule all pending jobs
+        for job, jobstore in self._pending_jobs:
+            self._real_add_job(job, jobstore, False)
+        del self._pending_jobs[:]
+
+        self._stopped = False
+        if self.standalone:
+            # Runs in the caller's thread; blocks until no jobs remain.
+            self._main_loop()
+        else:
+            self._thread = Thread(target=self._main_loop, name='APScheduler')
+            self._thread.setDaemon(self.daemonic)
+            self._thread.start()
+
+    def shutdown(self, wait=True, shutdown_threadpool=True,
+                 close_jobstores=True):
+        """
+        Shuts down the scheduler and terminates the thread.
+        Does not interrupt any currently running jobs.
+
+        :param wait: ``True`` to wait until all currently executing jobs have
+            finished (if ``shutdown_threadpool`` is also ``True``)
+        :param shutdown_threadpool: ``True`` to shut down the thread pool
+        :param close_jobstores: ``True`` to close all job stores after shutdown
+        """
+        if not self.running:
+            return
+
+        self._stopped = True
+        self._wakeup.set()
+
+        # Shut down the thread pool
+        if shutdown_threadpool:
+            self._threadpool.shutdown(wait)
+
+        # Wait until the scheduler thread terminates
+        if self._thread:
+            self._thread.join()
+
+        # Close all job stores
+        if close_jobstores:
+            for jobstore in itervalues(self._jobstores):
+                jobstore.close()
+
+    @property
+    def running(self):
+        # In standalone mode there is no scheduler thread, so only the
+        # stopped flag matters.
+        thread_alive = self._thread and self._thread.isAlive()
+        standalone = getattr(self, 'standalone', False)
+        return not self._stopped and (standalone or thread_alive)
+
+    def add_jobstore(self, jobstore, alias, quiet=False):
+        """
+        Adds a job store to this scheduler.
+
+        :param jobstore: job store to be added
+        :param alias: alias for the job store
+        :param quiet: True to suppress scheduler thread wakeup
+        :type jobstore: instance of
+            :class:`~apscheduler.jobstores.base.JobStore`
+        :type alias: str
+        """
+        self._jobstores_lock.acquire()
+        try:
+            if alias in self._jobstores:
+                raise KeyError('Alias "%s" is already in use' % alias)
+            self._jobstores[alias] = jobstore
+            jobstore.load_jobs()
+        finally:
+            self._jobstores_lock.release()
+
+        # Notify listeners that a new job store has been added
+        self._notify_listeners(JobStoreEvent(EVENT_JOBSTORE_ADDED, alias))
+
+        # Notify the scheduler so it can scan the new job store for jobs
+        if not quiet:
+            self._wakeup.set()
+
+    def remove_jobstore(self, alias, close=True):
+        """
+        Removes the job store by the given alias from this scheduler.
+
+        :param close: ``True`` to close the job store after removing it
+        :type alias: str
+        """
+        self._jobstores_lock.acquire()
+        try:
+            # NOTE(review): pop() without a default already raises KeyError
+            # for a missing alias, so the 'if not jobstore' check below is
+            # effectively dead code.
+            jobstore = self._jobstores.pop(alias)
+            if not jobstore:
+                raise KeyError('No such job store: %s' % alias)
+        finally:
+            self._jobstores_lock.release()
+
+        # Close the job store if requested
+        if close:
+            jobstore.close()
+
+        # Notify listeners that a job store has been removed
+        self._notify_listeners(JobStoreEvent(EVENT_JOBSTORE_REMOVED, alias))
+
+    def add_listener(self, callback, mask=EVENT_ALL):
+        """
+        Adds a listener for scheduler events. When a matching event occurs,
+        ``callback`` is executed with the event object as its sole argument.
+        If the ``mask`` parameter is not provided, the callback will receive
+        events of all types.
+
+        :param callback: any callable that takes one argument
+        :param mask: bitmask that indicates which events should be listened to
+        """
+        self._listeners_lock.acquire()
+        try:
+            self._listeners.append((callback, mask))
+        finally:
+            self._listeners_lock.release()
+
+    def remove_listener(self, callback):
+        """
+        Removes a previously added event listener.
+        """
+        self._listeners_lock.acquire()
+        try:
+            for i, (cb, _) in enumerate(self._listeners):
+                if callback == cb:
+                    del self._listeners[i]
+        finally:
+            self._listeners_lock.release()
+
+    def _notify_listeners(self, event):
+        # Snapshot the listener list under the lock, then dispatch outside
+        # it so a slow or faulty listener cannot block add/remove_listener.
+        self._listeners_lock.acquire()
+        try:
+            listeners = tuple(self._listeners)
+        finally:
+            self._listeners_lock.release()
+
+        for cb, mask in listeners:
+            if event.code & mask:
+                try:
+                    cb(event)
+                except:
+                    logger.exception('Error notifying listener')
+
+    def _real_add_job(self, job, jobstore, wakeup):
+        # Compute the first run time; a job that would never run is rejected.
+        job.compute_next_run_time(datetime.now())
+        if not job.next_run_time:
+            raise ValueError('Not adding job since it would never be run')
+
+        self._jobstores_lock.acquire()
+        try:
+            try:
+                store = self._jobstores[jobstore]
+            except KeyError:
+                raise KeyError('No such job store: %s' % jobstore)
+            store.add_job(job)
+        finally:
+            self._jobstores_lock.release()
+
+        # Notify listeners that a new job has been added
+        event = JobStoreEvent(EVENT_JOBSTORE_JOB_ADDED, jobstore, job)
+        self._notify_listeners(event)
+
+        logger.info('Added job "%s" to job store "%s"', job, jobstore)
+
+        # Notify the scheduler about the new job
+        if wakeup:
+            self._wakeup.set()
+
+    def add_job(self, trigger, func, args, kwargs, jobstore='default',
+                **options):
+        """
+        Adds the given job to the job list and notifies the scheduler thread.
+        Any extra keyword arguments are passed along to the constructor of the
+        :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+
+        :param trigger: trigger that determines when ``func`` is called
+        :param func: callable to run at the given time
+        :param args: list of positional arguments to call func with
+        :param kwargs: dict of keyword arguments to call func with
+        :param jobstore: alias of the job store to store the job in
+        :rtype: :class:`~apscheduler.job.Job`
+        """
+        job = Job(trigger, func, args or [], kwargs or {},
+                  options.pop('misfire_grace_time', self.misfire_grace_time),
+                  options.pop('coalesce', self.coalesce), **options)
+        if not self.running:
+            # Defer to start(), which flushes _pending_jobs.
+            self._pending_jobs.append((job, jobstore))
+            logger.info('Adding job tentatively -- it will be properly '
+                        'scheduled when the scheduler starts')
+        else:
+            self._real_add_job(job, jobstore, True)
+        return job
+
+    def _remove_job(self, job, alias, jobstore):
+        # Caller must hold _jobstores_lock.
+        jobstore.remove_job(job)
+
+        # Notify listeners that a job has been removed
+        event = JobStoreEvent(EVENT_JOBSTORE_JOB_REMOVED, alias, job)
+        self._notify_listeners(event)
+
+        logger.info('Removed job "%s"', job)
+
+    def add_date_job(self, func, date, args=None, kwargs=None, **options):
+        """
+        Schedules a job to be completed on a specific date and time.
+        Any extra keyword arguments are passed along to the constructor of the
+        :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+
+        :param func: callable to run at the given time
+        :param date: the date/time to run the job at
+        :param name: name of the job
+        :param jobstore: stored the job in the named (or given) job store
+        :param misfire_grace_time: seconds after the designated run time that
+            the job is still allowed to be run
+        :type date: :class:`datetime.date`
+        :rtype: :class:`~apscheduler.job.Job`
+        """
+        trigger = SimpleTrigger(date)
+        return self.add_job(trigger, func, args, kwargs, **options)
+
+    def add_interval_job(self, func, weeks=0, days=0, hours=0, minutes=0,
+                         seconds=0, start_date=None, args=None, kwargs=None,
+                         **options):
+        """
+        Schedules a job to be completed on specified intervals.
+        Any extra keyword arguments are passed along to the constructor of the
+        :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+
+        :param func: callable to run
+        :param weeks: number of weeks to wait
+        :param days: number of days to wait
+        :param hours: number of hours to wait
+        :param minutes: number of minutes to wait
+        :param seconds: number of seconds to wait
+        :param start_date: when to first execute the job and start the
+            counter (default is after the given interval)
+        :param args: list of positional arguments to call func with
+        :param kwargs: dict of keyword arguments to call func with
+        :param name: name of the job
+        :param jobstore: alias of the job store to add the job to
+        :param misfire_grace_time: seconds after the designated run time that
+            the job is still allowed to be run
+        :rtype: :class:`~apscheduler.job.Job`
+        """
+        interval = timedelta(weeks=weeks, days=days, hours=hours,
+                             minutes=minutes, seconds=seconds)
+        trigger = IntervalTrigger(interval, start_date)
+        return self.add_job(trigger, func, args, kwargs, **options)
+
+    def add_cron_job(self, func, year=None, month=None, day=None, week=None,
+                     day_of_week=None, hour=None, minute=None, second=None,
+                     start_date=None, args=None, kwargs=None, **options):
+        """
+        Schedules a job to be completed on times that match the given
+        expressions.
+        Any extra keyword arguments are passed along to the constructor of the
+        :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+
+        :param func: callable to run
+        :param year: year to run on
+        :param month: month to run on
+        :param day: day of month to run on
+        :param week: week of the year to run on
+        :param day_of_week: weekday to run on (0 = Monday)
+        :param hour: hour to run on
+        :param second: second to run on
+        :param args: list of positional arguments to call func with
+        :param kwargs: dict of keyword arguments to call func with
+        :param name: name of the job
+        :param jobstore: alias of the job store to add the job to
+        :param misfire_grace_time: seconds after the designated run time that
+            the job is still allowed to be run
+        :return: the scheduled job
+        :rtype: :class:`~apscheduler.job.Job`
+        """
+        trigger = CronTrigger(year=year, month=month, day=day, week=week,
+                              day_of_week=day_of_week, hour=hour,
+                              minute=minute, second=second,
+                              start_date=start_date)
+        return self.add_job(trigger, func, args, kwargs, **options)
+
+    def cron_schedule(self, **options):
+        """
+        Decorator version of :meth:`add_cron_job`.
+        This decorator does not wrap its host function.
+        Unscheduling decorated functions is possible by passing the ``job``
+        attribute of the scheduled function to :meth:`unschedule_job`.
+        Any extra keyword arguments are passed along to the constructor of the
+        :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+        """
+        def inner(func):
+            func.job = self.add_cron_job(func, **options)
+            return func
+        return inner
+
+    def interval_schedule(self, **options):
+        """
+        Decorator version of :meth:`add_interval_job`.
+        This decorator does not wrap its host function.
+        Unscheduling decorated functions is possible by passing the ``job``
+        attribute of the scheduled function to :meth:`unschedule_job`.
+        Any extra keyword arguments are passed along to the constructor of the
+        :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+        """
+        def inner(func):
+            func.job = self.add_interval_job(func, **options)
+            return func
+        return inner
+
+    def get_jobs(self):
+        """
+        Returns a list of all scheduled jobs.
+
+        :return: list of :class:`~apscheduler.job.Job` objects
+        """
+        self._jobstores_lock.acquire()
+        try:
+            jobs = []
+            for jobstore in itervalues(self._jobstores):
+                jobs.extend(jobstore.jobs)
+            return jobs
+        finally:
+            self._jobstores_lock.release()
+
+    def unschedule_job(self, job):
+        """
+        Removes a job, preventing it from being run any more.
+        """
+        self._jobstores_lock.acquire()
+        try:
+            for alias, jobstore in iteritems(self._jobstores):
+                if job in list(jobstore.jobs):
+                    self._remove_job(job, alias, jobstore)
+                    return
+        finally:
+            self._jobstores_lock.release()
+
+        raise KeyError('Job "%s" is not scheduled in any job store' % job)
+
+    def unschedule_func(self, func):
+        """
+        Removes all jobs that would execute the given function.
+        """
+        found = False
+        self._jobstores_lock.acquire()
+        try:
+            for alias, jobstore in iteritems(self._jobstores):
+                # Iterate over a copy since _remove_job mutates the list.
+                for job in list(jobstore.jobs):
+                    if job.func == func:
+                        self._remove_job(job, alias, jobstore)
+                        found = True
+        finally:
+            self._jobstores_lock.release()
+
+        if not found:
+            raise KeyError('The given function is not scheduled in this '
+                           'scheduler')
+
+    def print_jobs(self, out=None):
+        """
+        Prints out a textual listing of all jobs currently scheduled on this
+        scheduler.
+
+        :param out: a file-like object to print to (defaults to **sys.stdout**
+            if nothing is given)
+        """
+        out = out or sys.stdout
+        job_strs = []
+        self._jobstores_lock.acquire()
+        try:
+            for alias, jobstore in iteritems(self._jobstores):
+                job_strs.append('Jobstore %s:' % alias)
+                if jobstore.jobs:
+                    for job in jobstore.jobs:
+                        job_strs.append('    %s' % job)
+                else:
+                    job_strs.append('    No scheduled jobs')
+        finally:
+            self._jobstores_lock.release()
+
+        out.write(os.linesep.join(job_strs) + os.linesep)
+
+    def _run_job(self, job, run_times):
+        """
+        Acts as a harness that runs the actual job code in a thread.
+        """
+        for run_time in run_times:
+            # See if the job missed its run time window, and handle possible
+            # misfires accordingly
+            difference = datetime.now() - run_time
+            grace_time = timedelta(seconds=job.misfire_grace_time)
+            if difference > grace_time:
+                # Notify listeners about a missed run
+                event = JobEvent(EVENT_JOB_MISSED, job, run_time)
+                self._notify_listeners(event)
+                logger.warning('Run time of job "%s" was missed by %s',
+                               job, difference)
+            else:
+                try:
+                    job.add_instance()
+                except MaxInstancesReachedError:
+                    event = JobEvent(EVENT_JOB_MISSED, job, run_time)
+                    self._notify_listeners(event)
+                    logger.warning('Execution of job "%s" skipped: '
+                                   'maximum number of running instances '
+                                   'reached (%d)', job, job.max_instances)
+                    break
+
+                logger.info('Running job "%s" (scheduled at %s)', job,
+                            run_time)
+
+                try:
+                    retval = job.func(*job.args, **job.kwargs)
+                except:
+                    # Notify listeners about the exception
+                    exc, tb = sys.exc_info()[1:]
+                    event = JobEvent(EVENT_JOB_ERROR, job, run_time,
+                                     exception=exc, traceback=tb)
+                    self._notify_listeners(event)
+
+                    logger.exception('Job "%s" raised an exception', job)
+                else:
+                    # Notify listeners about successful execution
+                    event = JobEvent(EVENT_JOB_EXECUTED, job, run_time,
+                                     retval=retval)
+                    self._notify_listeners(event)
+
+                    logger.info('Job "%s" executed successfully', job)
+
+                job.remove_instance()
+
+                # If coalescing is enabled, don't attempt any further runs
+                if job.coalesce:
+                    break
+
+    def _process_jobs(self, now):
+        """
+        Iterates through jobs in every jobstore, starts pending jobs
+        and figures out the next wakeup time.
+        """
+        next_wakeup_time = None
+        self._jobstores_lock.acquire()
+        try:
+            for alias, jobstore in iteritems(self._jobstores):
+                for job in tuple(jobstore.jobs):
+                    run_times = job.get_run_times(now)
+                    if run_times:
+                        self._threadpool.submit(self._run_job, job, run_times)
+
+                        # Increase the job's run count
+                        if job.coalesce:
+                            job.runs += 1
+                        else:
+                            job.runs += len(run_times)
+
+                        # Update the job, but don't keep finished jobs around
+                        if job.compute_next_run_time(
+                                now + timedelta(microseconds=1)):
+                            jobstore.update_job(job)
+                        else:
+                            self._remove_job(job, alias, jobstore)
+
+                    if not next_wakeup_time:
+                        next_wakeup_time = job.next_run_time
+                    elif job.next_run_time:
+                        next_wakeup_time = min(next_wakeup_time,
+                                               job.next_run_time)
+            return next_wakeup_time
+        finally:
+            self._jobstores_lock.release()
+
+    def _main_loop(self):
+        """Executes jobs on schedule."""
+
+        logger.info('Scheduler started')
+        self._notify_listeners(SchedulerEvent(EVENT_SCHEDULER_START))
+
+        self._wakeup.clear()
+        while not self._stopped:
+            logger.debug('Looking for jobs to run')
+            now = datetime.now()
+            next_wakeup_time = self._process_jobs(now)
+
+            # Sleep until the next job is scheduled to be run,
+            # a new job is added or the scheduler is stopped
+            if next_wakeup_time is not None:
+                wait_seconds = time_difference(next_wakeup_time, now)
+                logger.debug('Next wakeup is due at %s (in %f seconds)',
+                             next_wakeup_time, wait_seconds)
+                try:
+                    self._wakeup.wait(wait_seconds)
+                except IOError:  # Catch errno 514 on some Linux kernels
+                    pass
+                self._wakeup.clear()
+            elif self.standalone:
+                logger.debug('No jobs left; shutting down scheduler')
+                self.shutdown()
+                break
+            else:
+                logger.debug('No jobs; waiting until a job is added')
+                try:
+                    self._wakeup.wait()
+                except IOError:  # Catch errno 514 on some Linux kernels
+                    pass
+                self._wakeup.clear()
+
+        logger.info('Scheduler has been shut down')
+        self._notify_listeners(SchedulerEvent(EVENT_SCHEDULER_SHUTDOWN))
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/threadpool.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/threadpool.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/threadpool.py
new file mode 100644
index 0000000..8ec47da
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/threadpool.py
@@ -0,0 +1,133 @@
+"""
+Generic thread pool class. Modeled after Java's ThreadPoolExecutor.
+Please note that this ThreadPool does *not* fully implement the PEP 3148
+ThreadPool!
+"""
+
+from threading import Thread, Lock, currentThread
+from weakref import ref
+import logging
+import atexit
+
+try:
+ from queue import Queue, Empty
+except ImportError:
+ from Queue import Queue, Empty
+
+logger = logging.getLogger(__name__)
+_threadpools = set()
+
+
+# Worker threads are daemonic in order to let the interpreter exit without
+# an explicit shutdown of the thread pool. The following trick is necessary
+# to allow worker threads to finish cleanly.
+def _shutdown_all():
+    # atexit hook: shut down every live pool so daemonic workers can
+    # finish cleanly. Weak references let garbage-collected pools drop
+    # out of the registry on their own.
+    for pool_ref in tuple(_threadpools):
+        pool = pool_ref()
+        if pool:
+            pool.shutdown()
+
+atexit.register(_shutdown_all)
+
+
+class ThreadPool(object):
+    def __init__(self, core_threads=0, max_threads=20, keepalive=1):
+        """
+        :param core_threads: maximum number of persistent threads in the pool
+        :param max_threads: maximum number of total threads in the pool
+        :param thread_class: callable that creates a Thread object
+        :param keepalive: seconds to keep non-core worker threads waiting
+            for new tasks
+        """
+        # NOTE(review): the docstring documents a thread_class parameter
+        # that this __init__ does not accept -- stale upstream docs.
+        self.core_threads = core_threads
+        # At least one thread, and never fewer than core_threads.
+        self.max_threads = max(max_threads, core_threads, 1)
+        self.keepalive = keepalive
+        self._queue = Queue()
+        self._threads_lock = Lock()
+        self._threads = set()
+        self._shutdown = False
+
+        # Register with the module-level atexit shutdown hook.
+        _threadpools.add(ref(self))
+        logger.info('Started thread pool with %d core threads and %s maximum '
+                    'threads', core_threads, max_threads or 'unlimited')
+
+    def _adjust_threadcount(self):
+        # Spawn a new worker if under the cap; workers up to core_threads
+        # are "core" (they block forever instead of timing out).
+        self._threads_lock.acquire()
+        try:
+            if self.num_threads < self.max_threads:
+                self._add_thread(self.num_threads < self.core_threads)
+        finally:
+            self._threads_lock.release()
+
+    def _add_thread(self, core):
+        t = Thread(target=self._run_jobs, args=(core,))
+        t.setDaemon(True)
+        t.start()
+        self._threads.add(t)
+
+    def _run_jobs(self, core):
+        # Worker loop: pull (func, args, kwargs) tuples off the queue until
+        # shutdown, or (for non-core workers) until keepalive expires.
+        logger.debug('Started worker thread')
+        block = True
+        timeout = None
+        if not core:
+            block = self.keepalive > 0
+            timeout = self.keepalive
+
+        while True:
+            try:
+                func, args, kwargs = self._queue.get(block, timeout)
+            except Empty:
+                break
+
+            # shutdown() enqueues (None, None, None) sentinels to wake
+            # blocked workers; the flag check below makes them exit.
+            if self._shutdown:
+                break
+
+            try:
+                func(*args, **kwargs)
+            except:
+                logger.exception('Error in worker thread')
+
+        self._threads_lock.acquire()
+        self._threads.remove(currentThread())
+        self._threads_lock.release()
+
+        logger.debug('Exiting worker thread')
+
+    @property
+    def num_threads(self):
+        return len(self._threads)
+
+    def submit(self, func, *args, **kwargs):
+        if self._shutdown:
+            raise RuntimeError('Cannot schedule new tasks after shutdown')
+
+        self._queue.put((func, args, kwargs))
+        self._adjust_threadcount()
+
+    def shutdown(self, wait=True):
+        if self._shutdown:
+            return
+
+        # NOTE(review): should be logger.info, not logging.info -- this
+        # logs through the root logger instead of this module's logger.
+        logging.info('Shutting down thread pool')
+        self._shutdown = True
+        # Weakref equality makes this remove the ref added in __init__.
+        _threadpools.remove(ref(self))
+
+        # One sentinel per worker to unblock queue.get().
+        self._threads_lock.acquire()
+        for _ in range(self.num_threads):
+            self._queue.put((None, None, None))
+        self._threads_lock.release()
+
+        if wait:
+            self._threads_lock.acquire()
+            threads = tuple(self._threads)
+            self._threads_lock.release()
+            for thread in threads:
+                thread.join()
+
+    def __repr__(self):
+        if self.max_threads:
+            threadcount = '%d/%d' % (self.num_threads, self.max_threads)
+        else:
+            threadcount = '%d' % self.num_threads
+
+        return '<ThreadPool at %x; threads=%s>' % (id(self), threadcount)
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/__init__.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/__init__.py
new file mode 100644
index 0000000..74a9788
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/__init__.py
@@ -0,0 +1,3 @@
+from apscheduler.triggers.cron import CronTrigger
+from apscheduler.triggers.interval import IntervalTrigger
+from apscheduler.triggers.simple import SimpleTrigger
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/__init__.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/__init__.py
new file mode 100644
index 0000000..9e69f72
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/__init__.py
@@ -0,0 +1,144 @@
+from datetime import date, datetime
+
+from apscheduler.triggers.cron.fields import *
+from apscheduler.util import datetime_ceil, convert_to_datetime, iteritems
+
+
class CronTrigger(object):
    """Trigger that fires on a cron-like schedule.

    Field values are given as keyword arguments (e.g. ``hour='*/2'``,
    ``day_of_week='mon-fri'``); an optional ``start_date`` keyword bounds
    the earliest possible fire time.
    """

    # Field names in decreasing order of significance; get_next_fire_time
    # walks this order when rolling values forward.
    FIELD_NAMES = ('year', 'month', 'day', 'week', 'day_of_week', 'hour',
                   'minute', 'second')
    # Field class used to parse/evaluate each field's expressions.
    FIELDS_MAP = {'year': BaseField,
                  'month': BaseField,
                  'week': WeekField,
                  'day': DayOfMonthField,
                  'day_of_week': DayOfWeekField,
                  'hour': BaseField,
                  'minute': BaseField,
                  'second': BaseField}

    def __init__(self, **values):
        self.start_date = values.pop('start_date', None)
        if self.start_date:
            self.start_date = convert_to_datetime(self.start_date)

        # Check field names and yank out all None valued fields
        for key, value in list(iteritems(values)):
            if key not in self.FIELD_NAMES:
                raise TypeError('Invalid field name: %s' % key)
            if value is None:
                del values[key]

        # Build one field object per FIELD_NAMES entry.  Fields more
        # significant than any user-specified one get '*'; once every
        # user-given field has been consumed (values is empty), remaining
        # less significant fields get DEFAULT_VALUES.
        self.fields = []
        assign_defaults = False
        for field_name in self.FIELD_NAMES:
            if field_name in values:
                exprs = values.pop(field_name)
                is_default = False
                assign_defaults = not values
            elif assign_defaults:
                exprs = DEFAULT_VALUES[field_name]
                is_default = True
            else:
                exprs = '*'
                is_default = True

            field_class = self.FIELDS_MAP[field_name]
            field = field_class(field_name, exprs, is_default)
            self.fields.append(field)

    def _increment_field_value(self, dateval, fieldnum):
        """
        Increments the designated field and resets all less significant fields
        to their minimum values.

        :type dateval: datetime
        :type fieldnum: int
        :rtype: tuple
        :return: a tuple containing the new date, and the number of the field
            that was actually incremented
        """
        i = 0
        values = {}
        while i < len(self.fields):
            field = self.fields[i]
            if not field.REAL:
                # Derived fields (week, day_of_week) cannot be passed to the
                # datetime constructor; skip them, retargeting fieldnum if it
                # pointed at one.
                if i == fieldnum:
                    fieldnum -= 1
                    i -= 1
                else:
                    i += 1
                continue

            if i < fieldnum:
                # More significant fields keep their current value.
                values[field.name] = field.get_value(dateval)
                i += 1
            elif i > fieldnum:
                # Less significant fields are reset to their minimum.
                values[field.name] = field.get_min(dateval)
                i += 1
            else:
                value = field.get_value(dateval)
                maxval = field.get_max(dateval)
                if value == maxval:
                    # Already at the maximum: carry into the next more
                    # significant field instead.
                    fieldnum -= 1
                    i -= 1
                else:
                    values[field.name] = value + 1
                    i += 1

        return datetime(**values), fieldnum

    def _set_field_value(self, dateval, fieldnum, new_value):
        """Return a copy of `dateval` with field `fieldnum` set to
        `new_value` and every less significant field reset to its minimum
        (derived fields are skipped)."""
        values = {}
        for i, field in enumerate(self.fields):
            if field.REAL:
                if i < fieldnum:
                    values[field.name] = field.get_value(dateval)
                elif i > fieldnum:
                    values[field.name] = field.get_min(dateval)
                else:
                    values[field.name] = new_value

        return datetime(**values)

    def get_next_fire_time(self, start_date):
        """Earliest datetime >= `start_date` matching every field, or
        None when the search carries past the most significant field."""
        if self.start_date:
            start_date = max(start_date, self.start_date)
        next_date = datetime_ceil(start_date)
        fieldnum = 0
        while 0 <= fieldnum < len(self.fields):
            field = self.fields[fieldnum]
            curr_value = field.get_value(next_date)
            next_value = field.get_next_value(next_date)

            if next_value is None:
                # No valid value was found
                next_date, fieldnum = self._increment_field_value(
                    next_date, fieldnum - 1)
            elif next_value > curr_value:
                # A valid, but higher than the starting value, was found
                if field.REAL:
                    next_date = self._set_field_value(
                        next_date, fieldnum, next_value)
                    fieldnum += 1
                else:
                    next_date, fieldnum = self._increment_field_value(
                        next_date, fieldnum)
            else:
                # A valid value was found, no changes necessary
                fieldnum += 1

        # fieldnum == -1 means we carried past 'year': no match exists.
        if fieldnum >= 0:
            return next_date

    def __str__(self):
        # Only show fields the user explicitly specified.
        options = ["%s='%s'" % (f.name, str(f)) for f in self.fields
                   if not f.is_default]
        return 'cron[%s]' % (', '.join(options))

    def __repr__(self):
        options = ["%s='%s'" % (f.name, str(f)) for f in self.fields
                   if not f.is_default]
        if self.start_date:
            options.append("start_date='%s'" % self.start_date.isoformat(' '))
        return '<%s (%s)>' % (self.__class__.__name__, ', '.join(options))
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/expressions.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/expressions.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/expressions.py
new file mode 100644
index 0000000..b5d2919
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/expressions.py
@@ -0,0 +1,194 @@
+"""
+This module contains the expressions applicable for CronTrigger's fields.
+"""
+
+from calendar import monthrange
+import re
+
+from apscheduler.util import asint
+
+__all__ = ('AllExpression', 'RangeExpression', 'WeekdayRangeExpression',
+ 'WeekdayPositionExpression', 'LastDayOfMonthExpression')
+
+
+WEEKDAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
+
+
class AllExpression(object):
    """Cron expression matching every value ('*'), optionally with a step
    ('*/N')."""

    value_re = re.compile(r'\*(?:/(?P<step>\d+))?$')

    def __init__(self, step=None):
        self.step = asint(step)
        if self.step == 0:
            raise ValueError('Increment must be higher than 0')

    def get_next_value(self, date, field):
        """Next matching value of `field` at or after its current value in
        `date`, or None if nothing fits within the field's maximum."""
        minval = field.get_min(date)
        maxval = field.get_max(date)
        start = max(field.get_value(date), minval)

        if self.step:
            # Round up to the next multiple of `step` counted from minval.
            offset = (self.step - (start - minval)) % self.step
            candidate = start + offset
        else:
            candidate = start

        if candidate <= maxval:
            return candidate

    def __str__(self):
        return '*/%d' % self.step if self.step else '*'

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.step)
+
+
class RangeExpression(AllExpression):
    """Cron expression for a single value or numeric range: 'N', 'N-M',
    with an optional step suffix '/S' (e.g. '10-30/5')."""

    value_re = re.compile(
        r'(?P<first>\d+)(?:-(?P<last>\d+))?(?:/(?P<step>\d+))?$')

    def __init__(self, first, last=None, step=None):
        AllExpression.__init__(self, step)
        first = asint(first)
        last = asint(last)
        if last is None and step is None:
            # A bare 'N' means exactly N (a range of one).
            last = first
        if last is not None and first > last:
            raise ValueError('The minimum value in a range must not be '
                             'higher than the maximum')
        self.first = first
        self.last = last

    def get_next_value(self, date, field):
        """Next value of `field` at/after its current value that lies in
        [first, last] (clamped to the field's own bounds), honoring the
        step; None when nothing fits."""
        start = field.get_value(date)
        minval = field.get_min(date)
        maxval = field.get_max(date)

        # Apply range limits
        minval = max(minval, self.first)
        if self.last is not None:
            maxval = min(maxval, self.last)
        start = max(start, minval)

        if not self.step:
            next = start
        else:
            # Round up to the next multiple of `step` counted from minval.
            distance_to_next = (self.step - (start - minval)) % self.step
            next = start + distance_to_next

        if next <= maxval:
            return next

    def __str__(self):
        if self.last != self.first and self.last is not None:
            range = '%d-%d' % (self.first, self.last)
        else:
            range = str(self.first)

        if self.step:
            return '%s/%d' % (range, self.step)
        return range

    def __repr__(self):
        args = [str(self.first)]
        if self.last != self.first and self.last is not None or self.step:
            args.append(str(self.last))
        if self.step:
            args.append(str(self.step))
        return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
+
+
class WeekdayRangeExpression(RangeExpression):
    """Range expression taking weekday names ('mon', 'mon-fri') instead of
    numbers; names are converted to indexes into WEEKDAYS."""

    value_re = re.compile(r'(?P<first>[a-z]+)(?:-(?P<last>[a-z]+))?',
                          re.IGNORECASE)

    def __init__(self, first, last=None):
        try:
            first_num = WEEKDAYS.index(first.lower())
        except ValueError:
            raise ValueError('Invalid weekday name "%s"' % first)

        if last:
            try:
                last_num = WEEKDAYS.index(last.lower())
            except ValueError:
                raise ValueError('Invalid weekday name "%s"' % last)
        else:
            last_num = None

        RangeExpression.__init__(self, first_num, last_num)

    def __str__(self):
        if self.last != self.first and self.last is not None:
            return '%s-%s' % (WEEKDAYS[self.first], WEEKDAYS[self.last])
        return WEEKDAYS[self.first]

    def __repr__(self):
        args = ["'%s'" % WEEKDAYS[self.first]]
        if self.last != self.first and self.last is not None:
            args.append("'%s'" % WEEKDAYS[self.last])
        return "%s(%s)" % (self.__class__.__name__, ', '.join(args))
+
+
class WeekdayPositionExpression(AllExpression):
    """Expression like '2nd fri' or 'last sun': the Nth (or last)
    occurrence of a given weekday within the month."""

    options = ['1st', '2nd', '3rd', '4th', '5th', 'last']
    value_re = re.compile(r'(?P<option_name>%s) +(?P<weekday_name>(?:\d+|\w+))'
                          % '|'.join(options), re.IGNORECASE)

    def __init__(self, option_name, weekday_name):
        try:
            self.option_num = self.options.index(option_name.lower())
        except ValueError:
            raise ValueError('Invalid weekday position "%s"' % option_name)

        try:
            self.weekday = WEEKDAYS.index(weekday_name.lower())
        except ValueError:
            raise ValueError('Invalid weekday name "%s"' % weekday_name)

    def get_next_value(self, date, field):
        """Day-of-month number for this occurrence, or None if it does not
        exist in date's month or has already passed."""
        # Figure out the weekday of the month's first day and the number
        # of days in that month
        first_day_wday, last_day = monthrange(date.year, date.month)

        # Calculate which day of the month is the first of the target weekdays
        first_hit_day = self.weekday - first_day_wday + 1
        if first_hit_day <= 0:
            first_hit_day += 7

        # Calculate what day of the month the target weekday would be
        if self.option_num < 5:
            target_day = first_hit_day + self.option_num * 7
        else:
            # NOTE(review): relies on Python 2 integer division; under
            # Python 3 '/' would yield a float day -- confirm before porting.
            target_day = first_hit_day + ((last_day - first_hit_day) / 7) * 7

        if target_day <= last_day and target_day >= date.day:
            return target_day

    def __str__(self):
        return '%s %s' % (self.options[self.option_num],
                          WEEKDAYS[self.weekday])

    def __repr__(self):
        return "%s('%s', '%s')" % (self.__class__.__name__,
                                   self.options[self.option_num],
                                   WEEKDAYS[self.weekday])
+
+
class LastDayOfMonthExpression(AllExpression):
    """Expression matching the literal 'last': the final day of the month."""

    value_re = re.compile(r'last', re.IGNORECASE)

    def __init__(self):
        # Intentionally does not call AllExpression.__init__: 'last'
        # carries no step value.
        pass

    def get_next_value(self, date, field):
        # monthrange() gives (weekday of day 1, days in month); the day
        # count is the month's last day.
        days_in_month = monthrange(date.year, date.month)[1]
        return days_in_month

    def __str__(self):
        return 'last'

    def __repr__(self):
        return "%s()" % type(self).__name__
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/fields.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/fields.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/fields.py
new file mode 100644
index 0000000..be5e5e3
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/cron/fields.py
@@ -0,0 +1,100 @@
+"""
+Fields represent CronTrigger options which map to :class:`~datetime.datetime`
+fields.
+"""
+
+from calendar import monthrange
+
+from apscheduler.triggers.cron.expressions import *
+
+__all__ = ('MIN_VALUES', 'MAX_VALUES', 'DEFAULT_VALUES', 'BaseField',
+ 'WeekField', 'DayOfMonthField', 'DayOfWeekField')
+
+
# Inclusive bounds and implicit defaults for each CronTrigger field,
# keyed by the field names used in CronTrigger.FIELD_NAMES.
MIN_VALUES = {'year': 1970, 'month': 1, 'day': 1, 'week': 1,
              'day_of_week': 0, 'hour': 0, 'minute': 0, 'second': 0}
# BUGFIX: the 'day' key was misspelled 'day:' -- BaseField.get_max would
# have raised KeyError for the day field (masked in practice because
# DayOfMonthField overrides get_max(), but wrong for any other lookup).
MAX_VALUES = {'year': 2 ** 63, 'month': 12, 'day': 31, 'week': 53,
              'day_of_week': 6, 'hour': 23, 'minute': 59, 'second': 59}
DEFAULT_VALUES = {'year': '*', 'month': 1, 'day': 1, 'week': '*',
                  'day_of_week': '*', 'hour': 0, 'minute': 0, 'second': 0}
+
+
class BaseField(object):
    """A single CronTrigger field: parses its expression list and finds
    the next matching value for a datetime."""

    # REAL fields map directly to datetime constructor arguments; derived
    # fields (week, day_of_week) override this with False.
    REAL = True
    # Expression classes tried, in order, when compiling this field.
    COMPILERS = [AllExpression, RangeExpression]

    def __init__(self, name, exprs, is_default=False):
        self.name = name
        # True when the value came from DEFAULT_VALUES/'*' rather than the user.
        self.is_default = is_default
        self.compile_expressions(exprs)

    def get_min(self, dateval):
        """Smallest legal value for this field (`dateval` unused here)."""
        return MIN_VALUES[self.name]

    def get_max(self, dateval):
        """Largest legal value for this field (`dateval` unused here)."""
        return MAX_VALUES[self.name]

    def get_value(self, dateval):
        """Current value of this field in the given datetime."""
        return getattr(dateval, self.name)

    def get_next_value(self, dateval):
        """Smallest next value produced by any of this field's
        expressions, or None if none of them match."""
        smallest = None
        for expr in self.expressions:
            value = expr.get_next_value(dateval, self)
            if smallest is None or (value is not None and value < smallest):
                smallest = value

        return smallest

    def compile_expressions(self, exprs):
        """Parse `exprs` (a single value or comma-separated list) into
        expression objects stored on self.expressions."""
        self.expressions = []

        # Split a comma-separated expression list, if any
        exprs = str(exprs).strip()
        if ',' in exprs:
            for expr in exprs.split(','):
                self.compile_expression(expr)
        else:
            self.compile_expression(exprs)

    def compile_expression(self, expr):
        """Compile one expression using the first COMPILERS entry whose
        regex matches; raise ValueError if none do."""
        for compiler in self.COMPILERS:
            match = compiler.value_re.match(expr)
            if match:
                # The regex's named groups double as constructor kwargs.
                compiled_expr = compiler(**match.groupdict())
                self.expressions.append(compiled_expr)
                return

        raise ValueError('Unrecognized expression "%s" for field "%s"' %
                         (expr, self.name))

    def __str__(self):
        expr_strings = (str(e) for e in self.expressions)
        return ','.join(expr_strings)

    def __repr__(self):
        return "%s('%s', '%s')" % (self.__class__.__name__, self.name,
                                   str(self))
+
+
class WeekField(BaseField):
    """ISO week-of-year field; derived from the date, so not a real
    datetime constructor argument (REAL = False)."""

    REAL = False

    def get_value(self, dateval):
        # isocalendar() -> (ISO year, ISO week number, ISO weekday)
        iso_year, iso_week, iso_weekday = dateval.isocalendar()
        return iso_week
+
+
class DayOfMonthField(BaseField):
    """Day-of-month field; additionally accepts 'Nth <weekday>' and
    'last' style expressions."""

    COMPILERS = BaseField.COMPILERS + [WeekdayPositionExpression,
                                       LastDayOfMonthExpression]

    def get_max(self, dateval):
        # The upper bound varies with the month (and leap years), so ask
        # the calendar instead of the static MAX_VALUES table.
        return monthrange(dateval.year, dateval.month)[1]
+
+
class DayOfWeekField(BaseField):
    """Day-of-week field; derived from the date (REAL = False) and
    additionally accepts weekday-name ranges like 'mon-fri'."""

    REAL = False
    COMPILERS = BaseField.COMPILERS + [WeekdayRangeExpression]

    def get_value(self, dateval):
        # weekday(): Monday == 0 ... Sunday == 6, matching WEEKDAYS order.
        return dateval.weekday()
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/interval.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/interval.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/interval.py
new file mode 100644
index 0000000..dd16d77
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/interval.py
@@ -0,0 +1,39 @@
+from datetime import datetime, timedelta
+from math import ceil
+
+from apscheduler.util import convert_to_datetime, timedelta_seconds
+
+
class IntervalTrigger(object):
    """Trigger that fires at a fixed timedelta interval, starting at
    `start_date` (default: now + interval)."""

    def __init__(self, interval, start_date=None):
        if not isinstance(interval, timedelta):
            raise TypeError('interval must be a timedelta')
        if start_date:
            # NOTE(review): start_date is converted again in the else
            # branch below; this first conversion's result feeds that call,
            # making the second convert_to_datetime presumably redundant --
            # confirm it is idempotent before simplifying.
            start_date = convert_to_datetime(start_date)

        self.interval = interval
        self.interval_length = timedelta_seconds(self.interval)
        if self.interval_length == 0:
            # Guard against zero-length intervals (would divide by zero in
            # get_next_fire_time); fall back to one second.
            self.interval = timedelta(seconds=1)
            self.interval_length = 1

        if start_date is None:
            self.start_date = datetime.now() + self.interval
        else:
            self.start_date = convert_to_datetime(start_date)

    def get_next_fire_time(self, start_date):
        """Earliest scheduled fire time at or after `start_date`."""
        if start_date < self.start_date:
            return self.start_date

        # Number of whole intervals elapsed since start_date, rounded up.
        timediff_seconds = timedelta_seconds(start_date - self.start_date)
        next_interval_num = int(ceil(timediff_seconds / self.interval_length))
        return self.start_date + self.interval * next_interval_num

    def __str__(self):
        return 'interval[%s]' % str(self.interval)

    def __repr__(self):
        return "<%s (interval=%s, start_date=%s)>" % (
            self.__class__.__name__, repr(self.interval),
            repr(self.start_date))
[33/50] [abbrv] git commit: AMBARI-6927. Incorrect task name for
decommission and recommission operations hostname.(xiwang)
Posted by jo...@apache.org.
AMBARI-6927. Incorrect task name for decommission and recommission operations hostname.(xiwang)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b39b998e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b39b998e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b39b998e
Branch: refs/heads/branch-alerts-dev
Commit: b39b998ece510c00f2bae6b9430618ebd9b78d2a
Parents: e1eaf3a
Author: Xi Wang <xi...@apache.org>
Authored: Tue Aug 19 14:48:58 2014 -0700
Committer: Xi Wang <xi...@apache.org>
Committed: Tue Aug 19 15:15:57 2014 -0700
----------------------------------------------------------------------
ambari-web/app/utils/helper.js | 5 +++++
1 file changed, 5 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/b39b998e/ambari-web/app/utils/helper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/helper.js b/ambari-web/app/utils/helper.js
index d31491c..c22759d 100644
--- a/ambari-web/app/utils/helper.js
+++ b/ambari-web/app/utils/helper.js
@@ -431,6 +431,11 @@ App.format = {
result = result + ' ' + self.role(item);
}
});
+
+ if (result.indexOf('Decommission:') > -1 || result.indexOf('Recommission:') > -1) {
+ // for Decommission command, make sure the hostname is in lower case
+ result = result.split(':')[0] + ': ' + result.split(':')[1].toLowerCase();
+ }
if (result === ' Nagios Update Ignore Actionexecute') {
result = Em.I18n.t('common.maintenance.task');
}
[28/50] [abbrv] git commit: AMBARI-6921. Slider Apps View is blocked
by using old unnecessary global configs in code (srimanth)
Posted by jo...@apache.org.
AMBARI-6921. Slider Apps View is blocked by using old unnecessary global configs in code (srimanth)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5aaa32c1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5aaa32c1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5aaa32c1
Branch: refs/heads/branch-alerts-dev
Commit: 5aaa32c19fdca3811d0a942cb1589afbf0448534
Parents: 933f7f8
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Tue Aug 19 11:42:53 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Tue Aug 19 11:50:35 2014 -0700
----------------------------------------------------------------------
.../view/slider/SliderAppsViewControllerImpl.java | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/5aaa32c1/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
index adfa68f..0a3282f 100644
--- a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
+++ b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
@@ -154,9 +154,9 @@ public class SliderAppsViewControllerImpl implements SliderAppsViewController {
}
// Check security
if (cluster.getDesiredConfigs() != null
- && cluster.getDesiredConfigs().containsKey("global")) {
+ && cluster.getDesiredConfigs().containsKey("hadoop-env")) {
Map<String, String> globalConfig = ambariClient.getConfiguration(
- clusterInfo, "global", cluster.getDesiredConfigs().get("global"));
+ clusterInfo, "hadoop-env", cluster.getDesiredConfigs().get("hadoop-env"));
if (globalConfig != null
&& globalConfig.containsKey("security_enabled")) {
String securityValue = globalConfig.get("security_enabled");
@@ -458,19 +458,19 @@ public class SliderAppsViewControllerImpl implements SliderAppsViewController {
AmbariService zkService = ambariClient.getService(ambariCluster,
"ZOOKEEPER");
if (zkService != null && ambariCluster.getDesiredConfigs() != null
- && ambariCluster.getDesiredConfigs().containsKey("global")
+ && ambariCluster.getDesiredConfigs().containsKey("zookeeper-env")
&& ambariCluster.getDesiredConfigs().containsKey("yarn-site")
&& ambariCluster.getDesiredConfigs().containsKey("core-site")) {
- Map<String, String> globalConfigs = ambariClient.getConfiguration(
- ambariCluster, "global",
- ambariCluster.getDesiredConfigs().get("global"));
+ Map<String, String> zkConfigs = ambariClient.getConfiguration(
+ ambariCluster, "zookeeper-env",
+ ambariCluster.getDesiredConfigs().get("zookeeper-env"));
Map<String, String> yarnSiteConfigs = ambariClient.getConfiguration(
ambariCluster, "yarn-site",
ambariCluster.getDesiredConfigs().get("yarn-site"));
Map<String, String> coreSiteConfigs = ambariClient.getConfiguration(
ambariCluster, "core-site",
ambariCluster.getDesiredConfigs().get("core-site"));
- String zkPort = globalConfigs.get("clientPort");
+ String zkPort = zkConfigs.get("clientPort");
String hdfsPath = coreSiteConfigs.get("fs.defaultFS");
String rmAddress = yarnSiteConfigs.get("yarn.resourcemanager.address");
String rmSchedulerAddress = yarnSiteConfigs
[05/50] [abbrv] git commit: AMBARI-6896. Clients set to install after
go back to "Assign Slaves and Clients" in Add Host wizard (Max Shepel via
alexantonenko)
Posted by jo...@apache.org.
AMBARI-6896. Clients set to install after go back to "Assign Slaves and Clients" in Add Host wizard (Max Shepel via alexantonenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/947899ec
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/947899ec
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/947899ec
Branch: refs/heads/branch-alerts-dev
Commit: 947899ec670aea29f00ca1aa95ea8ec226c64807
Parents: 620978b
Author: Alex Antonenko <hi...@gmail.com>
Authored: Mon Aug 18 19:42:19 2014 +0300
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Mon Aug 18 19:42:19 2014 +0300
----------------------------------------------------------------------
.../app/controllers/wizard/step6_controller.js | 19 ++++++++++++++-----
1 file changed, 14 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/947899ec/ambari-web/app/controllers/wizard/step6_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step6_controller.js b/ambari-web/app/controllers/wizard/step6_controller.js
index a3e97ce..9fe8685 100644
--- a/ambari-web/app/controllers/wizard/step6_controller.js
+++ b/ambari-web/app/controllers/wizard/step6_controller.js
@@ -60,6 +60,12 @@ App.WizardStep6Controller = Em.Controller.extend({
isLoaded: false,
/**
+ * Indication if user has chosen hosts to install clients
+ * @type {bool}
+ */
+ isClientsSet: false,
+
+ /**
* Define state for submit button
* @type {bool}
*/
@@ -425,11 +431,14 @@ App.WizardStep6Controller = Em.Controller.extend({
* @param hostsObj
*/
selectClientHost: function (hostsObj) {
- var nonMasterHost = hostsObj.findProperty('hasMaster', false);
- var clientHost = !!nonMasterHost ? nonMasterHost : hostsObj[hostsObj.length - 1]; // last host
- var clientCheckBox = clientHost.get('checkboxes').findProperty('component', 'CLIENT');
- if (clientCheckBox) {
- clientCheckBox.set('checked', true);
+ if (!this.get('isClientsSet')) {
+ var nonMasterHost = hostsObj.findProperty('hasMaster', false);
+ var clientHost = !!nonMasterHost ? nonMasterHost : hostsObj[hostsObj.length - 1]; // last host
+ var clientCheckBox = clientHost.get('checkboxes').findProperty('component', 'CLIENT');
+ if (clientCheckBox) {
+ clientCheckBox.set('checked', true);
+ }
+ this.set('isClientsSet', true);
}
},
[19/50] [abbrv] git commit: AMBARI-6899. Add unit test to ensure
iptables stay off (if they were initially turned off) (dlysnichenko)
Posted by jo...@apache.org.
AMBARI-6899. Add unit test to ensure iptables stay off (if they were initially turned off) (dlysnichenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7d8927c8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7d8927c8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7d8927c8
Branch: refs/heads/branch-alerts-dev
Commit: 7d8927c8e3026306ad246a18180e083346c00d76
Parents: 0116db8
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Tue Aug 19 17:18:22 2014 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Tue Aug 19 17:19:13 2014 +0300
----------------------------------------------------------------------
.../src/main/python/ambari_agent/HostInfo.py | 104 +-------------
.../test/python/ambari_agent/TestHostInfo.py | 60 +++-----
.../python/ambari_agent/TestRegistration.py | 3 +-
.../src/main/python/ambari_commons/__init__.py | 3 +
.../src/main/python/ambari_commons/firewall.py | 140 +++++++++++++++++++
ambari-server/src/main/python/ambari-server.py | 113 +++------------
.../src/test/python/TestAmbariServer.py | 133 ++++++++++++------
7 files changed, 277 insertions(+), 279 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d8927c8/ambari-agent/src/main/python/ambari_agent/HostInfo.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/HostInfo.py b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
index bd0c286..ff91dc4 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostInfo.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
@@ -32,23 +32,14 @@ import hostname
from PackagesAnalyzer import PackagesAnalyzer
from HostCheckReportFileHandler import HostCheckReportFileHandler
from Hardware import Hardware
-from ambari_commons import OSCheck, OSConst
+from ambari_commons import OSCheck, OSConst, Firewall
import socket
logger = logging.getLogger()
-# OS info
-OS_VERSION = OSCheck().get_os_major_version()
-OS_TYPE = OSCheck.get_os_type()
-OS_FAMILY = OSCheck.get_os_family()
-
# service cmd
SERVICE_CMD = "/sbin/service"
-# on ubuntu iptables service is called ufw
-if OS_FAMILY == OSConst.DEBIAN_FAMILY:
- SERVICE_CMD = "/usr/sbin/service"
-
class HostInfo:
# List of project names to be used to find alternatives folders etc.
@@ -324,25 +315,8 @@ class HostInfo:
else:
return ""
- def getFirewallObject(self):
- if OS_TYPE == OSConst.OS_UBUNTU:
- return UbuntuFirewallChecks()
- elif OS_TYPE == OSConst.OS_FEDORA and int(OS_VERSION) >= 18:
- return Fedora18FirewallChecks()
- elif OS_FAMILY == OSConst.SUSE_FAMILY:
- return SuseFirewallChecks()
- else:
- return FirewallChecks()
-
- def getFirewallObjectTypes(self):
- # To support test code, so tests can loop through the types
- return (FirewallChecks,
- UbuntuFirewallChecks,
- Fedora18FirewallChecks,
- SuseFirewallChecks)
-
def checkIptables(self):
- return self.getFirewallObject().check_iptables()
+ return Firewall().getFirewallObject().check_iptables()
""" Return various details about the host
componentsMapped: indicates if any components are mapped to this host
@@ -425,80 +399,6 @@ class HostInfo:
pass
return False
-
-class FirewallChecks(object):
- def __init__(self):
- self.FIREWALL_SERVICE_NAME = "iptables"
- self.SERVICE_CMD = SERVICE_CMD
- self.SERVICE_SUBCMD = "status"
-
- def get_command(self):
- return "%s %s %s" % (self.SERVICE_CMD, self.FIREWALL_SERVICE_NAME, self.SERVICE_SUBCMD)
-
- def check_result(self, retcode, out, err):
- return retcode == 0
-
- def check_iptables(self):
- retcode, out, err = self.run_os_command(self.get_command())
- return self.check_result(retcode, out, err)
-
- def get_running_result(self):
- # To support test code. Expected ouput from run_os_command.
- return (0, "", "")
-
- def get_stopped_result(self):
- # To support test code. Expected output from run_os_command.
- return (3, "", "")
-
- def run_os_command(self, cmd):
- if type(cmd) == str:
- cmd = shlex.split(cmd)
-
- try:
- process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE,
- stderr=subprocess.PIPE)
- (stdoutdata, stderrdata) = process.communicate()
- return process.returncode, stdoutdata, stderrdata
- except OSError:
- return self.get_stopped_result()
-
-
-class UbuntuFirewallChecks(FirewallChecks):
- def __init__(self):
- super(UbuntuFirewallChecks, self).__init__()
-
- self.FIREWALL_SERVICE_NAME = "ufw"
- self.SERVICE_CMD = 'service'
-
- def check_result(self, retcode, out, err):
- # On ubuntu, the status command returns 0 whether running or not
- return out and len(out) > 0 and out.strip() != "ufw stop/waiting"
-
- def get_running_result(self):
- # To support test code. Expected ouput from run_os_command.
- return (0, "ufw start/running", "")
-
- def get_stopped_result(self):
- # To support test code. Expected output from run_os_command.
- return (0, "ufw stop/waiting", "")
-
-
-class Fedora18FirewallChecks(FirewallChecks):
- def __init__(self):
- self.FIREWALL_SERVICE_NAME = "firewalld.service"
-
- def get_command(self):
- return "systemctl is-active firewalld.service"
-
-
-class SuseFirewallChecks(FirewallChecks):
- def __init__(self):
- self.FIREWALL_SERVICE_NAME = "SuSEfirewall2"
-
- def get_command(self):
- return "/sbin/SuSEfirewall2 status"
-
-
def main(argv=None):
h = HostInfo()
struct = {}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d8927c8/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py b/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
index ce4604d..08e7b47 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
@@ -32,11 +32,10 @@ with patch("platform.linux_distribution", return_value = ('redhat','11','Final')
from ambari_agent.HostCheckReportFileHandler import HostCheckReportFileHandler
from ambari_agent.PackagesAnalyzer import PackagesAnalyzer
from ambari_agent.HostInfo import HostInfo
- from ambari_agent.HostInfo import FirewallChecks
from ambari_agent.Hardware import Hardware
from ambari_agent.AmbariConfig import AmbariConfig
from resource_management.core.system import System
- from ambari_commons import OSCheck
+ from ambari_commons import OSCheck, Firewall, FirewallChecks ,OSConst
@patch.object(System, "os_family", new = 'redhat')
class TestHostInfo(TestCase):
@@ -269,6 +268,7 @@ class TestHostInfo(TestCase):
hostInfo = HostInfo()
dict = {}
hostInfo.register(dict, False, False)
+ self.assertTrue(cit_mock.called)
self.assertTrue(gir_mock.called)
self.assertTrue(gpd_mock.called)
self.assertTrue(aip_mock.called)
@@ -321,6 +321,7 @@ class TestHostInfo(TestCase):
self.verifyReturnedValues(dict)
self.assertTrue(os_umask_mock.call_count == 2)
+ cit_mock.reset_mock()
hostInfo = HostInfo()
dict = {}
hostInfo.register(dict, False, False)
@@ -328,6 +329,7 @@ class TestHostInfo(TestCase):
self.assertTrue(gpd_mock.called)
self.assertTrue(aip_mock.called)
self.assertTrue(cit_mock.called)
+ self.assertEqual(1, cit_mock.call_count)
for existingPkg in ["pkg1", "pkg2"]:
self.assertTrue(existingPkg in dict['installedPackages'])
@@ -515,13 +517,16 @@ class TestHostInfo(TestCase):
self.assertEquals(result[0]['name'], 'config1')
self.assertEquals(result[0]['target'], 'real_path_to_conf')
+ @patch.object(OSCheck, "get_os_family")
+ @patch.object(OSCheck, "get_os_type")
+ @patch.object(OSCheck, "get_os_major_version")
@patch.object(FirewallChecks, "run_os_command")
- def test_IpTablesRunning(self, run_os_command_mock):
- hostInfo = HostInfo()
- for firewallType in hostInfo.getFirewallObjectTypes():
- firewall = firewallType()
- run_os_command_mock.return_value = firewall.get_running_result()
- self.assertTrue(firewall.check_iptables())
+ def test_IpTablesRunning(self, run_os_command_mock, get_os_major_version_mock, get_os_type_mock, get_os_family_mock):
+ get_os_type_mock.return_value = ""
+ get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
+ run_os_command_mock.return_value = 0, "Table: filter", ""
+ self.assertTrue(Firewall().getFirewallObject().check_iptables())
+
@patch.object(HostInfo, "osdiskAvailableSpace")
def test_createAlerts(self, osdiskAvailableSpace_mock):
@@ -538,30 +543,6 @@ class TestHostInfo(TestCase):
self.assertEquals(1, len(result))
-
- @patch("subprocess.Popen")
- def test_run_os_command_exception(self, popen_mock):
- def base_test():
- return "base test"
-
- def sub_test():
- return "output 1", "error 1"
-
- base_test.communicate = sub_test
- base_test.returncode = 0
-
- hostInfo = HostInfo()
- for firewallType in hostInfo.getFirewallObjectTypes():
- firewall = firewallType()
-
- popen_mock.side_effect = None
- popen_mock.return_value = base_test
- self.assertTrue(firewall.check_iptables())
-
- popen_mock.side_effect = OSError('File not found')
- popen_mock.return_value = None
- self.assertFalse(firewall.check_iptables())
-
@patch.object(socket, "getfqdn")
@patch.object(socket, "gethostbyname")
@patch.object(socket, "gethostname")
@@ -586,13 +567,16 @@ class TestHostInfo(TestCase):
self.assertFalse(hostInfo.checkReverseLookup())
+
+ @patch.object(OSCheck, "get_os_family")
+ @patch.object(OSCheck, "get_os_type")
+ @patch.object(OSCheck, "get_os_major_version")
@patch.object(FirewallChecks, "run_os_command")
- def test_IpTablesStopped(self, run_os_command_mock):
- hostInfo = HostInfo()
- for firewallType in hostInfo.getFirewallObjectTypes():
- firewall = firewallType()
- run_os_command_mock.return_value = firewall.get_stopped_result()
- self.assertFalse(firewall.check_iptables())
+ def test_IpTablesStopped(self, run_os_command_mock, get_os_major_version_mock, get_os_type_mock, get_os_family_mock):
+ get_os_type_mock.return_value = ""
+ get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
+ run_os_command_mock.return_value = 3, "", ""
+ self.assertFalse(Firewall().getFirewallObject().check_iptables())
@patch("os.path.isfile")
@patch('__builtin__.open')
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d8927c8/ambari-agent/src/test/python/ambari_agent/TestRegistration.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestRegistration.py b/ambari-agent/src/test/python/ambari_agent/TestRegistration.py
index afd82b9..2a9f716 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestRegistration.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestRegistration.py
@@ -28,8 +28,7 @@ with patch("platform.linux_distribution", return_value = ('Suse','11','Final')):
from ambari_agent.Register import Register
from ambari_agent.AmbariConfig import AmbariConfig
from ambari_agent.HostInfo import HostInfo
- from ambari_agent.HostInfo import FirewallChecks
- from ambari_commons import OSCheck
+ from ambari_commons import OSCheck, Firewall, FirewallChecks
class TestRegistration(TestCase):
@patch.object(FirewallChecks, "run_os_command")
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d8927c8/ambari-common/src/main/python/ambari_commons/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/__init__.py b/ambari-common/src/main/python/ambari_commons/__init__.py
index b759e76..df4d7d6 100644
--- a/ambari-common/src/main/python/ambari_commons/__init__.py
+++ b/ambari-common/src/main/python/ambari_commons/__init__.py
@@ -19,8 +19,11 @@ limitations under the License.
'''
from ambari_commons.os_check import OSCheck, OSConst
+from ambari_commons.firewall import Firewall, FirewallChecks
__all__ = [
'OSCheck',
'OSConst',
+ 'Firewall',
+ 'FirewallChecks'
]
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d8927c8/ambari-common/src/main/python/ambari_commons/firewall.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/firewall.py b/ambari-common/src/main/python/ambari_commons/firewall.py
new file mode 100644
index 0000000..1cf69ee
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_commons/firewall.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import subprocess
+import shlex
+from ambari_commons import OSCheck, OSConst
+
+
+class Firewall(object):
+ def __init__(self):
+ # OS info
+ self.OS_VERSION = OSCheck().get_os_major_version()
+ self.OS_TYPE = OSCheck.get_os_type()
+ self.OS_FAMILY = OSCheck.get_os_family()
+
+ def getFirewallObject(self):
+ if self.OS_TYPE == OSConst.OS_UBUNTU:
+ return UbuntuFirewallChecks()
+ elif self.OS_TYPE == OSConst.OS_FEDORA and int(self.OS_VERSION) >= 18:
+ return Fedora18FirewallChecks()
+ elif self.OS_FAMILY == OSConst.SUSE_FAMILY:
+ return SuseFirewallChecks()
+ else:
+ return FirewallChecks()
+
+class FirewallChecks(object):
+ def __init__(self):
+ self.FIREWALL_SERVICE_NAME = "iptables"
+ self.SERVICE_SUBCMD = "status"
+ # service cmd
+ self.SERVICE_CMD = "/sbin/service"
+ self.returncode = None
+ self.stdoutdata = None
+ self.stderrdata = None
+
+ def get_command(self):
+ return "%s %s %s" % (self.SERVICE_CMD, self.FIREWALL_SERVICE_NAME, self.SERVICE_SUBCMD)
+
+ def check_result(self, retcode, out, err):
+ result = False
+ if retcode == 3:
+ result = False
+ elif retcode == 0:
+ if "Table: filter" in out:
+ result = True
+ return result
+
+ def check_iptables(self):
+ try:
+ retcode, out, err = self.run_os_command(self.get_command())
+ return self.check_result(retcode, out, err)
+ except OSError:
+ return False
+
+ def run_os_command(self, cmd):
+ if type(cmd) == str:
+ cmd = shlex.split(cmd)
+
+ process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (stdoutdata, stderrdata) = process.communicate()
+ self.returncode = process.returncode
+ self.stdoutdata = stdoutdata
+ self.stderrdata = stderrdata
+ return self.returncode, self.stdoutdata, self.stderrdata
+
+
+
+class UbuntuFirewallChecks(FirewallChecks):
+ def __init__(self):
+ super(UbuntuFirewallChecks, self).__init__()
+ self.FIREWALL_SERVICE_NAME = "ufw"
+
+ def get_command(self):
+ return "%s %s" % (self.FIREWALL_SERVICE_NAME, self.SERVICE_SUBCMD)
+
+ def check_result(self, retcode, out, err):
+ # On ubuntu, the status command returns 0 whether running or not
+ result = False
+ if retcode == 0:
+ if "Status: inactive" in out:
+ result = False
+ elif "Status: active" in out:
+ result = True
+ return result
+
+ def get_running_result(self):
+ # To support test code. Expected ouput from run_os_command.
+ return (0, "ufw start/running", "")
+
+ def get_stopped_result(self):
+ # To support test code. Expected output from run_os_command.
+ return (0, "ufw stop/waiting", "")
+
+
+class Fedora18FirewallChecks(FirewallChecks):
+
+ def get_command(self):
+ return "systemctl is-active iptables"
+
+ def check_result(self, retcode, out, err):
+ result = False
+ if retcode == 0:
+ if "active" in out:
+ result = True
+ return result
+
+class SuseFirewallChecks(FirewallChecks):
+ def __init__(self):
+ self.FIREWALL_SERVICE_NAME = "SuSEfirewall2"
+ self.SERVICE_SUBCMD = "status"
+
+ def get_command(self):
+ return "%s %s" % (self.FIREWALL_SERVICE_NAME, self.SERVICE_SUBCMD)
+
+ def check_result(self, retcode, out, err):
+ result = False
+ if retcode == 0:
+ if "SuSEfirewall2 not active" in out:
+ result = False
+ elif "### iptables" in out:
+ result = True
+ return result
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d8927c8/ambari-server/src/main/python/ambari-server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari-server.py b/ambari-server/src/main/python/ambari-server.py
index 079b29f..bb9870b 100755
--- a/ambari-server/src/main/python/ambari-server.py
+++ b/ambari-server/src/main/python/ambari-server.py
@@ -41,7 +41,7 @@ import random
import pwd
from ambari_server.resourceFilesKeeper import ResourceFilesKeeper, KeeperException
import json
-from ambari_commons import OSCheck, OSConst
+from ambari_commons import OSCheck, OSConst, Firewall
from ambari_server import utils
# debug settings
@@ -427,100 +427,6 @@ ASF_LICENSE_HEADER = '''
# limitations under the License.
'''
-
-class FirewallChecks(object):
- def __init__(self):
-
- self.FIREWALL_SERVICE_NAME = "iptables"
- self.SERVICE_CMD = SERVICE_CMD
- self.SERVICE_SUBCMD = "status"
-
- def get_command(self):
- return "%s %s %s" % (self.SERVICE_CMD, self.FIREWALL_SERVICE_NAME, self.SERVICE_SUBCMD)
-
- def check_result(self, retcode, out, err):
- return retcode == 0
-
- def check_iptables(self):
- retcode, out, err = run_os_command(self.get_command())
- if err and len(err) > 0:
- print err
- if self.check_result(retcode, out, err):
- print_warning_msg("%s is running. Confirm the necessary Ambari ports are accessible. " %
- self.FIREWALL_SERVICE_NAME +
- "Refer to the Ambari documentation for more details on ports.")
- ok = get_YN_input("OK to continue [y/n] (y)? ", True)
- if not ok:
- raise FatalException(1, None)
-
- def get_running_result(self):
- # To support test code. Expected ouput from run_os_command.
- return (0, "", "")
-
- def get_stopped_result(self):
- # To support test code. Expected output from run_os_command.
- return (3, "", "")
-
-
-class UbuntuFirewallChecks(FirewallChecks):
- def __init__(self):
- super(UbuntuFirewallChecks, self).__init__()
-
- self.FIREWALL_SERVICE_NAME = "ufw"
- self.SERVICE_CMD = utils.locate_file('service', '/usr/sbin')
-
- def check_result(self, retcode, out, err):
- # On ubuntu, the status command returns 0 whether running or not
- return out and len(out) > 0 and out.strip() != "ufw stop/waiting"
-
- def get_running_result(self):
- # To support test code. Expected ouput from run_os_command.
- return (0, "ufw start/running", "")
-
- def get_stopped_result(self):
- # To support test code. Expected output from run_os_command.
- return (0, "ufw stop/waiting", "")
-
-
-class Fedora18FirewallChecks(FirewallChecks):
- def __init__(self):
- self.FIREWALL_SERVICE_NAME = "firewalld.service"
-
- def get_command(self):
- return "systemctl is-active firewalld.service"
-
-
-class OpenSuseFirewallChecks(FirewallChecks):
- def __init__(self):
- self.FIREWALL_SERVICE_NAME = "SuSEfirewall2"
-
- def get_command(self):
- return "/sbin/SuSEfirewall2 status"
-
-
-def get_firewall_object():
- if OS_TYPE == OSConst.OS_UBUNTU:
- return UbuntuFirewallChecks()
- elif OS_TYPE == OSConst.OS_FEDORA and int(OS_VERSION) >= 18:
- return Fedora18FirewallChecks()
- elif OS_TYPE == OSConst.OS_OPENSUSE:
- return OpenSuseFirewallChecks()
- else:
- return FirewallChecks()
-
-
-def get_firewall_object_types():
- # To support test code, so tests can loop through the types
- return (FirewallChecks,
- UbuntuFirewallChecks,
- Fedora18FirewallChecks,
- OpenSuseFirewallChecks)
-
-
-def check_iptables():
- return get_firewall_object().check_iptables()
-
-
def get_conf_dir():
try:
conf_dir = os.environ[AMBARI_CONF_VAR]
@@ -2230,8 +2136,21 @@ def setup(args):
err = 'Failed to create user. Exiting.'
raise FatalException(retcode, err)
- print 'Checking iptables...'
- check_iptables()
+ print 'Checking firewall...'
+ firewall_obj = Firewall().getFirewallObject()
+ firewall_on = firewall_obj.check_iptables()
+ if firewall_obj.stderrdata and len(firewall_obj.stderrdata) > 0:
+ print firewall_obj.stderrdata
+ if firewall_on:
+ print_warning_msg("%s is running. Confirm the necessary Ambari ports are accessible. " %
+ firewall_obj.FIREWALL_SERVICE_NAME +
+ "Refer to the Ambari documentation for more details on ports.")
+ ok = get_YN_input("OK to continue [y/n] (y)? ", True)
+ if not ok:
+ raise FatalException(1, None)
+
+
+
# proceed jdbc properties if they were set
if args.jdbc_driver is not None and args.jdbc_db is not None:
http://git-wip-us.apache.org/repos/asf/ambari/blob/7d8927c8/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index b76e2bd..46078ac 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -31,6 +31,7 @@ import platform
import shutil
from pwd import getpwnam
from ambari_server.resourceFilesKeeper import ResourceFilesKeeper, KeeperException
+from ambari_commons import Firewall, OSCheck, OSConst, FirewallChecks
with patch("platform.linux_distribution", return_value = ('Suse','11','Final')):
# We have to use this import HACK because the filename contains a dash
@@ -1005,30 +1006,54 @@ class TestAmbariServer(TestCase):
self.assertTrue(set_file_permissions_mock.called)
- @patch.object(ambari_server, "run_os_command")
- @patch.object(ambari_server, "print_warning_msg")
- @patch.object(ambari_server, "get_YN_input")
- def test_check_iptables_is_running(self, get_YN_input_mock, print_warning_msg, run_os_command_mock):
- counter = 0
- for fwo_type in ambari_server.get_firewall_object_types():
- fwo = fwo_type()
- run_os_command_mock.return_value = fwo.get_running_result()
- get_YN_input_mock.side_effect = [True]
- fwo.check_iptables()
- self.assertEqual(len(print_warning_msg.call_args_list), counter+1)
- self.assertEqual(print_warning_msg.call_args_list[counter][0][0],
- "%s is running. Confirm the necessary Ambari ports are accessible. " % fwo.FIREWALL_SERVICE_NAME +
- "Refer to the Ambari documentation for more details on ports.")
- counter += 1
+ @patch.object(FirewallChecks, "run_os_command")
+ @patch.object(OSCheck, "get_os_family")
+ @patch.object(OSCheck, "get_os_type")
+ @patch.object(OSCheck, "get_os_major_version")
+ def test_check_iptables_is_running(self, get_os_major_version_mock, get_os_type_mock, get_os_family_mock, run_os_command_mock):
+
+ get_os_major_version_mock.return_value = 18
+ get_os_type_mock.return_value = OSConst.OS_FEDORA
+ get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
+
+ firewall_obj = Firewall().getFirewallObject()
+ run_os_command_mock.return_value = 0, "active", ""
+ self.assertEqual("Fedora18FirewallChecks", firewall_obj.__class__.__name__)
+ self.assertTrue(firewall_obj.check_iptables())
+ run_os_command_mock.return_value = 3, "", ""
+ self.assertFalse(firewall_obj.check_iptables())
+
+
+ get_os_type_mock.return_value = OSConst.OS_UBUNTU
+ get_os_family_mock.return_value = OSConst.DEBIAN_FAMILY
+
+ firewall_obj = Firewall().getFirewallObject()
+ run_os_command_mock.return_value = 0, "Status: active", ""
+ self.assertEqual("UbuntuFirewallChecks", firewall_obj.__class__.__name__)
+ self.assertTrue(firewall_obj.check_iptables())
+ run_os_command_mock.return_value = 0, "Status: inactive", ""
+ self.assertFalse(firewall_obj.check_iptables())
+
+ get_os_type_mock.return_value = ""
+ get_os_family_mock.return_value = OSConst.SUSE_FAMILY
+
+ firewall_obj = Firewall().getFirewallObject()
+ run_os_command_mock.return_value = 0, "### iptables", ""
+ self.assertEqual("SuseFirewallChecks", firewall_obj.__class__.__name__)
+ self.assertTrue(firewall_obj.check_iptables())
+ run_os_command_mock.return_value = 0, "SuSEfirewall2 not active", ""
+ self.assertFalse(firewall_obj.check_iptables())
+
+ get_os_type_mock.return_value = ""
+ get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
+
+ firewall_obj = Firewall().getFirewallObject()
+ run_os_command_mock.return_value = 0, "Table: filter", ""
+ self.assertEqual("FirewallChecks", firewall_obj.__class__.__name__)
+ self.assertTrue(firewall_obj.check_iptables())
+ run_os_command_mock.return_value = 3, "", ""
+ self.assertFalse(firewall_obj.check_iptables())
- @patch.object(ambari_server, "run_os_command")
- @patch.object(ambari_server, "print_warning_msg")
- def test_check_iptables_is_not_running(self, print_warning_msg, run_os_command_mock):
- for fwo_type in ambari_server.get_firewall_object_types():
- fwo = fwo_type()
- run_os_command_mock.return_value = fwo.get_stopped_result()
- fwo.check_iptables()
- self.assertFalse(print_warning_msg.called)
def test_dlprogress(self):
@@ -2301,6 +2326,10 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
result = ambari_server.find_jdk()
self.assertEqual(result, "two")
+ @patch.object(FirewallChecks, "run_os_command")
+ @patch.object(OSCheck, "get_os_family")
+ @patch.object(OSCheck, "get_os_type")
+ @patch.object(OSCheck, "get_os_major_version")
@patch("os.path.exists")
@patch("os.path.isfile")
@patch.object(ambari_server, "remove_file")
@@ -2312,7 +2341,6 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
@patch.object(ambari_server, "configure_postgres")
@patch.object(ambari_server, "setup_db")
@patch.object(ambari_server, "check_postgre_up")
- @patch.object(ambari_server, "check_iptables")
@patch.object(ambari_server, "check_ambari_user")
@patch.object(ambari_server, "check_jdbc_drivers")
@patch.object(ambari_server, "check_selinux")
@@ -2326,9 +2354,10 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
def test_setup(self, proceedJDBCProperties_mock, is_server_runing_mock, is_root_mock, store_local_properties_mock,
is_local_database_mock, store_remote_properties_mock,
setup_remote_db_mock, check_selinux_mock, check_jdbc_drivers_mock, check_ambari_user_mock,
- check_iptables_mock, check_postgre_up_mock, setup_db_mock, configure_postgres_mock,
+ check_postgre_up_mock, setup_db_mock, configure_postgres_mock,
download_jdk_mock, configure_os_settings_mock, get_YN_input,
- verify_setup_allowed_method, is_jdbc_user_changed_mock, remove_file_mock, isfile_mock, exists_mock):
+ verify_setup_allowed_method, is_jdbc_user_changed_mock, remove_file_mock, isfile_mock, exists_mock,
+ get_os_major_version_mock, get_os_type_mock,get_os_family_mock, run_os_command_mock):
args = MagicMock()
failed = False
is_server_runing_mock.return_value = (False, 0)
@@ -2337,6 +2366,9 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
verify_setup_allowed_method.return_value = 0
exists_mock.return_value = False
remove_file_mock.return_value = 0
+ get_os_type_mock.return_value = ""
+ get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
+ run_os_command_mock.return_value = 3,"",""
def reset_mocks():
is_jdbc_user_changed_mock.reset_mock()
@@ -2348,7 +2380,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
check_selinux_mock.reset_mock()
check_jdbc_drivers_mock.reset_mock()
check_ambari_user_mock.reset_mock()
- check_iptables_mock.reset_mock()
+ run_os_command_mock.reset_mock()
check_postgre_up_mock.reset_mock()
setup_db_mock.reset_mock()
configure_postgres_mock.reset_mock()
@@ -2372,7 +2404,6 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
check_selinux_mock.return_value = 0
check_ambari_user_mock.return_value = 0
check_jdbc_drivers_mock.return_value = 0
- check_iptables_mock.return_value = (0, "other")
check_postgre_up_mock.return_value = "running", 0, "", ""
setup_db_mock.return_value = (0, None, None)
setup_remote_db_mock.return_value = 0
@@ -2389,6 +2420,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
self.assertEqual(None, result)
self.assertTrue(check_ambari_user_mock.called)
+ self.assertEqual(1, run_os_command_mock.call_count)
self.assertEquals(True, store_remote_properties_mock.called)
self.assertEquals(False, store_local_properties_mock.called)
@@ -3652,13 +3684,17 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
os.unlink(fn2)
+
+ @patch.object(FirewallChecks, "run_os_command")
+ @patch.object(OSCheck, "get_os_family")
+ @patch.object(OSCheck, "get_os_type")
+ @patch.object(OSCheck, "get_os_major_version")
@patch.object(ambari_server, 'verify_setup_allowed')
@patch("sys.exit")
@patch.object(ambari_server, "get_YN_input")
@patch.object(ambari_server, "get_db_cli_tool")
@patch.object(ambari_server, "store_remote_properties")
@patch.object(ambari_server, "is_local_database")
- @patch.object(ambari_server, "check_iptables")
@patch.object(ambari_server, "check_jdbc_drivers")
@patch.object(ambari_server, "is_root")
@patch.object(ambari_server, "check_ambari_user")
@@ -3668,9 +3704,10 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
@patch.object(ambari_server, "check_selinux")
def test_setup_remote_db_wo_client(self, check_selinux_mock, raw_input, configure_os_settings_mock,
download_jdk_mock, check_ambari_user_mock, is_root_mock,
- check_jdbc_drivers_mock, check_iptables_mock, is_local_db_mock,
+ check_jdbc_drivers_mock, is_local_db_mock,
store_remote_properties_mock, get_db_cli_tool_mock, get_YN_input,
- exit_mock, verify_setup_allowed_method):
+ exit_mock, verify_setup_allowed_method,
+ get_os_major_version_mock, get_os_type_mock,get_os_family_mock, run_os_command_mock):
args = MagicMock()
args.jdbc_driver= None
args.jdbc_db = None
@@ -3679,7 +3716,9 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
is_local_db_mock.return_value = False
get_YN_input.return_value = False
check_selinux_mock.return_value = 0
- check_iptables_mock.return_value = (0, "other")
+ get_os_type_mock.return_value = ""
+ get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
+ run_os_command_mock.return_value = 3,"",""
store_remote_properties_mock.return_value = 0
get_db_cli_tool_mock.return_value = None
check_jdbc_drivers_mock.return_value = 0
@@ -3695,12 +3734,15 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
# Expected
self.assertTrue("Remote database setup aborted." in fe.reason)
+ @patch.object(FirewallChecks, "run_os_command")
+ @patch.object(OSCheck, "get_os_family")
+ @patch.object(OSCheck, "get_os_type")
+ @patch.object(OSCheck, "get_os_major_version")
@patch.object(ambari_server, 'verify_setup_allowed')
@patch("sys.exit")
@patch.object(ambari_server, "get_YN_input")
@patch.object(ambari_server, "get_db_cli_tool")
@patch.object(ambari_server, "is_local_database")
- @patch.object(ambari_server, "check_iptables")
@patch.object(ambari_server, "check_jdbc_drivers")
@patch.object(ambari_server, "is_root")
@patch.object(ambari_server, "check_ambari_user")
@@ -3709,14 +3751,18 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
@patch('__builtin__.raw_input')
def test_store_remote_properties(self, raw_input, configure_os_settings_mock,
download_jdk_mock, check_ambari_user_mock, is_root_mock,
- check_jdbc_drivers_mock, check_iptables_mock, is_local_db_mock,
- get_db_cli_tool_mock, get_YN_input, exit_mock, verify_setup_allowed_method):
+ check_jdbc_drivers_mock, is_local_db_mock,
+ get_db_cli_tool_mock, get_YN_input, exit_mock, verify_setup_allowed_method,
+ get_os_major_version_mock, get_os_type_mock,get_os_family_mock, run_os_command_mock
+ ):
raw_input.return_value = ""
is_root_mock.return_value = True
is_local_db_mock.return_value = False
get_YN_input.return_value = False
- check_iptables_mock.return_value = (0, "other")
+ get_os_type_mock.return_value = ""
+ get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
+ run_os_command_mock.return_value = 3,"",""
get_db_cli_tool_mock.return_value = None
check_jdbc_drivers_mock.return_value = 0
check_ambari_user_mock.return_value = 0
@@ -4849,6 +4895,10 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
except FatalException:
self.fail("Setup should be successful")
+ @patch.object(FirewallChecks, "run_os_command")
+ @patch.object(OSCheck, "get_os_family")
+ @patch.object(OSCheck, "get_os_type")
+ @patch.object(OSCheck, "get_os_major_version")
@patch.object(ambari_server, "is_jdbc_user_changed")
@patch.object(ambari_server, 'verify_setup_allowed')
@patch.object(ambari_server, "get_YN_input")
@@ -4856,7 +4906,6 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
@patch.object(ambari_server, "download_jdk")
@patch.object(ambari_server, "configure_postgres")
@patch.object(ambari_server, "check_postgre_up")
- @patch.object(ambari_server, "check_iptables")
@patch.object(ambari_server, "check_ambari_user")
@patch.object(ambari_server, "check_jdbc_drivers")
@patch.object(ambari_server, "check_selinux")
@@ -4870,9 +4919,11 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
def test_ambariServerSetupWithCustomDbName(self, raw_input, exit_mock, store_password_file_mock,
get_is_secure_mock, setup_db_mock, is_root_mock, is_local_database_mock,
check_selinux_mock, check_jdbc_drivers_mock, check_ambari_user_mock,
- check_iptables_mock, check_postgre_up_mock, configure_postgres_mock,
+ check_postgre_up_mock, configure_postgres_mock,
download_jdk_mock, configure_os_settings_mock, get_YN_input,
- verify_setup_allowed_method, is_jdbc_user_changed_mock):
+ verify_setup_allowed_method, is_jdbc_user_changed_mock,
+ get_os_major_version_mock, get_os_type_mock,
+ get_os_family_mock, run_os_command_mock):
args = MagicMock()
@@ -4883,7 +4934,6 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
check_selinux_mock.return_value = 0
check_ambari_user_mock.return_value = 0
check_jdbc_drivers_mock.return_value = 0
- check_iptables_mock.return_value = (0, "other")
check_postgre_up_mock.return_value = "running", 0, "", ""
is_local_database_mock.return_value = True
configure_postgres_mock.return_value = 0, "", ""
@@ -4893,6 +4943,9 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
setup_db_mock.return_value = (0, None, None)
get_is_secure_mock.return_value = False
store_password_file_mock.return_value = "password"
+ get_os_type_mock.return_value = ""
+ get_os_family_mock.return_value = OSConst.REDHAT_FAMILY
+ run_os_command_mock.return_value = 3,"",""
new_db = "newDBName"
args.dbms = "postgres"
[43/50] [abbrv] git commit: AMBARI-6860 - Alerts: Add Ability to
Invalidate Definition Hash For An Agent (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-6860 - Alerts: Add Ability to Invalidate Definition Hash For An Agent (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9b6c02f6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9b6c02f6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9b6c02f6
Branch: refs/heads/branch-alerts-dev
Commit: 9b6c02f6e2f7fa6a34457a4ebb39e20b38630e2f
Parents: 93e61c0
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Thu Aug 14 09:22:49 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Aug 20 10:46:38 2014 -0400
----------------------------------------------------------------------
.../ambari/server/controller/AmbariServer.java | 7 +-
.../AlertDefinitionResourceProvider.java | 20 +--
.../server/state/alert/AlertDefinitionHash.java | 122 +++++++++++++++++++
.../src/main/resources/properties.json | 3 +-
.../AlertDefinitionResourceProviderTest.java | 54 ++++++--
.../state/alerts/AlertDefinitionHashTest.java | 112 ++++++++++++++---
6 files changed, 280 insertions(+), 38 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/9b6c02f6/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index 7b93836..e9b0c9e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -81,17 +81,18 @@ import org.apache.ambari.server.scheduler.ExecutionScheduleManager;
import org.apache.ambari.server.security.CertificateManager;
import org.apache.ambari.server.security.SecurityFilter;
import org.apache.ambari.server.security.SecurityHelper;
+import org.apache.ambari.server.security.authorization.AmbariAuthorizationFilter;
import org.apache.ambari.server.security.authorization.AmbariLdapAuthenticationProvider;
import org.apache.ambari.server.security.authorization.AmbariLdapDataPopulator;
import org.apache.ambari.server.security.authorization.AmbariLocalUserDetailsService;
import org.apache.ambari.server.security.authorization.Users;
-import org.apache.ambari.server.security.authorization.AmbariAuthorizationFilter;
import org.apache.ambari.server.security.authorization.internal.AmbariInternalAuthenticationProvider;
import org.apache.ambari.server.security.unsecured.rest.CertificateDownload;
import org.apache.ambari.server.security.unsecured.rest.CertificateSign;
import org.apache.ambari.server.security.unsecured.rest.ConnectionInfo;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.alert.AlertDefinitionHash;
import org.apache.ambari.server.utils.StageUtils;
import org.apache.ambari.server.utils.VersionUtils;
import org.apache.ambari.server.view.ViewRegistry;
@@ -533,7 +534,9 @@ public class AmbariServer {
injector.getInstance(Gson.class), ambariMetaInfo);
StackDependencyResourceProvider.init(ambariMetaInfo);
ClusterResourceProvider.init(injector.getInstance(BlueprintDAO.class), ambariMetaInfo, injector.getInstance(ConfigHelper.class));
- AlertDefinitionResourceProvider.init(injector.getInstance(AlertDefinitionDAO.class));
+ AlertDefinitionResourceProvider.init(
+ injector.getInstance(AlertDefinitionDAO.class),
+ injector.getInstance(AlertDefinitionHash.class));
PermissionResourceProvider.init(injector.getInstance(PermissionDAO.class));
ViewPermissionResourceProvider.init(injector.getInstance(PermissionDAO.class));
PrivilegeResourceProvider.init(injector.getInstance(PrivilegeDAO.class), injector.getInstance(UserDAO.class),
http://git-wip-us.apache.org/repos/asf/ambari/blob/9b6c02f6/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
index 5ea6d3b..83bd7b1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
@@ -42,6 +42,7 @@ import org.apache.ambari.server.controller.utilities.PropertyHelper;
import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.alert.AlertDefinitionHash;
import org.apache.ambari.server.state.alert.Scope;
import org.apache.ambari.server.state.alert.SourceType;
@@ -65,20 +66,23 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
protected static final String ALERT_DEF_COMPONENT_NAME = "AlertDefinition/component_name";
protected static final String ALERT_DEF_ENABLED = "AlertDefinition/enabled";
protected static final String ALERT_DEF_SCOPE = "AlertDefinition/scope";
- protected static final String ALERT_DEF_UUID = "AlertDefinition/uuid";
private static Set<String> pkPropertyIds = new HashSet<String>(
Arrays.asList(ALERT_DEF_ID, ALERT_DEF_NAME));
+
private static AlertDefinitionDAO alertDefinitionDAO = null;
private static Gson gson = new Gson();
+ private static AlertDefinitionHash alertDefinitionHash;
+
/**
* @param instance
*/
@Inject
- public static void init(AlertDefinitionDAO instance) {
+ public static void init(AlertDefinitionDAO instance, AlertDefinitionHash adh) {
alertDefinitionDAO = instance;
+ alertDefinitionHash = adh;
}
AlertDefinitionResourceProvider(Set<String> propertyIds,
@@ -120,6 +124,7 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
// !!! TODO multi-create in a transaction
for (AlertDefinitionEntity entity : entities) {
alertDefinitionDAO.create(entity);
+ alertDefinitionHash.invalidateHosts(entity);
}
}
@@ -219,7 +224,6 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
results.add(toResource(false, clusterName, entity, requestPropertyIds));
}
} else {
-
Cluster cluster = null;
try {
cluster = getManagementController().getClusters().getCluster(clusterName);
@@ -297,6 +301,8 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
entity.setHash(UUID.randomUUID().toString());
alertDefinitionDAO.merge(entity);
+
+ alertDefinitionHash.invalidateHosts(entity);
}
}
@@ -323,12 +329,13 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
LOG.info("Deleting alert definition {}", definitionId);
- final AlertDefinitionEntity ad = alertDefinitionDAO.findById(definitionId.longValue());
+ final AlertDefinitionEntity entity = alertDefinitionDAO.findById(definitionId.longValue());
modifyResources(new Command<Void>() {
@Override
public Void invoke() throws AmbariException {
- alertDefinitionDAO.remove(ad);
+ alertDefinitionDAO.remove(entity);
+ alertDefinitionHash.invalidateHosts(entity);
return null;
}
});
@@ -355,9 +362,6 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
setResourceProperty(resource, ALERT_DEF_LABEL, entity.getLabel(),
requestedIds);
- setResourceProperty(resource, ALERT_DEF_UUID, entity.getHash(),
- requestedIds);
-
if (!isCollection && null != resource.getPropertyValue(ALERT_DEF_SOURCE_TYPE)) {
try {
http://git-wip-us.apache.org/repos/asf/ambari/blob/9b6c02f6/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java
index 1f31c35..7cbd4b3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java
@@ -31,10 +31,13 @@ import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.RootServiceResponseFactory.Components;
+import org.apache.ambari.server.controller.RootServiceResponseFactory.Services;
import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.ServiceComponentHost;
@@ -138,6 +141,39 @@ public class AlertDefinitionHash {
}
/**
+ * Invalidate all cached hashes, causing subsequent lookups to recalculate.
+ */
+ public void invalidateAll() {
+ m_hashes.clear();
+ }
+
+ /**
+ * Invalidates the cached hash for the specified agent host.
+ *
+ * @param hostName
+ * the host to invalidate the cache for (not {@code null}).
+ */
+ public void invalidate(String hostName) {
+ m_hashes.remove(hostName);
+ }
+
+ /**
+ * Gets whether the alert definition hash for the specified host has been
+ * calculated and cached.
+ *
+ * @param hostName
+ * the host.
+ * @return {@code true} if the hash was calculated; {@code false} otherwise.
+ */
+ public boolean isHashCached(String hostName) {
+ if (null == hostName) {
+ return false;
+ }
+
+ return m_hashes.containsKey(hostName);
+ }
+
+ /**
* Gets the alert definitions for the specified host. This will include the
* following types of alert definitions:
* <ul>
@@ -219,6 +255,92 @@ public class AlertDefinitionHash {
}
/**
+ * Invalidate the hashes of any host that would be affected by the specified
+ * definition.
+ *
+ * @param definition
+ * the definition to use to find the hosts to invalidate (not
+ * {@code null}).
+ */
+ public void invalidateHosts(AlertDefinitionEntity definition) {
+ long clusterId = definition.getClusterId();
+
+ // intercept host agent alerts; they affect all hosts
+ String definitionServiceName = definition.getServiceName();
+ String definitionComponentName = definition.getComponentName();
+ if (Services.AMBARI.equals(definitionServiceName)
+ && Components.AMBARI_AGENT.equals(definitionComponentName)) {
+
+ invalidateAll();
+ return;
+ }
+
+ Cluster cluster = null;
+ Map<String, Host> hosts = null;
+ try {
+ cluster = m_clusters.getClusterById(clusterId);
+ if (null != cluster) {
+ hosts = m_clusters.getHostsForCluster(cluster.getClusterName());
+ }
+
+ if (null == cluster) {
+ LOG.warn("Unable to lookup cluster with ID {}", clusterId);
+ }
+ } catch (Exception exception) {
+ LOG.error("Unable to lookup cluster with ID {}", clusterId, exception);
+ }
+
+ if (null == cluster) {
+ return;
+ }
+
+ // find all hosts that have the matching service and component
+ if (null != hosts) {
+ for (String hostName : hosts.keySet()) {
+ List<ServiceComponentHost> hostComponents = cluster.getServiceComponentHosts(hostName);
+ if (null == hostComponents || hostComponents.size() == 0) {
+ continue;
+ }
+
+ // if a host has a matching service/component, invalidate it
+ for (ServiceComponentHost component : hostComponents) {
+ String serviceName = component.getServiceName();
+ String componentName = component.getServiceComponentName();
+ if (serviceName.equals(definitionServiceName)
+ && componentName.equals(definitionComponentName)) {
+ invalidate(hostName);
+ }
+ }
+ }
+ }
+
+ // get the service that this alert definition is associated with
+ Map<String, Service> services = cluster.getServices();
+ Service service = services.get(definitionServiceName);
+ if (null == service) {
+ LOG.warn("The alert definition {} has an unknown service of {}",
+ definition.getDefinitionName(), definitionServiceName);
+ return;
+ }
+
+ // get all master components of the definition's service; any hosts that
+ // run the master should be invalidated as well
+ Map<String, ServiceComponent> components = service.getServiceComponents();
+ if (null != components) {
+ for (Entry<String, ServiceComponent> component : components.entrySet()) {
+ if (component.getValue().isMasterComponent()) {
+ Map<String, ServiceComponentHost> componentHosts = component.getValue().getServiceComponentHosts();
+ if (null != componentHosts) {
+ for (String componentHost : componentHosts.keySet()) {
+ invalidate(componentHost);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ /**
* Calculates a unique hash value representing all of the alert definitions
* that should be scheduled to run on a given host. Alerts of type
* {@link SourceType#AGGREGATE} are not included in the hash since they are
http://git-wip-us.apache.org/repos/asf/ambari/blob/9b6c02f6/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index fec8aa2..6686f39 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -421,8 +421,7 @@
"AlertDefinition/interval",
"AlertDefinition/enabled",
"AlertDefinition/scope",
- "AlertDefinition/source",
- "AlertDefinition/uuid"
+ "AlertDefinition/source"
],
"Controller":[
"Controllers/name",
http://git-wip-us.apache.org/repos/asf/ambari/blob/9b6c02f6/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
index cf6ff42..864eb08 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
@@ -20,6 +20,7 @@ package org.apache.ambari.server.controller.internal;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
@@ -46,7 +47,9 @@ import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.alert.AlertDefinitionHash;
import org.easymock.Capture;
+import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@@ -57,16 +60,21 @@ import org.junit.Test;
public class AlertDefinitionResourceProviderTest {
AlertDefinitionDAO dao = null;
+ AlertDefinitionHash definitionHash = null;
private static String DEFINITION_UUID = UUID.randomUUID().toString();
@Before
public void before() {
dao = createStrictMock(AlertDefinitionDAO.class);
+ definitionHash = createNiceMock(AlertDefinitionHash.class);
- AlertDefinitionResourceProvider.init(dao);
+ AlertDefinitionResourceProvider.init(dao, definitionHash);
}
+ /**
+ * @throws Exception
+ */
@Test
public void testGetResourcesNoPredicate() throws Exception {
AlertDefinitionResourceProvider provider = createProvider(null);
@@ -79,13 +87,15 @@ public class AlertDefinitionResourceProviderTest {
assertEquals(0, results.size());
}
+ /**
+ * @throws Exception
+ */
@Test
public void testGetResourcesClusterPredicate() throws Exception {
Request request = PropertyHelper.getReadRequest(
AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME,
AlertDefinitionResourceProvider.ALERT_DEF_ID,
AlertDefinitionResourceProvider.ALERT_DEF_NAME,
- AlertDefinitionResourceProvider.ALERT_DEF_UUID,
AlertDefinitionResourceProvider.ALERT_DEF_LABEL);
AmbariManagementController amc = createMock(AmbariManagementController.class);
@@ -114,12 +124,12 @@ public class AlertDefinitionResourceProviderTest {
Assert.assertEquals("Mock Label",
r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_LABEL));
- Assert.assertEquals(DEFINITION_UUID,
- r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_UUID));
-
verify(amc, clusters, cluster, dao);
}
+ /**
+ * @throws Exception
+ */
@Test
public void testGetSingleResource() throws Exception {
Request request = PropertyHelper.getReadRequest(
@@ -158,6 +168,9 @@ public class AlertDefinitionResourceProviderTest {
Assert.assertNotNull(r.getPropertyValue("AlertDefinition/source/type"));
}
+ /**
+ * @throws Exception
+ */
@Test
public void testCreateResources() throws Exception {
AmbariManagementController amc = createMock(AmbariManagementController.class);
@@ -171,7 +184,11 @@ public class AlertDefinitionResourceProviderTest {
dao.create(capture(entityCapture));
expectLastCall();
- replay(amc, clusters, cluster, dao);
+ // creating a single definition should invalidate hosts of the definition
+ definitionHash.invalidateHosts(EasyMock.anyObject(AlertDefinitionEntity.class));
+ expectLastCall().once();
+
+ replay(amc, clusters, cluster, dao, definitionHash);
AlertDefinitionResourceProvider provider = createProvider(amc);
@@ -208,6 +225,9 @@ public class AlertDefinitionResourceProviderTest {
}
+ /**
+ * @throws Exception
+ */
@Test
public void testUpdateResources() throws Exception {
AmbariManagementController amc = createMock(AmbariManagementController.class);
@@ -221,7 +241,11 @@ public class AlertDefinitionResourceProviderTest {
dao.create(capture(entityCapture));
expectLastCall();
- replay(amc, clusters, cluster, dao);
+ // updating a single definition should invalidate hosts of the definition
+ definitionHash.invalidateHosts(EasyMock.anyObject(AlertDefinitionEntity.class));
+ expectLastCall().once();
+
+ replay(amc, clusters, cluster, dao, definitionHash);
Map<String, Object> requestProps = new HashMap<String, Object>();
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME, "c1");
@@ -270,6 +294,9 @@ public class AlertDefinitionResourceProviderTest {
verify(amc, clusters, cluster, dao);
}
+ /**
+ * @throws Exception
+ */
@Test
public void testDeleteResources() throws Exception {
AmbariManagementController amc = createMock(AmbariManagementController.class);
@@ -283,7 +310,11 @@ public class AlertDefinitionResourceProviderTest {
dao.create(capture(entityCapture));
expectLastCall();
- replay(amc, clusters, cluster, dao);
+ // deleting a single definition should invalidate hosts of the definition
+ definitionHash.invalidateHosts(EasyMock.anyObject(AlertDefinitionEntity.class));
+ expectLastCall().once();
+
+ replay(amc, clusters, cluster, dao, definitionHash);
AlertDefinitionResourceProvider provider = createProvider(amc);
@@ -323,6 +354,10 @@ public class AlertDefinitionResourceProviderTest {
}
+ /**
+ * @param amc
+ * @return
+ */
private AlertDefinitionResourceProvider createProvider(AmbariManagementController amc) {
return new AlertDefinitionResourceProvider(
PropertyHelper.getPropertyIds(Resource.Type.AlertDefinition),
@@ -330,6 +365,9 @@ public class AlertDefinitionResourceProviderTest {
amc);
}
+ /**
+ * @return
+ */
private List<AlertDefinitionEntity> getMockEntities() {
AlertDefinitionEntity entity = new AlertDefinitionEntity();
entity.setClusterId(Long.valueOf(1L));
http://git-wip-us.apache.org/repos/asf/ambari/blob/9b6c02f6/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java
index 937417a..c8fce49 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java
@@ -66,6 +66,10 @@ public class AlertDefinitionHashTest extends TestCase {
private static final String CLUSTERNAME = "cluster1";
private static final String HOSTNAME = "c6401.ambari.apache.org";
+ private List<AlertDefinitionEntity> m_agentDefinitions;
+ private AlertDefinitionEntity m_hdfsService;
+ AlertDefinitionEntity m_hdfsHost;
+
/**
*
*/
@@ -124,6 +128,9 @@ public class AlertDefinitionHashTest extends TestCase {
expect(m_mockClusters.getCluster((String) anyObject())).andReturn(
m_mockCluster).atLeastOnce();
+ expect(m_mockClusters.getClusterById(EasyMock.anyInt())).andReturn(
+ m_mockCluster).atLeastOnce();
+
// cluster mock
expect(m_mockCluster.getClusterId()).andReturn(Long.valueOf(1)).anyTimes();
expect(m_mockCluster.getClusterName()).andReturn(CLUSTERNAME).anyTimes();
@@ -132,21 +139,21 @@ public class AlertDefinitionHashTest extends TestCase {
m_mockCluster.getServiceComponentHosts(EasyMock.anyObject(String.class))).andReturn(
serviceComponentHosts).anyTimes();
- AlertDefinitionEntity hdfsService = new AlertDefinitionEntity();
- hdfsService.setDefinitionId(1L);
- hdfsService.setClusterId(1L);
- hdfsService.setHash(UUID.randomUUID().toString());
- hdfsService.setServiceName("HDFS");
- hdfsService.setComponentName("NAMENODE");
- hdfsService.setScope(Scope.SERVICE);
-
- AlertDefinitionEntity hdfsHost = new AlertDefinitionEntity();
- hdfsHost.setDefinitionId(2L);
- hdfsHost.setClusterId(1L);
- hdfsHost.setHash(UUID.randomUUID().toString());
- hdfsHost.setServiceName("HDFS");
- hdfsHost.setComponentName("DATANODE");
- hdfsHost.setScope(Scope.HOST);
+ m_hdfsService = new AlertDefinitionEntity();
+ m_hdfsService.setDefinitionId(1L);
+ m_hdfsService.setClusterId(1L);
+ m_hdfsService.setHash(UUID.randomUUID().toString());
+ m_hdfsService.setServiceName("HDFS");
+ m_hdfsService.setComponentName("NAMENODE");
+ m_hdfsService.setScope(Scope.SERVICE);
+
+ m_hdfsHost = new AlertDefinitionEntity();
+ m_hdfsHost.setDefinitionId(2L);
+ m_hdfsHost.setClusterId(1L);
+ m_hdfsHost.setHash(UUID.randomUUID().toString());
+ m_hdfsHost.setServiceName("HDFS");
+ m_hdfsHost.setComponentName("DATANODE");
+ m_hdfsHost.setScope(Scope.HOST);
AlertDefinitionEntity agentScoped = new AlertDefinitionEntity();
agentScoped.setDefinitionId(3L);
@@ -159,15 +166,17 @@ public class AlertDefinitionHashTest extends TestCase {
EasyMock.expect(
m_mockDao.findByServiceMaster(EasyMock.anyInt(),
(Set<String>) EasyMock.anyObject())).andReturn(
- Collections.singletonList(hdfsService)).anyTimes();
+ Collections.singletonList(m_hdfsService)).anyTimes();
EasyMock.expect(
m_mockDao.findByServiceComponent(EasyMock.anyInt(),
EasyMock.anyObject(String.class), EasyMock.anyObject(String.class))).andReturn(
- Collections.singletonList(hdfsHost)).anyTimes();
+ Collections.singletonList(m_hdfsHost)).anyTimes();
+ m_agentDefinitions = new ArrayList<AlertDefinitionEntity>();
+ m_agentDefinitions.add(agentScoped);
EasyMock.expect(m_mockDao.findAgentScoped(EasyMock.anyInt())).andReturn(
- Collections.singletonList(agentScoped)).anyTimes();
+ m_agentDefinitions).anyTimes();
EasyMock.replay(m_mockClusters, m_mockCluster, m_mockDao);
m_hash = m_injector.getInstance(AlertDefinitionHash.class);
@@ -205,6 +214,73 @@ public class AlertDefinitionHashTest extends TestCase {
}
/**
+ * Test {@link AlertDefinitionHash#invalidateAll()}.
+ */
+ @Test
+ public void testInvalidateAll() {
+ String hash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
+ assertNotNull(hash);
+
+ m_hash.invalidateAll();
+
+ String newHash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
+ assertEquals(hash, newHash);
+
+ m_hash.invalidateAll();
+
+ // add a new alert definition, forcing new hash
+ AlertDefinitionEntity agentScoped = new AlertDefinitionEntity();
+ agentScoped.setDefinitionId(System.currentTimeMillis());
+ agentScoped.setClusterId(1L);
+ agentScoped.setHash(UUID.randomUUID().toString());
+ agentScoped.setServiceName("AMBARI");
+ agentScoped.setComponentName("AMBARI_AGENT");
+ agentScoped.setScope(Scope.HOST);
+
+ m_agentDefinitions.add(agentScoped);
+
+ newHash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
+ assertNotSame(hash, newHash);
+ }
+
+ /**
+ * Test {@link AlertDefinitionHash#isHashCached(String)}.
+ */
+ @Test
+ public void testIsHashCached() {
+ assertFalse(m_hash.isHashCached(HOSTNAME));
+ String hash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
+ assertNotNull(hash);
+ assertTrue(m_hash.isHashCached(HOSTNAME));
+
+ m_hash.invalidate(HOSTNAME);
+ assertFalse(m_hash.isHashCached(HOSTNAME));
+ hash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
+ assertNotNull(hash);
+ assertTrue(m_hash.isHashCached(HOSTNAME));
+
+ m_hash.invalidateAll();
+ assertFalse(m_hash.isHashCached(HOSTNAME));
+ hash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
+ assertNotNull(hash);
+ assertTrue(m_hash.isHashCached(HOSTNAME));
+ }
+
+ /**
+ * Test {@link AlertDefinitionHash#invalidateHosts(AlertDefinitionEntity)}.
+ */
+ @Test
+ public void testInvalidateHosts() {
+ assertFalse(m_hash.isHashCached(HOSTNAME));
+ String hash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
+ assertNotNull(hash);
+ assertTrue(m_hash.isHashCached(HOSTNAME));
+
+ m_hash.invalidateHosts(m_hdfsHost);
+ assertFalse(m_hash.isHashCached(HOSTNAME));
+ }
+
+ /**
*
*/
private class MockModule implements Module {
[17/50] [abbrv] git commit: AMBARI-6912. Browser uses a lot of
processor and memory with opened all background operations. (akovalenko)
Posted by jo...@apache.org.
AMBARI-6912. Browser uses a lot of processor and memory with opened all background operations. (akovalenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/26c1edca
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/26c1edca
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/26c1edca
Branch: refs/heads/branch-alerts-dev
Commit: 26c1edca8565a4db3fbd9a68d2622dff874a9dda
Parents: 6cfdd1c
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Tue Aug 19 15:30:56 2014 +0300
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Tue Aug 19 15:33:24 2014 +0300
----------------------------------------------------------------------
.../global/background_operations_controller.js | 2 +-
.../templates/common/host_progress_popup.hbs | 2 +-
ambari-web/app/utils/host_progress_popup.js | 131 +++++++++++++------
3 files changed, 96 insertions(+), 39 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/26c1edca/ambari-web/app/controllers/global/background_operations_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/global/background_operations_controller.js b/ambari-web/app/controllers/global/background_operations_controller.js
index f6cb80b..e6461d6 100644
--- a/ambari-web/app/controllers/global/background_operations_controller.js
+++ b/ambari-web/app/controllers/global/background_operations_controller.js
@@ -230,7 +230,7 @@ App.BackgroundOperationsController = Em.Controller.extend({
});
this.get("services").unshift(rq);
//To sort DESC by request id
- this.set("services", this.get("services").sort( function(a,b) { return b.get('id') - a.get('id'); })) ;
+ this.set("services", this.get("services").sort( function(a,b) { return b.get('id') - a.get('id'); }));
}
runningServices += ~~isRunning;
}, this);
http://git-wip-us.apache.org/repos/asf/ambari/blob/26c1edca/ambari-web/app/templates/common/host_progress_popup.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/host_progress_popup.hbs b/ambari-web/app/templates/common/host_progress_popup.hbs
index d598ec7..d60dd30 100644
--- a/ambari-web/app/templates/common/host_progress_popup.hbs
+++ b/ambari-web/app/templates/common/host_progress_popup.hbs
@@ -57,7 +57,7 @@
<div class="time-summary start-time-text">{{servicesInfo.startTime}}</div>
<div class="time-summary duration-text">{{servicesInfo.duration}}</div>
<div class="progress-bar span2">
- <div {{bindAttr class="servicesInfo.isInProgress:progress-striped :active servicesInfo.barColor :progress"}}>
+ <div {{bindAttr class="servicesInfo.isInProgress:progress-striped servicesInfo.isInProgress:active servicesInfo.barColor :progress"}}>
<div class="bar" {{bindAttr style="servicesInfo.barWidth"}}></div>
</div>
</div>
http://git-wip-us.apache.org/repos/asf/ambari/blob/26c1edca/ambari-web/app/utils/host_progress_popup.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/host_progress_popup.js b/ambari-web/app/utils/host_progress_popup.js
index 1d3b184..4d27132 100644
--- a/ambari-web/app/utils/host_progress_popup.js
+++ b/ambari-web/app/utils/host_progress_popup.js
@@ -27,7 +27,7 @@ App.HostPopup = Em.Object.create({
name: 'hostPopup',
- servicesInfo: null,
+ servicesInfo: [],
hosts: null,
inputData: null,
@@ -174,7 +174,7 @@ App.HostPopup = Em.Object.create({
* clear info popup data
*/
clearHostPopup: function () {
- this.set('servicesInfo', null);
+ this.set('servicesInfo', []);
this.set('hosts', null);
this.set('inputData', null);
this.set('serviceName', "");
@@ -321,6 +321,15 @@ App.HostPopup = Em.Object.create({
}
},
+ // map to get css class with styles by service status
+ statusesStyleMap: {
+ 'FAILED': ['FAILED', 'icon-exclamation-sign', 'progress-danger', false],
+ 'ABORTED': ['ABORTED', 'icon-minus', 'progress-warning', false],
+ 'TIMEDOUT': ['TIMEDOUT', 'icon-time', 'progress-warning', false],
+ 'IN_PROGRESS': ['IN_PROGRESS', 'icon-cogs', 'progress-info', true],
+ 'COMPLETED': ['SUCCESS', 'icon-ok', 'progress-success', false]
+ },
+
/**
* Create services obj data structure for popup
* Set data for services
@@ -328,56 +337,105 @@ App.HostPopup = Em.Object.create({
*/
onServiceUpdate: function (isServiceListHidden) {
if (this.get('isBackgroundOperations') && this.get("inputData")) {
- var self = this;
- var allNewServices = [];
- var statuses = {
- 'FAILED': ['FAILED', 'icon-exclamation-sign', 'progress-danger', false],
- 'ABORTED': ['ABORTED', 'icon-minus', 'progress-warning', false],
- 'TIMEDOUT': ['TIMEDOUT', 'icon-time', 'progress-warning', false],
- 'IN_PROGRESS': ['IN_PROGRESS', 'icon-cogs', 'progress-info', true],
- 'COMPLETED': ['SUCCESS', 'icon-ok', 'progress-success', false]
- };
- var pendingStatus = ['PENDING', 'icon-cog', 'progress-info', true];
- this.set("servicesInfo", null);
- this.get("inputData").forEach(function (service) {
- var status = statuses[service.status] || pendingStatus;
+ var statuses = this.get('statusesStyleMap');
+ var servicesInfo = this.get("servicesInfo");
+ var currentServices = [];
+ this.get("inputData").forEach(function (service, index) {
+ var updatedService;
var id = service.id;
- var newService = Ember.Object.create({
- id: id,
- displayName: service.displayName,
- progress: service.progress,
- status: App.format.taskStatus(status[0]),
- isRunning: service.isRunning,
- name: service.name,
- isVisible: true,
- startTime: date.startTime(service.startTime),
- duration: date.durationSummary(service.startTime, service.endTime),
- icon: status[1],
- barColor: status[2],
- isInProgress: status[3],
- barWidth: "width:" + service.progress + "%;",
- sourceRequestScheduleId: service.get('sourceRequestScheduleId'),
- contextCommand: service.get('contextCommand')
- });
+ currentServices.push(id);
+ var existedService = servicesInfo.findProperty('id', id);
+ updatedService = existedService;
+ if (existedService) {
+ updatedService = this.updateService(existedService, service);
+ } else {
+ updatedService = this.createService(service);
+ servicesInfo.insertAt(index, updatedService);
+ }
if (App.get('supports.abortRequests')) {
var abortable = !Em.keys(statuses).contains(service.status) || service.status == 'IN_PROGRESS';
if (!abortable) {
var abortedRequests = this.get('abortedRequests');
this.set('abortedRequests', abortedRequests.without(id));
}
- newService.setProperties({
+ updatedService.setProperties({
abortable: abortable,
abortClassName: 'abort' + id
});
}
- allNewServices.push(newService);
}, this);
- self.set('servicesInfo', allNewServices);
+ this.removeOldServices(servicesInfo, currentServices);
this.setBackgroundOperationHeader(isServiceListHidden);
}
},
/**
+ * Create service object from transmitted data
+ * @param service
+ */
+ createService: function (service) {
+ var statuses = this.get('statusesStyleMap');
+ var pendingStatus = ['PENDING', 'icon-cog', 'progress-info', true];
+ var status = statuses[service.status] || pendingStatus;
+ return Ember.Object.create({
+ id: service.id,
+ displayName: service.displayName,
+ progress: service.progress,
+ status: App.format.taskStatus(status[0]),
+ isRunning: service.isRunning,
+ name: service.name,
+ isVisible: true,
+ startTime: date.startTime(service.startTime),
+ duration: date.durationSummary(service.startTime, service.endTime),
+ icon: status[1],
+ barColor: status[2],
+ isInProgress: status[3],
+ barWidth: "width:" + service.progress + "%;",
+ sourceRequestScheduleId: service.get('sourceRequestScheduleId'),
+ contextCommand: service.get('contextCommand')
+ });
+ },
+
+ /**
+ * Update properties of existed service with new data
+ * @param service
+ * @param newData
+ * @returns {Ember.Object}
+ */
+ updateService: function (service, newData) {
+ var statuses = this.get('statusesStyleMap');
+ var pendingStatus = ['PENDING', 'icon-cog', 'progress-info', true];
+ var status = statuses[newData.status] || pendingStatus;
+ return service.setProperties({
+ progress: newData.progress,
+ status: App.format.taskStatus(status[0]),
+ isRunning: newData.isRunning,
+ startTime: date.startTime(newData.startTime),
+ duration: date.durationSummary(newData.startTime, newData.endTime),
+ icon: status[1],
+ barColor: status[2],
+ isInProgress: status[3],
+ barWidth: "width:" + newData.progress + "%;",
+ sourceRequestScheduleId: newData.get('sourceRequestScheduleId'),
+ contextCommand: newData.get('contextCommand')
+ });
+ },
+
+ /**
+ * remove old requests
+ * as the API returns only the latest 10, 20, 30, etc. requests, any requests absent from the response should be removed
+ * @param services
+ * @param currentServicesIds
+ */
+ removeOldServices: function (services, currentServicesIds) {
+ services.forEach(function (service, index, services) {
+ if (!currentServicesIds.contains(service.id)) {
+ services.removeAt(index, 1);
+ }
+ });
+ },
+
+ /**
* create task Ember object
* @param {Object} _task
* @return {Em.Object}
@@ -546,7 +604,7 @@ App.HostPopup = Em.Object.create({
}
}
if (App.get('supports.abortRequests')) {
- var operation = this.get('servicesInfo') && this.get('servicesInfo').findProperty('name', this.get('serviceName'));
+ var operation = this.get('servicesInfo').findProperty('name', this.get('serviceName'));
if (!operation || (operation && operation.get('progress') == 100)) {
this.set('operationInfo', null);
} else {
@@ -561,7 +619,6 @@ App.HostPopup = Em.Object.create({
*/
createPopup: function () {
var self = this;
- var hostsInfo = this.get("hosts");
var servicesInfo = this.get("servicesInfo");
var isBackgroundOperations = this.get('isBackgroundOperations');
var categoryObject = Em.Object.extend({
[40/50] [abbrv] git commit: AMBARI-6936. Download Client Configs for
MapReduce2 download Yarn configs.(vbrodetskyi)
Posted by jo...@apache.org.
AMBARI-6936. Download Client Configs for MapReduce2 download Yarn configs.(vbrodetskyi)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ed09f6a2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ed09f6a2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ed09f6a2
Branch: refs/heads/branch-alerts-dev
Commit: ed09f6a223e837212f6d44d75a204f133c91ab6d
Parents: 7581b9a
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Wed Aug 20 15:47:34 2014 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Wed Aug 20 15:47:49 2014 +0300
----------------------------------------------------------------------
.../stacks/HDP/2.0.6/services/YARN/metainfo.xml | 13 ++++---------
1 file changed, 4 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ed09f6a2/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
index 8bb8dd7..c2e995d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
@@ -177,8 +177,8 @@
<configFiles>
<configFile>
<type>xml</type>
- <fileName>yarn-site.xml</fileName>
- <dictionaryName>yarn-site</dictionaryName>
+ <fileName>mapred-site.xml</fileName>
+ <dictionaryName>mapred-site</dictionaryName>
</configFile>
<configFile>
<type>xml</type>
@@ -187,14 +187,9 @@
</configFile>
<configFile>
<type>env</type>
- <fileName>yarn-env.sh</fileName>
- <dictionaryName>yarn-env</dictionaryName>
+ <fileName>mapred-env.sh</fileName>
+ <dictionaryName>mapred-env</dictionaryName>
</configFile>
- <configFile>
- <type>env</type>
- <fileName>yarn-log4j.properties</fileName>
- <dictionaryName>yarn-log4j</dictionaryName>
- </configFile>
</configFiles>
</component>
</components>
[35/50] [abbrv] git commit: AMBARI-6932. Security wizard: HDFS user
name should not be shown in standalone storm deployment. (jaimin)
Posted by jo...@apache.org.
AMBARI-6932. Security wizard: HDFS user name should not be shown in standalone storm deployment. (jaimin)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/068cafa9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/068cafa9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/068cafa9
Branch: refs/heads/branch-alerts-dev
Commit: 068cafa9056c490d8abdd4e9bd9322b6f71ca2f3
Parents: 356e17a
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Tue Aug 19 19:11:52 2014 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Tue Aug 19 19:11:52 2014 -0700
----------------------------------------------------------------------
.../main/admin/security/add/step2.js | 21 ++++++++++----------
ambari-web/app/data/HDP2/secure_properties.js | 4 ++--
2 files changed, 13 insertions(+), 12 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/068cafa9/ambari-web/app/controllers/main/admin/security/add/step2.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/security/add/step2.js b/ambari-web/app/controllers/main/admin/security/add/step2.js
index 4b28a30..857099f 100644
--- a/ambari-web/app/controllers/main/admin/security/add/step2.js
+++ b/ambari-web/app/controllers/main/admin/security/add/step2.js
@@ -343,18 +343,19 @@ App.MainAdminSecurityAddStep2Controller = Em.Controller.extend({
*/
addUserPrincipals: function (serviceConfigs, securityUsers) {
var generalService = serviceConfigs.findProperty('serviceName', 'GENERAL').configs;
- var isHbaseService = serviceConfigs.someProperty('serviceName', 'HBASE');
- var hbaseUserPrincipal = generalService.findProperty('name', 'hbase_principal_name');
- var hbaseUserKeytab = generalService.findProperty('name', 'hbase_user_keytab');
- var hbaseUser = securityUsers.findProperty('name', 'hbase_user');
-
this.setUserPrincipalValue(securityUsers.findProperty('name', 'smokeuser'), generalService.findProperty('name', 'smokeuser_principal_name'));
- this.setUserPrincipalValue(securityUsers.findProperty('name', 'hdfs_user'), generalService.findProperty('name', 'hdfs_principal_name'));
+ var servicesWithUserPrincipals = ['HDFS','HBASE'];
- if (isHbaseService && this.setUserPrincipalValue(hbaseUser, hbaseUserPrincipal)) {
- hbaseUserPrincipal.isVisible = true;
- hbaseUserKeytab.isVisible = true;
- }
+ servicesWithUserPrincipals.forEach(function(serviceName){
+ var isServiceInstalled = serviceConfigs.someProperty('serviceName', serviceName);
+ var userPricipal = generalService.findProperty('name', serviceName.toLowerCase() + '_principal_name');
+ var userKeytab = generalService.findProperty('name', serviceName.toLowerCase() + '_user_keytab');
+ var userName = securityUsers.findProperty('name', serviceName.toLowerCase() + '_user');
+ if (isServiceInstalled && this.setUserPrincipalValue(userName, userPricipal)) {
+ userPricipal.isVisible = true;
+ userKeytab.isVisible = true;
+ }
+ },this);
},
/**
* set default value of user principal
http://git-wip-us.apache.org/repos/asf/ambari/blob/068cafa9/ambari-web/app/data/HDP2/secure_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/secure_properties.js b/ambari-web/app/data/HDP2/secure_properties.js
index 93555b4..6904d6e 100644
--- a/ambari-web/app/data/HDP2/secure_properties.js
+++ b/ambari-web/app/data/HDP2/secure_properties.js
@@ -119,7 +119,7 @@ module.exports =
"defaultValue": "hdfs",
"description": "This is the principal name for HDFS user",
"displayType": "principal",
- "isVisible": true,
+ "isVisible": false,
"isOverridable": false,
"isReconfigurable": false,
"serviceName": "GENERAL",
@@ -133,7 +133,7 @@ module.exports =
"defaultValue": "/etc/security/keytabs/hdfs.headless.keytab",
"description": "Path to keytab file for HDFS user",
"displayType": "directory",
- "isVisible": true,
+ "isVisible": false,
"isOverridable": false,
"serviceName": "GENERAL",
"filename": "hadoop-env.xml",
[50/50] [abbrv] git commit: AMBARI-6924 - Alerts: Updating an
AlertDefinition With Partial Property Maps (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-6924 - Alerts: Updating an AlertDefinition With Partial Property Maps (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/31f9ff83
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/31f9ff83
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/31f9ff83
Branch: refs/heads/branch-alerts-dev
Commit: 31f9ff8366823d3978535b6dbbd90a0f23a5c498
Parents: 0ac9cb3
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Aug 20 11:03:03 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Aug 20 11:03:03 2014 -0400
----------------------------------------------------------------------
.../AlertDefinitionResourceProvider.java | 343 ++++++++++---------
.../AlertDefinitionResourceProviderTest.java | 31 +-
2 files changed, 212 insertions(+), 162 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/31f9ff83/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
index bed25e7..1417bdb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
@@ -50,6 +50,7 @@ import org.apache.ambari.server.state.alert.AlertDefinition;
import org.apache.ambari.server.state.alert.AlertDefinitionHash;
import org.apache.ambari.server.state.alert.Scope;
import org.apache.ambari.server.state.alert.SourceType;
+import org.apache.commons.lang.StringUtils;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
@@ -136,7 +137,9 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
String clusterName = null;
for (Map<String, Object> requestMap : requestMaps) {
- entities.add(toCreateEntity(requestMap));
+ AlertDefinitionEntity entity = new AlertDefinitionEntity();
+ populateEntity(entity, requestMap);
+ entities.add(entity);
if (null == clusterName) {
clusterName = (String) requestMap.get(ALERT_DEF_CLUSTER_NAME);
@@ -155,122 +158,6 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
enqueueAgentCommands(clusterName, invalidatedHosts);
}
- private AlertDefinitionEntity toCreateEntity(Map<String, Object> requestMap)
- throws AmbariException {
-
- String clusterName = (String) requestMap.get(ALERT_DEF_CLUSTER_NAME);
-
- if (null == clusterName || clusterName.isEmpty()) {
- throw new IllegalArgumentException("Invalid argument, cluster name is required");
- }
-
- if (!requestMap.containsKey(ALERT_DEF_INTERVAL)) {
- throw new IllegalArgumentException("Check interval must be specified");
- }
-
- Integer interval = Integer.valueOf((String) requestMap.get(ALERT_DEF_INTERVAL));
-
- if (!requestMap.containsKey(ALERT_DEF_NAME)) {
- throw new IllegalArgumentException("Definition name must be specified");
- }
-
- if (!requestMap.containsKey(ALERT_DEF_SERVICE_NAME)) {
- throw new IllegalArgumentException("Service name must be specified");
- }
-
- if (!requestMap.containsKey(ALERT_DEF_SOURCE_TYPE)) {
- throw new IllegalArgumentException(String.format(
- "Source type must be specified and one of %s", EnumSet.allOf(
- SourceType.class)));
- }
-
- // !!! Alert structures contain nested objects; reconstruct a valid
- // JSON from the flat, exploded properties so that a Source instance can
- // be properly persisted
- JsonObject source = new JsonObject();
- JsonObject reporting = new JsonObject();
- JsonObject reportingOk = new JsonObject();
- JsonObject reportingWarning = new JsonObject();
- JsonObject reportingCritical = new JsonObject();
-
- for (Entry<String, Object> entry : requestMap.entrySet()) {
- String propCat = PropertyHelper.getPropertyCategory(entry.getKey());
- String propName = PropertyHelper.getPropertyName(entry.getKey());
-
- if (propCat.equals(ALERT_DEF) && "source".equals(propName)) {
- source.addProperty(propName, entry.getValue().toString());
- }
-
- if (propCat.equals(ALERT_DEF_SOURCE)) {
- source.addProperty(propName, entry.getValue().toString());
- }
-
- if (propCat.equals(ALERT_DEF_SOURCE_REPORTING)) {
- reporting.addProperty(propName, entry.getValue().toString());
- }
-
- if (propCat.equals(ALERT_DEF_SOURCE_REPORTING_OK)) {
- reportingOk.addProperty(propName, entry.getValue().toString());
- }
-
- if (propCat.equals(ALERT_DEF_SOURCE_REPORTING_WARNING)) {
- reportingWarning.addProperty(propName, entry.getValue().toString());
- }
-
- if (propCat.equals(ALERT_DEF_SOURCE_REPORTING_CRITICAL)) {
- reportingCritical.addProperty(propName, entry.getValue().toString());
- }
- }
-
- if (0 == source.entrySet().size()) {
- throw new IllegalArgumentException("Source must be specified");
- }
-
- if (reportingOk.entrySet().size() > 0) {
- reporting.add("ok", reportingOk);
- }
-
- if (reportingWarning.entrySet().size() > 0) {
- reporting.add("warning", reportingWarning);
- }
-
- if (reportingCritical.entrySet().size() > 0) {
- reporting.add("critical", reportingCritical);
- }
-
- if (reporting.entrySet().size() > 0) {
- source.add("reporting", reporting);
- }
-
- Cluster cluster = getManagementController().getClusters().getCluster(clusterName);
-
- AlertDefinitionEntity entity = new AlertDefinitionEntity();
- entity.setClusterId(Long.valueOf(cluster.getClusterId()));
- entity.setComponentName((String) requestMap.get(ALERT_DEF_COMPONENT_NAME));
- entity.setDefinitionName((String) requestMap.get(ALERT_DEF_NAME));
- entity.setLabel((String) requestMap.get(ALERT_DEF_LABEL));
-
- boolean enabled = requestMap.containsKey(ALERT_DEF_ENABLED) ?
- Boolean.parseBoolean((String)requestMap.get(ALERT_DEF_ENABLED)) : true;
-
- entity.setEnabled(enabled);
- entity.setHash(UUID.randomUUID().toString());
- entity.setScheduleInterval(interval);
- entity.setServiceName((String) requestMap.get(ALERT_DEF_SERVICE_NAME));
- entity.setSourceType((String) requestMap.get(ALERT_DEF_SOURCE_TYPE));
- entity.setSource(source.toString());
-
- Scope scope = null;
- String desiredScope = (String) requestMap.get(ALERT_DEF_SCOPE);
- if (null != desiredScope && desiredScope.length() > 0) {
- scope = Scope.valueOf(desiredScope);
- }
-
- entity.setScope(scope);
-
- return entity;
- }
-
@Override
public Set<Resource> getResources(Request request, Predicate predicate)
throws SystemException, UnsupportedPropertyException,
@@ -343,51 +230,14 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
}
}
- if (propertyMap.containsKey(ALERT_DEF_NAME)) {
- entity.setDefinitionName((String) propertyMap.get(ALERT_DEF_NAME));
- }
-
- if (propertyMap.containsKey(ALERT_DEF_ENABLED)) {
- entity.setEnabled(Boolean.parseBoolean(
- (String) propertyMap.get(ALERT_DEF_ENABLED)));
- }
-
- if (propertyMap.containsKey(ALERT_DEF_INTERVAL)) {
- entity.setScheduleInterval(Integer.valueOf(
- (String) propertyMap.get(ALERT_DEF_INTERVAL)));
- }
-
- if (propertyMap.containsKey(ALERT_DEF_SCOPE)){
- Scope scope = null;
- String desiredScope = (String) propertyMap.get(ALERT_DEF_SCOPE);
-
- if (null != desiredScope && desiredScope.length() > 0) {
- scope = Scope.valueOf((desiredScope));
- }
-
- entity.setScope(scope);
+ try{
+ populateEntity(entity, propertyMap);
+ alertDefinitionDAO.merge(entity);
+ invalidatedHosts.addAll(alertDefinitionHash.invalidateHosts(entity));
}
-
-
- if (propertyMap.containsKey(ALERT_DEF_SOURCE_TYPE)) {
- entity.setSourceType((String) propertyMap.get(ALERT_DEF_SOURCE_TYPE));
+ catch( AmbariException ae ){
+ LOG.error("Unable to find cluster when updating alert definition", ae);
}
-
- JsonObject jsonObj = new JsonObject();
-
- for (Entry<String, Object> entry : propertyMap.entrySet()) {
- String propCat = PropertyHelper.getPropertyCategory(entry.getKey());
- String propName = PropertyHelper.getPropertyName(entry.getKey());
-
- if (propCat.equals(ALERT_DEF_SOURCE)) {
- jsonObj.addProperty(propName, entry.getValue().toString());
- }
- }
-
- entity.setHash(UUID.randomUUID().toString());
-
- alertDefinitionDAO.merge(entity);
- invalidatedHosts.addAll(alertDefinitionHash.invalidateHosts(entity));
}
}
@@ -440,7 +290,180 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
notifyDelete(Resource.Type.AlertDefinition, predicate);
return getRequestStatus(null);
+ }
+ /**
+ * Merges the map of properties into the specified entity. If the entity is
+ * being created, an {@link IllegalArgumentException} is thrown when a
+ * required property is absent. When updating, missing properties are assumed
+ * to not have changed.
+ *
+ * @param entity
+ * the entity to merge the properties into (not {@code null}).
+ * @param requestMap
+ * the map of properties (not {@code null}).
+ * @throws AmbariException
+ */
+ private void populateEntity(AlertDefinitionEntity entity,
+ Map<String, Object> requestMap) throws AmbariException {
+
+ // some fields are required on creation; on update we keep what's there
+ boolean bCreate = true;
+ if (null != entity.getDefinitionId()) {
+ bCreate = false;
+ }
+
+ String clusterName = (String) requestMap.get(ALERT_DEF_CLUSTER_NAME);
+ String definitionName = (String) requestMap.get(ALERT_DEF_NAME);
+ String serviceName = (String) requestMap.get(ALERT_DEF_SERVICE_NAME);
+ String componentName = (String) requestMap.get(ALERT_DEF_COMPONENT_NAME);
+ String sourceType = (String) requestMap.get(ALERT_DEF_SOURCE_TYPE);
+ String label = (String) requestMap.get(ALERT_DEF_LABEL);
+ String desiredScope = (String) requestMap.get(ALERT_DEF_SCOPE);
+
+ Integer interval = null;
+ if (requestMap.containsKey(ALERT_DEF_INTERVAL)) {
+ interval = Integer.valueOf((String) requestMap.get(ALERT_DEF_INTERVAL));
+ }
+
+ Boolean enabled = null;
+ if (requestMap.containsKey(ALERT_DEF_ENABLED)) {
+ enabled = Boolean.parseBoolean((String) requestMap.get(ALERT_DEF_ENABLED));
+ } else if (bCreate) {
+ enabled = Boolean.TRUE;
+ }
+
+ Scope scope = null;
+ if (null != desiredScope && desiredScope.length() > 0) {
+ scope = Scope.valueOf(desiredScope);
+ }
+
+ if (StringUtils.isEmpty(clusterName)) {
+ throw new IllegalArgumentException(
+ "Invalid argument, cluster name is required");
+ }
+
+ if (bCreate && !requestMap.containsKey(ALERT_DEF_INTERVAL)) {
+ throw new IllegalArgumentException("Check interval must be specified");
+ }
+
+ if (bCreate && StringUtils.isEmpty(definitionName)) {
+ throw new IllegalArgumentException("Definition name must be specified");
+ }
+
+ if (bCreate && StringUtils.isEmpty(serviceName)) {
+ throw new IllegalArgumentException("Service name must be specified");
+ }
+
+ if (bCreate && StringUtils.isEmpty(sourceType)) {
+ throw new IllegalArgumentException(String.format(
+ "Source type must be specified and one of %s",
+ EnumSet.allOf(SourceType.class)));
+ }
+
+ // !!! Alert structures contain nested objects; reconstruct a valid
+ // JSON from the flat, exploded properties so that a Source instance can
+ // be properly persisted
+ JsonObject source = new JsonObject();
+ JsonObject reporting = new JsonObject();
+ JsonObject reportingOk = new JsonObject();
+ JsonObject reportingWarning = new JsonObject();
+ JsonObject reportingCritical = new JsonObject();
+
+ for (Entry<String, Object> entry : requestMap.entrySet()) {
+ String propCat = PropertyHelper.getPropertyCategory(entry.getKey());
+ String propName = PropertyHelper.getPropertyName(entry.getKey());
+
+ if (propCat.equals(ALERT_DEF) && "source".equals(propName)) {
+ source.addProperty(propName, entry.getValue().toString());
+ }
+
+ if (propCat.equals(ALERT_DEF_SOURCE)) {
+ source.addProperty(propName, entry.getValue().toString());
+ }
+
+ if (propCat.equals(ALERT_DEF_SOURCE_REPORTING)) {
+ reporting.addProperty(propName, entry.getValue().toString());
+ }
+
+ if (propCat.equals(ALERT_DEF_SOURCE_REPORTING_OK)) {
+ reportingOk.addProperty(propName, entry.getValue().toString());
+ }
+
+ if (propCat.equals(ALERT_DEF_SOURCE_REPORTING_WARNING)) {
+ reportingWarning.addProperty(propName, entry.getValue().toString());
+ }
+
+ if (propCat.equals(ALERT_DEF_SOURCE_REPORTING_CRITICAL)) {
+ reportingCritical.addProperty(propName, entry.getValue().toString());
+ }
+ }
+
+ if (reportingOk.entrySet().size() > 0) {
+ reporting.add("ok", reportingOk);
+ }
+
+ if (reportingWarning.entrySet().size() > 0) {
+ reporting.add("warning", reportingWarning);
+ }
+
+ if (reportingCritical.entrySet().size() > 0) {
+ reporting.add("critical", reportingCritical);
+ }
+
+ if (reporting.entrySet().size() > 0) {
+ source.add("reporting", reporting);
+ }
+
+ if (bCreate && 0 == source.entrySet().size()) {
+ throw new IllegalArgumentException("Source must be specified");
+ }
+
+ Cluster cluster = getManagementController().getClusters().getCluster(
+ clusterName);
+
+ // at this point, we have either validated all required properties or
+ // we are using the existing entity properties where not defined, so we
+ // can do simple null checks
+ entity.setClusterId(Long.valueOf(cluster.getClusterId()));
+
+ if (null != componentName) {
+ entity.setComponentName(componentName);
+ }
+
+ if (null != definitionName) {
+ entity.setDefinitionName(definitionName);
+ }
+
+ if (null != label) {
+ entity.setLabel(label);
+ }
+
+ if (null != enabled) {
+ entity.setEnabled(enabled);
+ }
+
+ if (null != interval) {
+ entity.setScheduleInterval(interval);
+ }
+
+ if (null != serviceName) {
+ entity.setServiceName(serviceName);
+ }
+
+ if (null != sourceType) {
+ entity.setSourceType(sourceType);
+ }
+
+ if (null != source) {
+ entity.setSource(source.toString());
+ }
+
+ if (null != scope) {
+ entity.setScope(scope);
+ }
+
+ entity.setHash(UUID.randomUUID().toString());
}
private Resource toResource(boolean isCollection, String clusterName,
http://git-wip-us.apache.org/repos/asf/ambari/blob/31f9ff83/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
index 7df999e..61ceb7b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
@@ -289,6 +289,8 @@ public class AlertDefinitionResourceProviderTest {
*/
@Test
public void testUpdateResources() throws Exception {
+ Gson gson = m_factory.getGson();
+
AmbariManagementController amc = createMock(AmbariManagementController.class);
Clusters clusters = createMock(Clusters.class);
Cluster cluster = createMock(Cluster.class);
@@ -309,13 +311,23 @@ public class AlertDefinitionResourceProviderTest {
replay(amc, clusters, cluster, dao, definitionHash);
+ Source source = getMockSource();
+ String sourceString = gson.toJson(source);
+
Map<String, Object> requestProps = new HashMap<String, Object>();
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME, "c1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_INTERVAL, "1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_NAME, "my_def");
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_LABEL, "Label");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SERVICE_NAME, "HDFS");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE, "METRIC");
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE,
+ sourceString);
+
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_ENABLED,
+ Boolean.TRUE.toString());
+
Request request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
AlertDefinitionResourceProvider provider = createProvider(amc);
@@ -335,6 +347,9 @@ public class AlertDefinitionResourceProviderTest {
String oldName = entity.getDefinitionName();
String oldHash = entity.getHash();
+ Integer oldInterval = entity.getScheduleInterval();
+ boolean oldEnabled = entity.getEnabled();
+ String oldSource = entity.getSource();
resetToStrict(dao);
expect(dao.findById(1L)).andReturn(entity).anyTimes();
@@ -344,16 +359,28 @@ public class AlertDefinitionResourceProviderTest {
requestProps = new HashMap<String, Object>();
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_ID, "1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME, "c1");
- requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_INTERVAL, "1");
- requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_NAME, "my_def1");
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_INTERVAL, "2");
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_NAME, "my_def2");
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_LABEL, "Label 2");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SERVICE_NAME, "HDFS");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE, "METRIC");
+
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE,
+ sourceString.replaceAll("CPU", "CPU2"));
+
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_ENABLED,
+ Boolean.FALSE.toString());
+
request = PropertyHelper.getUpdateRequest(requestProps, null);
provider.updateResources(request, p);
Assert.assertFalse(oldHash.equals(entity.getHash()));
Assert.assertFalse(oldName.equals(entity.getDefinitionName()));
+ Assert.assertFalse(oldInterval.equals(entity.getScheduleInterval()));
+ Assert.assertFalse(oldEnabled == entity.getEnabled());
+ Assert.assertFalse(oldSource.equals(entity.getSource()));
+ Assert.assertTrue(entity.getSource().contains("CPU2"));
verify(amc, clusters, cluster, dao);
}
[10/50] [abbrv] git commit: AMBARI-6901. Flume agents are absent
after configuring flume.conf and restart. (jaimin)
Posted by jo...@apache.org.
AMBARI-6901. Flume agents are absent after configuring flume.conf and restart. (jaimin)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e7d864ca
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e7d864ca
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e7d864ca
Branch: refs/heads/branch-alerts-dev
Commit: e7d864ca0f0eed4f7011e34131ec6f7f7756d377
Parents: 1a9abc4
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Mon Aug 18 13:21:45 2014 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Mon Aug 18 13:23:22 2014 -0700
----------------------------------------------------------------------
.../services/FLUME/configuration/flume-conf.xml | 2 +-
ambari-web/app/data/HDP2/site_properties.js | 24 ++++++++++----------
2 files changed, 13 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d864ca/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/configuration/flume-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/configuration/flume-conf.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/configuration/flume-conf.xml
index 838491d..74a4c15 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/configuration/flume-conf.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/configuration/flume-conf.xml
@@ -22,7 +22,7 @@
<configuration supports_final="false">
<property>
- <name>flume-content</name>
+ <name>content</name>
<description>Describe all the Flume agent configurations</description>
<value>
# Flume agent config
http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d864ca/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index edadbc8..169840f 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -1640,7 +1640,7 @@ module.exports =
/********************************************* flume-agent *****************************/
{
"id": "site property",
- "name": "flume-content",
+ "name": "content",
"displayName": "content",
"showLabel": false,
"isRequired": false,
@@ -1966,17 +1966,6 @@ module.exports =
},
{
"id": "puppet var",
- "name": "min_user_id",
- "displayName": "Minimum user ID for submitting job",
- "isOverridable": true,
- "displayType": "int",
- "isVisible": true,
- "serviceName": "YARN",
- "filename": "yarn-env.xml",
- "category": "Advanced"
- },
- {
- "id": "puppet var",
"name": "yarn_pid_dir_prefix",
"displayName": "YARN PID Dir Prefix",
"description": "",
@@ -1991,6 +1980,17 @@ module.exports =
},
{
"id": "puppet var",
+ "name": "min_user_id",
+ "displayName": "Minimum user ID for submitting job",
+ "isOverridable": true,
+ "displayType": "int",
+ "isVisible": true,
+ "serviceName": "YARN",
+ "filename": "yarn-env.xml",
+ "category": "Advanced yarn-env"
+ },
+ {
+ "id": "puppet var",
"name": "ats_host",
"displayName": "App Timeline Server",
"description": "Application Timeline Server Host",
[44/50] [abbrv] AMBARI-6880 - Alerts: Send Definitions Down Via
Commands to the Agent (jonathanhurley)
Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/8e481286/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java
index c8fce49..1cb6eac 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.server.state.alerts;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.expect;
+import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -36,11 +37,14 @@ import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.alert.AlertDefinition;
import org.apache.ambari.server.state.alert.AlertDefinitionHash;
import org.apache.ambari.server.state.alert.Scope;
+import org.apache.commons.codec.binary.Hex;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Before;
@@ -131,6 +135,12 @@ public class AlertDefinitionHashTest extends TestCase {
expect(m_mockClusters.getClusterById(EasyMock.anyInt())).andReturn(
m_mockCluster).atLeastOnce();
+ Map<String, Host> clusterHosts = new HashMap<String, Host>();
+ clusterHosts.put(HOSTNAME, null);
+
+ expect(m_mockClusters.getHostsForCluster(EasyMock.eq(CLUSTERNAME))).andReturn(
+ clusterHosts).anyTimes();
+
// cluster mock
expect(m_mockCluster.getClusterId()).andReturn(Long.valueOf(1)).anyTimes();
expect(m_mockCluster.getClusterName()).andReturn(CLUSTERNAME).anyTimes();
@@ -146,6 +156,7 @@ public class AlertDefinitionHashTest extends TestCase {
m_hdfsService.setServiceName("HDFS");
m_hdfsService.setComponentName("NAMENODE");
m_hdfsService.setScope(Scope.SERVICE);
+ m_hdfsService.setScheduleInterval(1);
m_hdfsHost = new AlertDefinitionEntity();
m_hdfsHost.setDefinitionId(2L);
@@ -154,6 +165,7 @@ public class AlertDefinitionHashTest extends TestCase {
m_hdfsHost.setServiceName("HDFS");
m_hdfsHost.setComponentName("DATANODE");
m_hdfsHost.setScope(Scope.HOST);
+ m_hdfsHost.setScheduleInterval(1);
AlertDefinitionEntity agentScoped = new AlertDefinitionEntity();
agentScoped.setDefinitionId(3L);
@@ -162,6 +174,7 @@ public class AlertDefinitionHashTest extends TestCase {
agentScoped.setServiceName("AMBARI");
agentScoped.setComponentName("AMBARI_AGENT");
agentScoped.setScope(Scope.HOST);
+ agentScoped.setScheduleInterval(1);
EasyMock.expect(
m_mockDao.findByServiceMaster(EasyMock.anyInt(),
@@ -207,7 +220,7 @@ public class AlertDefinitionHashTest extends TestCase {
*/
@Test
public void testGetAlertDefinitions() {
- Set<AlertDefinitionEntity> definitions = m_hash.getAlertDefinitions(
+ List<AlertDefinition> definitions = m_hash.getAlertDefinitions(
CLUSTERNAME, HOSTNAME);
assertEquals(3, definitions.size());
@@ -236,6 +249,7 @@ public class AlertDefinitionHashTest extends TestCase {
agentScoped.setServiceName("AMBARI");
agentScoped.setComponentName("AMBARI_AGENT");
agentScoped.setScope(Scope.HOST);
+ agentScoped.setScheduleInterval(1);
m_agentDefinitions.add(agentScoped);
@@ -248,22 +262,22 @@ public class AlertDefinitionHashTest extends TestCase {
*/
@Test
public void testIsHashCached() {
- assertFalse(m_hash.isHashCached(HOSTNAME));
+ assertFalse(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
String hash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
assertNotNull(hash);
- assertTrue(m_hash.isHashCached(HOSTNAME));
+ assertTrue(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
m_hash.invalidate(HOSTNAME);
- assertFalse(m_hash.isHashCached(HOSTNAME));
+ assertFalse(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
hash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
assertNotNull(hash);
- assertTrue(m_hash.isHashCached(HOSTNAME));
+ assertTrue(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
m_hash.invalidateAll();
- assertFalse(m_hash.isHashCached(HOSTNAME));
+ assertFalse(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
hash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
assertNotNull(hash);
- assertTrue(m_hash.isHashCached(HOSTNAME));
+ assertTrue(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
}
/**
@@ -271,13 +285,63 @@ public class AlertDefinitionHashTest extends TestCase {
*/
@Test
public void testInvalidateHosts() {
- assertFalse(m_hash.isHashCached(HOSTNAME));
+ assertFalse(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
+ String hash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
+ assertNotNull(hash);
+ assertTrue(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
+
+ Set<String> invalidatedHosts = m_hash.invalidateHosts(m_hdfsHost);
+ assertFalse(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
+ assertNotNull(invalidatedHosts);
+ assertEquals(1, invalidatedHosts.size());
+ assertTrue(invalidatedHosts.contains(HOSTNAME));
+ }
+
+ /**
+ *
+ */
+ @Test
+ public void testInvalidateHost() {
+ assertFalse(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
+ assertFalse(m_hash.isHashCached("foo", HOSTNAME));
+
String hash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
assertNotNull(hash);
- assertTrue(m_hash.isHashCached(HOSTNAME));
+ assertTrue(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
+ assertFalse(m_hash.isHashCached("foo", HOSTNAME));
+
+ // invalidate the fake cluster and ensure the original cluster still
+ // contains a cached value
+ m_hash.invalidate("foo", HOSTNAME);
+ assertTrue(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
+ assertFalse(m_hash.isHashCached("foo", HOSTNAME));
+
+ m_hash.invalidateAll();
+ assertFalse(m_hash.isHashCached(CLUSTERNAME, HOSTNAME));
+ assertFalse(m_hash.isHashCached("foo", HOSTNAME));
+ }
+
+ @Test
+ public void testHashingAlgorithm() throws Exception {
+ List<String> uuids = new ArrayList<String>();
+ uuids.add(m_hdfsService.getHash());
+ uuids.add(m_hdfsHost.getHash());
+
+ for (AlertDefinitionEntity entity : m_agentDefinitions) {
+ uuids.add(entity.getHash());
+ }
+
+ Collections.sort(uuids);
+
+ MessageDigest digest = MessageDigest.getInstance("MD5");
+ for (String uuid : uuids) {
+ digest.update(uuid.getBytes());
+ }
+
+ byte[] hashBytes = digest.digest();
+ String expected = Hex.encodeHexString(hashBytes);
- m_hash.invalidateHosts(m_hdfsHost);
- assertFalse(m_hash.isHashCached(HOSTNAME));
+ assertEquals(expected, m_hash.getHash(CLUSTERNAME, HOSTNAME));
}
/**
[06/50] [abbrv] git commit: AMBARI-6892. YARN unit-tests contains
un-existed "mapred-queue-acls.xml" test resource (aonishuk)
Posted by jo...@apache.org.
AMBARI-6892. YARN unit-tests contains un-existed "mapred-queue-acls.xml" test resource (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/eefa2fd5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/eefa2fd5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/eefa2fd5
Branch: refs/heads/branch-alerts-dev
Commit: eefa2fd57ba0e747f9b894bacc25084633395564
Parents: 947899e
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Aug 18 20:09:46 2014 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Aug 18 20:09:46 2014 +0300
----------------------------------------------------------------------
.../test/python/stacks/2.0.6/YARN/test_historyserver.py | 8 --------
.../python/stacks/2.0.6/YARN/test_mapreduce2_client.py | 8 --------
.../test/python/stacks/2.0.6/YARN/test_nodemanager.py | 8 --------
.../python/stacks/2.0.6/YARN/test_resourcemanager.py | 8 --------
.../test/python/stacks/2.0.6/YARN/test_yarn_client.py | 12 ------------
.../python/stacks/2.1/YARN/test_apptimelineserver.py | 4 ----
6 files changed, 48 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/eefa2fd5/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 6f12b60..86e3353 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -285,10 +285,6 @@ class TestHistoryServer(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
@@ -486,10 +482,6 @@ class TestHistoryServer(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/eefa2fd5/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index e28fa45..4218a59 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -144,10 +144,6 @@ class TestMapReduce2Client(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
@@ -292,10 +288,6 @@ class TestMapReduce2Client(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/eefa2fd5/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index 0e621a2..597324a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -285,10 +285,6 @@ class TestNodeManager(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
@@ -486,10 +482,6 @@ class TestNodeManager(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/eefa2fd5/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index 66551c1..8410b56 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -229,10 +229,6 @@ class TestResourceManager(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
@@ -375,10 +371,6 @@ class TestResourceManager(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/eefa2fd5/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
index 9944cc3..032f334 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
@@ -145,10 +145,6 @@ class TestYarnClient(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
@@ -293,10 +289,6 @@ class TestYarnClient(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
@@ -436,10 +428,6 @@ class TestYarnClient(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/eefa2fd5/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
index 4de15c1..7dbbd9a 100644
--- a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
+++ b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
@@ -184,10 +184,6 @@ class TestAppTimelineServer(RMFTestCase):
configurations = self.getConfig()['configurations']['mapred-site'],
configuration_attributes = self.getConfig()['configuration_attributes']['mapred-site']
)
- self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
- owner = 'mapred',
- group = 'hadoop',
- )
self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
owner = 'hdfs',
group = 'hadoop',
[48/50] [abbrv] git commit: AMBARI-6890 - Alerts: Send Empty Alert
Definition With Hash To Agents (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-6890 - Alerts: Send Empty Alert Definition With Hash To Agents (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/72ebd263
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/72ebd263
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/72ebd263
Branch: refs/heads/branch-alerts-dev
Commit: 72ebd2633bb10b611978cedf81dc34eeb3c9f4eb
Parents: 14e79ed
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Aug 18 10:03:00 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Aug 20 10:52:41 2014 -0400
----------------------------------------------------------------------
.../ambari/server/agent/AgentCommand.java | 24 +++-
.../ambari/server/agent/CancelCommand.java | 5 +-
.../ambari/server/agent/ExecutionCommand.java | 109 ++++++++--------
.../ambari/server/agent/HeartBeatResponse.java | 42 +++---
.../ambari/server/agent/NagiosAlertCommand.java | 11 +-
.../ambari/server/agent/StatusCommand.java | 43 +++----
.../AlertDefinitionResourceProvider.java | 8 +-
.../server/orm/entities/AlertHistoryEntity.java | 55 ++++----
.../apache/ambari/server/utils/StageUtils.java | 54 ++++----
.../stacks/HDP/2.0.6/services/HDFS/alerts.json | 58 +++++++++
.../server/api/services/AmbariMetaInfoTest.java | 2 -
.../AmbariManagementControllerTest.java | 21 +--
.../AlertDefinitionResourceProviderTest.java | 2 +
.../ambari/server/utils/TestStageUtils.java | 127 ++++++++++---------
.../stacks/HDP/2.0.5/services/HDFS/alerts.json | 51 ++++++++
15 files changed, 374 insertions(+), 238 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentCommand.java
index 29805a1..6e8aab1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/AgentCommand.java
@@ -17,16 +17,34 @@
*/
package org.apache.ambari.server.agent;
+import com.google.gson.Gson;
+
+/**
+ * The base class for all agent commands. Concrete implementations are
+ * serialized by Gson ({@link Gson}) and should be annotated with Gson
+ * annotations (not Jackson).
+ */
public abstract class AgentCommand {
private AgentCommandType commandType;
+ /**
+ * Constructor. Although not required for Gson, it's a good idea to have it so
+ * that we don't need to worry about unsafe object construction that bypasses
+ * the constructors.
+ * <p/>
+ * Subclasses should always use {@link #AgentCommand(AgentCommandType)}
+ */
public AgentCommand() {
- this.commandType = AgentCommandType.STATUS_COMMAND;
+ commandType = AgentCommandType.STATUS_COMMAND;
}
+ /**
+ * Constructor. Must be invoked by all concrete subclasses to properly set
+ * the type.
+ */
public AgentCommand(AgentCommandType type) {
- this.commandType = type;
+ commandType = type;
}
public enum AgentCommandType {
@@ -41,7 +59,7 @@ public abstract class AgentCommand {
public AgentCommandType getCommandType() {
return commandType;
}
-
+
public void setCommandType(AgentCommandType commandType) {
this.commandType = commandType;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/main/java/org/apache/ambari/server/agent/CancelCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/CancelCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/CancelCommand.java
index 55de9ea..7aa24c8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/CancelCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/CancelCommand.java
@@ -18,10 +18,6 @@
package org.apache.ambari.server.agent;
import com.google.gson.annotations.SerializedName;
-import org.codehaus.jackson.annotate.JsonProperty;
-
-import java.util.HashMap;
-import java.util.Map;
/**
* Command to report the status of a list of services in roles.
@@ -35,6 +31,7 @@ public class CancelCommand extends AgentCommand {
@SerializedName("target_task_id")
private long targetTaskId;
+ @SerializedName("reason")
private String reason;
public long getTargetTaskId() {
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index fdf96df..0600f27 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@ -22,12 +22,12 @@ import java.util.HashSet;
import java.util.Map;
import java.util.Set;
-import com.google.gson.annotations.SerializedName;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.utils.StageUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.codehaus.jackson.annotate.JsonProperty;
+
+import com.google.gson.annotations.SerializedName;
/**
@@ -35,66 +35,95 @@ import org.codehaus.jackson.annotate.JsonProperty;
* persisted in the database for recovery.
*/
public class ExecutionCommand extends AgentCommand {
-
+
private static Log LOG = LogFactory.getLog(ExecutionCommand.class);
-
+
public ExecutionCommand() {
super(AgentCommandType.EXECUTION_COMMAND);
}
+ @SerializedName("clusterName")
private String clusterName;
+
+ @SerializedName("taskId")
private long taskId;
+
+ @SerializedName("commandId")
private String commandId;
+
+ @SerializedName("hostname")
private String hostname;
+
+ @SerializedName("role")
private String role;
+
+ @SerializedName("hostLevelParams")
private Map<String, String> hostLevelParams = new HashMap<String, String>();
+
+ @SerializedName("roleParams")
private Map<String, String> roleParams = null;
+
+ @SerializedName("roleCommand")
private RoleCommand roleCommand;
- private Map<String, Set<String>> clusterHostInfo =
+
+ @SerializedName("clusterHostInfo")
+ private Map<String, Set<String>> clusterHostInfo =
new HashMap<String, Set<String>>();
+
+ @SerializedName("configurations")
private Map<String, Map<String, String>> configurations;
+
@SerializedName("configuration_attributes")
private Map<String, Map<String, Map<String, String>>> configurationAttributes;
+
+ @SerializedName("configurationTags")
private Map<String, Map<String, String>> configurationTags;
+
+ @SerializedName("forceRefreshConfigTags")
private Set<String> forceRefreshConfigTags = new HashSet<String>();
+
+ @SerializedName("commandParams")
private Map<String, String> commandParams;
+
+ @SerializedName("serviceName")
private String serviceName;
+
+ @SerializedName("componentName")
private String componentName;
/**
* Used for ignoring nagios alerts at agent
*/
+ @SerializedName("passiveInfo")
private Set<Map<String,String>> passiveInfo;
- @JsonProperty("commandId")
public String getCommandId() {
- return this.commandId;
+ return commandId;
}
-
- @JsonProperty("commandId")
+
public void setCommandId(String commandId) {
this.commandId = commandId;
}
-
+
@Override
public boolean equals(Object other) {
if (!(other instanceof ExecutionCommand)) {
return false;
}
ExecutionCommand o = (ExecutionCommand) other;
- return (this.commandId.equals(o.commandId) &&
- this.hostname.equals(o.hostname) &&
- this.role.equals(o.role) &&
- this.roleCommand.equals(o.roleCommand));
+ return (commandId.equals(o.commandId) &&
+ hostname.equals(o.hostname) &&
+ role.equals(o.role) &&
+ roleCommand.equals(o.roleCommand));
}
-
+
@Override
public String toString() {
try {
return StageUtils.jaxbToString(this);
} catch (Exception ex) {
LOG.warn("Exception in json conversion", ex);
- return "Exception in json conversion";
+ return "Exception in json conversion";
}
}
@@ -103,97 +132,79 @@ public class ExecutionCommand extends AgentCommand {
return (hostname + commandId + role).hashCode();
}
- @JsonProperty("taskId")
public long getTaskId() {
return taskId;
}
- @JsonProperty("taskId")
public void setTaskId(long taskId) {
this.taskId = taskId;
}
- @JsonProperty("role")
public String getRole() {
return role;
}
- @JsonProperty("role")
public void setRole(String role) {
this.role = role;
}
- @JsonProperty("roleParams")
public Map<String, String> getRoleParams() {
return roleParams;
}
- @JsonProperty("roleParams")
public void setRoleParams(Map<String, String> roleParams) {
this.roleParams = roleParams;
}
- @JsonProperty("roleCommand")
public RoleCommand getRoleCommand() {
return roleCommand;
}
- @JsonProperty("roleCommand")
public void setRoleCommand(RoleCommand cmd) {
- this.roleCommand = cmd;
+ roleCommand = cmd;
}
-
- @JsonProperty("clusterName")
+
public String getClusterName() {
return clusterName;
}
-
- @JsonProperty("clusterName")
+
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
- @JsonProperty("hostname")
public String getHostname() {
return hostname;
}
- @JsonProperty("hostname")
public void setHostname(String hostname) {
this.hostname = hostname;
}
- @JsonProperty("hostLevelParams")
public Map<String, String> getHostLevelParams() {
return hostLevelParams;
}
- @JsonProperty("hostLevelParams")
public void setHostLevelParams(Map<String, String> params) {
- this.hostLevelParams = params;
+ hostLevelParams = params;
}
- @JsonProperty("clusterHostInfo")
public Map<String, Set<String>> getClusterHostInfo() {
return clusterHostInfo;
}
- @JsonProperty("clusterHostInfo")
public void setClusterHostInfo(Map<String, Set<String>> clusterHostInfo) {
this.clusterHostInfo = clusterHostInfo;
}
-
- @JsonProperty("configurations")
+
public Map<String, Map<String, String>> getConfigurations() {
return configurations;
}
- @JsonProperty("configurations")
public void setConfigurations(Map<String, Map<String, String>> configurations) {
this.configurations = configurations;
}
/**
- * @return Returns the set of config-types that have to be propagated to actual-config of component of given custom command, if command is successfully finished.
+ * @return Returns the set of config-types that have to be propagated to actual-config of component of given custom command, if command is successfully finished.
*/
public Set<String> getForceRefreshConfigTags() {
return forceRefreshConfigTags;
@@ -203,42 +214,34 @@ public class ExecutionCommand extends AgentCommand {
this.forceRefreshConfigTags = forceRefreshConfigTags;
}
- @JsonProperty("configuration_attributes")
public Map<String, Map<String, Map<String, String>>> getConfigurationAttributes() {
return configurationAttributes;
}
- @JsonProperty("configuration_attributes")
public void setConfigurationAttributes(Map<String, Map<String, Map<String, String>>> configurationAttributes) {
this.configurationAttributes = configurationAttributes;
}
- @JsonProperty("commandParams")
public Map<String, String> getCommandParams() {
return commandParams;
}
- @JsonProperty("commandParams")
public void setCommandParams(Map<String, String> commandParams) {
this.commandParams = commandParams;
}
- @JsonProperty("serviceName")
public String getServiceName() {
return serviceName;
}
- @JsonProperty("serviceName")
public void setServiceName(String serviceName) {
this.serviceName = serviceName;
}
- @JsonProperty("componentName")
public String getComponentName() {
return componentName;
}
- @JsonProperty("componentName")
public void setComponentName(String componentName) {
this.componentName = componentName;
}
@@ -248,22 +251,22 @@ public class ExecutionCommand extends AgentCommand {
*/
public void setConfigurationTags(Map<String, Map<String, String>> configTags) {
configurationTags = configTags;
- }
+ }
/**
- * @return the configuration tags
+ * @return the configuration tags
*/
public Map<String, Map<String, String>> getConfigurationTags() {
return configurationTags;
}
-
+
/**
* @return the passive info for the cluster
*/
public Set<Map<String, String>> getPassiveInfo() {
return passiveInfo;
}
-
+
/**
* @param info the passive info for the cluster
*/
@@ -277,7 +280,6 @@ public class ExecutionCommand extends AgentCommand {
* encapsulated inside command.
*/
public static interface KeyNames {
-
String COMMAND_TIMEOUT = "command_timeout";
String SCRIPT = "script";
String SCRIPT_TYPE = "script_type";
@@ -306,7 +308,6 @@ public class ExecutionCommand extends AgentCommand {
String SERVICE_CHECK = "SERVICE_CHECK"; // TODO: is it a standard command? maybe add it to RoleCommand enum?
String CUSTOM_COMMAND = "custom_command";
-
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
index 0dff507..1e9dc12 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
@@ -21,78 +21,80 @@ package org.apache.ambari.server.agent;
import java.util.ArrayList;
import java.util.List;
-import org.codehaus.jackson.annotate.JsonProperty;
+import com.google.gson.annotations.SerializedName;
/**
* Controller to Agent response data model.
*/
public class HeartBeatResponse {
+ @SerializedName("responseId")
private long responseId;
+ @SerializedName("executionCommands")
private List<ExecutionCommand> executionCommands = new ArrayList<ExecutionCommand>();
+
+ @SerializedName("statusCommands")
private List<StatusCommand> statusCommands = new ArrayList<StatusCommand>();
+
+ @SerializedName("cancelCommands")
private List<CancelCommand> cancelCommands = new ArrayList<CancelCommand>();
/**
* {@link AlertDefinitionCommand}s are used to instruct the agent as to which
- * alert definitions it needs to schedule.
+ * alert definitions it needs to schedule. A {@code null} value here indicates
+ * that no data is to be sent and no change is required on the agent. This is
+ * different from sending an empty list where the empty list would instruct
+ * the agent to abandon all alert definitions that are scheduled.
*/
- @JsonProperty("alertDefinitionCommands")
- private List<AlertDefinitionCommand> alertDefinitionCommands = new ArrayList<AlertDefinitionCommand>();
-
+ @SerializedName("alertDefinitionCommands")
+ private List<AlertDefinitionCommand> alertDefinitionCommands = null;
+ @SerializedName("registrationCommand")
private RegistrationCommand registrationCommand;
+ @SerializedName("restartAgent")
private boolean restartAgent = false;
+
+ @SerializedName("hasMappedComponents")
private boolean hasMappedComponents = false;
- @JsonProperty("responseId")
public long getResponseId() {
return responseId;
}
- @JsonProperty("responseId")
public void setResponseId(long responseId) {
this.responseId=responseId;
}
- @JsonProperty("executionCommands")
public List<ExecutionCommand> getExecutionCommands() {
return executionCommands;
}
- @JsonProperty("executionCommands")
public void setExecutionCommands(List<ExecutionCommand> executionCommands) {
this.executionCommands = executionCommands;
}
- @JsonProperty("statusCommands")
public List<StatusCommand> getStatusCommands() {
return statusCommands;
}
- @JsonProperty("statusCommands")
public void setStatusCommands(List<StatusCommand> statusCommands) {
this.statusCommands = statusCommands;
}
- @JsonProperty("cancelCommands")
public List<CancelCommand> getCancelCommands() {
return cancelCommands;
}
- @JsonProperty("cancelCommands")
public void setCancelCommands(List<CancelCommand> cancelCommands) {
this.cancelCommands = cancelCommands;
}
- @JsonProperty("registrationCommand")
public RegistrationCommand getRegistrationCommand() {
return registrationCommand;
}
- @JsonProperty("registrationCommand")
public void setRegistrationCommand(RegistrationCommand registrationCommand) {
this.registrationCommand = registrationCommand;
}
@@ -119,22 +121,18 @@ public class HeartBeatResponse {
alertDefinitionCommands = commands;
}
- @JsonProperty("restartAgent")
public boolean isRestartAgent() {
return restartAgent;
}
- @JsonProperty("restartAgent")
public void setRestartAgent(boolean restartAgent) {
this.restartAgent = restartAgent;
}
- @JsonProperty("hasMappedComponents")
public boolean hasMappedComponents() {
return hasMappedComponents;
}
- @JsonProperty("hasMappedComponents")
public void setHasMappedComponents(boolean hasMappedComponents) {
this.hasMappedComponents = hasMappedComponents;
}
@@ -152,6 +150,12 @@ public class HeartBeatResponse {
}
public void addAlertDefinitionCommand(AlertDefinitionCommand command) {
+ // commands are added here when they are taken off the queue; there should
+ // be no thread contention and thus no worry about locks for the null check
+ if (null == alertDefinitionCommands) {
+ alertDefinitionCommands = new ArrayList<AlertDefinitionCommand>();
+ }
+
alertDefinitionCommands.add(command);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/main/java/org/apache/ambari/server/agent/NagiosAlertCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/NagiosAlertCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/NagiosAlertCommand.java
index f8e2f26..bdf9039 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/NagiosAlertCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/NagiosAlertCommand.java
@@ -21,24 +21,27 @@ import java.util.Collection;
import org.apache.ambari.server.state.Alert;
+import com.google.gson.annotations.SerializedName;
+
/**
- * Specialized command that updates Nagios with alert data
+ * Specialized command that updates Nagios with alert data
*/
public class NagiosAlertCommand extends StatusCommand {
+ @SerializedName("alerts")
private Collection<Alert> alerts = null;
-
+
/**
* @param alerts
*/
public void setAlerts(Collection<Alert> alertData) {
alerts = alertData;
}
-
+
/**
* @return the alerts
*/
public Collection<Alert> getAlerts() {
return alerts;
}
-
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
index 9ac8bed..6e08ef0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
@@ -20,8 +20,6 @@ package org.apache.ambari.server.agent;
import java.util.HashMap;
import java.util.Map;
-import org.codehaus.jackson.annotate.JsonProperty;
-
import com.google.gson.annotations.SerializedName;
/**
@@ -33,96 +31,91 @@ public class StatusCommand extends AgentCommand {
super(AgentCommandType.STATUS_COMMAND);
}
+ @SerializedName("clusterName")
private String clusterName;
+
+ @SerializedName("serviceName")
private String serviceName;
+
+ @SerializedName("componentName")
private String componentName;
+
+ @SerializedName("configurations")
private Map<String, Map<String, String>> configurations;
+
@SerializedName("configuration_attributes")
private Map<String, Map<String, Map<String, String>>> configurationAttributes;
+
+ @SerializedName("commandParams")
private Map<String, String> commandParams = new HashMap<String, String>();
+
+ @SerializedName("hostLevelParams")
private Map<String, String> hostLevelParams = new HashMap<String, String>();
+
+ @SerializedName("hostname")
private String hostname = null;
-
- @JsonProperty("clusterName")
public String getClusterName() {
return clusterName;
}
-
- @JsonProperty("clusterName")
+
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
- @JsonProperty("serviceName")
public String getServiceName() {
return serviceName;
}
- @JsonProperty("serviceName")
public void setServiceName(String serviceName) {
this.serviceName = serviceName;
}
- @JsonProperty("componentName")
public String getComponentName() {
return componentName;
}
- @JsonProperty("componentName")
public void setComponentName(String componentName) {
this.componentName = componentName;
}
-
- @JsonProperty("configurations")
+
public Map<String, Map<String, String>> getConfigurations() {
return configurations;
}
- @JsonProperty("configurations")
public void setConfigurations(Map<String, Map<String, String>> configurations) {
this.configurations = configurations;
}
- @JsonProperty("configuration_attributes")
public Map<String, Map<String, Map<String, String>>> getConfigurationAttributes() {
return configurationAttributes;
}
- @JsonProperty("configuration_attributes")
public void setConfigurationAttributes(Map<String, Map<String, Map<String, String>>> configurationAttributes) {
this.configurationAttributes = configurationAttributes;
}
- @JsonProperty("hostLevelParams")
public Map<String, String> getHostLevelParams() {
return hostLevelParams;
}
- @JsonProperty("hostLevelParams")
public void setHostLevelParams(Map<String, String> params) {
- this.hostLevelParams = params;
+ hostLevelParams = params;
}
- @JsonProperty("commandParams")
public Map<String, String> getCommandParams() {
return commandParams;
}
- @JsonProperty("commandParams")
public void setCommandParams(Map<String, String> commandParams) {
this.commandParams = commandParams;
}
-
- @JsonProperty("hostname")
+
public void setHostname(String hostname) {
this.hostname = hostname;
}
- @JsonProperty("hostname")
public String getHostname() {
return hostname;
}
-
-
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
index e3b5d93..f20a9a9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
@@ -273,9 +273,10 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
for (Map<String, Object> requestPropMap : request.getProperties()) {
for (Map<String, Object> propertyMap : getPropertyMaps(requestPropMap, predicate)) {
- Long id = (Long) propertyMap.get(ALERT_DEF_ID);
+ String stringId = (String) propertyMap.get(ALERT_DEF_ID);
+ long id = Long.parseLong(stringId);
- AlertDefinitionEntity entity = alertDefinitionDAO.findById(id.longValue());
+ AlertDefinitionEntity entity = alertDefinitionDAO.findById(id);
if (null == entity) {
continue;
}
@@ -359,7 +360,8 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
String clusterName = null;
for (final Resource resource : resources) {
- definitionIds.add((Long) resource.getPropertyValue(ALERT_DEF_ID));
+ Long id = (Long) resource.getPropertyValue(ALERT_DEF_ID);
+ definitionIds.add(id);
if (null == clusterName) {
clusterName = (String) resource.getPropertyValue(ALERT_DEF_CLUSTER_NAME);
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertHistoryEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertHistoryEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertHistoryEntity.java
index 502aca9..a671ae1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertHistoryEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertHistoryEntity.java
@@ -90,7 +90,7 @@ public class AlertHistoryEntity {
* Unidirectional many-to-one association to {@link AlertDefinitionEntity}
*/
@ManyToOne
- @JoinColumn(name = "definition_id", nullable = false)
+ @JoinColumn(name = "alert_definition_id", nullable = false)
private AlertDefinitionEntity alertDefinition;
/**
@@ -101,7 +101,7 @@ public class AlertHistoryEntity {
/**
* Gets the unique ID for this alert instance.
- *
+ *
* @return the unique ID (never {@code null}).
*/
public Long getAlertId() {
@@ -110,7 +110,7 @@ public class AlertHistoryEntity {
/**
* Sets the unique ID for this alert instance.
- *
+ *
* @param alertId
* the unique ID (not {@code null}).
*/
@@ -120,7 +120,7 @@ public class AlertHistoryEntity {
/**
* Gets the instance identifier, if any, for this alert instance.
- *
+ *
* @return the instance ID or {@code null} if none.
*/
public String getAlertInstance() {
@@ -129,7 +129,7 @@ public class AlertHistoryEntity {
/**
* Sets the instance identifier, if any, for this alert instance.
- *
+ *
* @param alertInstance
* the instance ID or {@code null} if none.
*/
@@ -140,7 +140,7 @@ public class AlertHistoryEntity {
/**
* Gets the label for this alert instance. The label is typically an
* abbreviated form of the alert text.
- *
+ *
* @return the alert instance label or {@code null} if none.
* @see #getAlertText()
*/
@@ -150,7 +150,7 @@ public class AlertHistoryEntity {
/**
* Sets the label for this alert instance.
- *
+ *
* @param alertLabel
* the label or {@code null} if none.
*/
@@ -160,7 +160,7 @@ public class AlertHistoryEntity {
/**
* Gets the state of this alert instance.
- *
+ *
* @return the alert state (never {@code null}).
*/
public AlertState getAlertState() {
@@ -169,7 +169,7 @@ public class AlertHistoryEntity {
/**
* Sets the state of this alert instance.
- *
+ *
* @param alertState
* the alert state (not {@code null}).
*/
@@ -179,7 +179,7 @@ public class AlertHistoryEntity {
/**
* Gets the text of the alert instance.
- *
+ *
* @return the text of the alert instance or {@code null} if none.
*/
public String getAlertText() {
@@ -188,7 +188,7 @@ public class AlertHistoryEntity {
/**
* Sets the text of the alert instance.
- *
+ *
* @param alertText
* the text, or {@code null} if none.
*/
@@ -199,7 +199,7 @@ public class AlertHistoryEntity {
/**
* Gets the time that the alert instance was received. This will be the value,
* in milliseconds, since the UNIX/Java epoch, represented in UTC time.
- *
+ *
* @return the time of the alert instance (never {@code null}).
*/
public Long getAlertTimestamp() {
@@ -209,7 +209,7 @@ public class AlertHistoryEntity {
/**
* Sets the time that the alert instance was received. This should be the
* value, in milliseconds, since the UNIX/Java epoch, represented in UTC time.
- *
+ *
* @param alertTimestamp
* the time of the alert instance (not {@code null}).
*/
@@ -219,7 +219,7 @@ public class AlertHistoryEntity {
/**
* Gets the ID of the cluster that this alert is associated with.
- *
+ *
* @return the ID of the cluster for the server that this alert is for (never
* {@code null}).
*/
@@ -229,7 +229,7 @@ public class AlertHistoryEntity {
/**
* Sets the ID of the cluster that this alert is associated with.
- *
+ *
* @param clusterId
* the ID of the cluster for the server that this alert is for (never
* {@code null}).
@@ -242,7 +242,7 @@ public class AlertHistoryEntity {
* Gets the name of the component, if any, that this alert instance is for.
* Some alerts, such as those that are scoped for the entire service, do not
* have component names.
- *
+ *
* @return the name of the component, or {@code null} for none.
*/
public String getComponentName() {
@@ -254,7 +254,7 @@ public class AlertHistoryEntity {
* Component names are not required if the alert definition is scoped for a
* service. If specified, there is always a 1:1 mapping between alert
* definitions and components.
- *
+ *
* @param componentName
* the name of the component, or {@code null} if none.
*/
@@ -266,7 +266,7 @@ public class AlertHistoryEntity {
* Gets the name of the host that the alert is for. Some alerts do not run
* against hosts, such as aggregate alert definitions, so this may be
* {@code null}.
- *
+ *
* @return the name of the host or {@code null} if none.
*/
public String getHostName() {
@@ -275,7 +275,7 @@ public class AlertHistoryEntity {
/**
* Sets the name of the host that the alert is for.
- *
+ *
* @param hostName
* the name of the host or {@code null} if none.
*/
@@ -285,7 +285,7 @@ public class AlertHistoryEntity {
/**
* Gets the name of the service that the alert is defined for.
- *
+ *
* @return the name of the service (never {@code null}).
*/
public String getServiceName() {
@@ -295,7 +295,7 @@ public class AlertHistoryEntity {
/**
* Sets the name of the service that the alert is defined for. Every alert
* definition is related to exactly 1 service.
- *
+ *
* @param serviceName
* the name of the service (not {@code null}).
*/
@@ -307,7 +307,7 @@ public class AlertHistoryEntity {
* Gets the associated alert definition for this alert instance. The alert
* definition can be used to retrieve global information about an alert such
* as the interval and the name.
- *
+ *
* @return the alert definition (never {@code null}).
*/
public AlertDefinitionEntity getAlertDefinition() {
@@ -316,7 +316,7 @@ public class AlertHistoryEntity {
/**
* Sets the associated alert definition for this alert instance.
- *
+ *
* @param alertDefinition
* the alert definition (not {@code null}).
*/
@@ -329,16 +329,19 @@ public class AlertHistoryEntity {
*/
@Override
public boolean equals(Object object) {
- if (this == object)
+ if (this == object) {
return true;
+ }
- if (object == null || getClass() != object.getClass())
+ if (object == null || getClass() != object.getClass()) {
return false;
+ }
AlertHistoryEntity that = (AlertHistoryEntity) object;
- if (alertId != null ? !alertId.equals(that.alertId) : that.alertId != null)
+ if (alertId != null ? !alertId.equals(that.alertId) : that.alertId != null) {
return false;
+ }
return true;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
index 7160bcf..f0f5643 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/StageUtils.java
@@ -17,14 +17,31 @@
*/
package org.apache.ambari.server.utils;
-import com.google.common.base.Joiner;
-import com.google.gson.Gson;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import javax.xml.bind.JAXBException;
+
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.Role;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.actionmanager.Stage;
import org.apache.ambari.server.agent.ExecutionCommand;
-import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.HostComponentAdminState;
@@ -35,30 +52,12 @@ import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEve
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.jackson.JsonGenerationException;
-import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;
-import javax.xml.bind.JAXBException;
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeMap;
-import java.util.TreeSet;
+import com.google.common.base.Joiner;
+import com.google.gson.Gson;
public class StageUtils {
@@ -205,15 +204,6 @@ public class StageUtils {
return getGson().toJson(jaxbObj);
}
- public static ExecutionCommand stringToExecutionCommand(String json)
- throws JsonParseException, JsonMappingException, IOException {
- ObjectMapper mapper = new ObjectMapper();
- mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
- mapper.configure(SerializationConfig.Feature.USE_ANNOTATIONS, true);
- InputStream is = new ByteArrayInputStream(json.getBytes(Charset.forName("UTF8")));
- return mapper.readValue(is, ExecutionCommand.class);
- }
-
public static <T> T fromJson(String json, Class<T> clazz) throws IOException {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/alerts.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/alerts.json
new file mode 100644
index 0000000..6a06d4d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/alerts.json
@@ -0,0 +1,58 @@
+{
+ "service": [
+ // datanode space aggregate
+ // datanode process aggregate
+ ],
+ "SECONDARY_NAMENODE": [
+ {
+ "name": "secondary_namenode_process",
+ "label": "Secondary NameNode process",
+ "interval": 1,
+ "scope": "service",
+ "source": {
+ "type": "PORT",
+ "config": "{{hdfs-site/dfs.namenode.secondary.http-address}}:50071",
+ "default": 50071
+ }
+ }
+ ],
+ "NAMENODE": [
+ // name node cpu utilization (metric)
+ {
+ "name": "namenode_cpu",
+ "label": "NameNode host CPU Utilization",
+ "scope": "host",
+ "source": {
+ "type": "METRIC",
+ "jmx": "java.lang:type=OperatingSystem/SystemCpuLoad",
+ "host": "{{hdfs-site/dfs.namenode.secondary.http-address}}"
+ }
+ },
+ // namenode process (port check)
+ {
+ "name": "namenode_process",
+ "label": "NameNode process",
+ "interval": 1,
+ "scope": "host",
+ "source": {
+ "type": "PORT",
+ "uri": "{{hdfs-site/dfs.namenode.http-address}}:50070"
+ }
+ },
+ {
+ "name": "hdfs_last_checkpoint",
+ "label": "Last Checkpoint Time",
+ "interval": 1,
+ "scope": "service",
+ "enabled": false
+ "source": {
+ "type": "SCRIPT",
+ "path": "scripts/alerts/last_checkpoint.py"
+ }
+ }
+ ],
+ "DATANODE": [
+ // datanode process (port check)
+ // datanode space
+ ]
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
index b1b83fa..68cbc92 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
@@ -57,7 +57,6 @@ import org.apache.ambari.server.state.alert.AlertDefinition;
import org.apache.ambari.server.state.stack.MetricDefinition;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
-import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
@@ -1403,7 +1402,6 @@ public class AmbariMetaInfoTest {
}
@Test
- @Ignore
public void testAlertsJson() throws Exception {
ServiceInfo svc = metaInfo.getService(STACK_NAME_HDP, "2.0.5", "HDFS");
Assert.assertNotNull(svc);
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 9ca5348..d36fd70 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -35,6 +35,7 @@ import static org.junit.Assert.fail;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.io.StringReader;
import java.lang.reflect.Type;
import java.net.ConnectException;
import java.net.MalformedURLException;
@@ -4514,8 +4515,10 @@ public class AmbariManagementControllerTest {
//Check configs not stored with execution command
ExecutionCommandDAO executionCommandDAO = injector.getInstance(ExecutionCommandDAO.class);
ExecutionCommandEntity commandEntity = executionCommandDAO.findByPK(task.getTaskId());
- ExecutionCommand executionCommand =
- StageUtils.fromJson(new String(commandEntity.getCommand()), ExecutionCommand.class);
+
+ Gson gson = new Gson();
+ ExecutionCommand executionCommand = gson.fromJson(new StringReader(
+ new String(commandEntity.getCommand())), ExecutionCommand.class);
assertFalse(executionCommand.getConfigurationTags().isEmpty());
assertTrue(executionCommand.getConfigurations() == null || executionCommand.getConfigurations().isEmpty());
@@ -10329,7 +10332,7 @@ public class AmbariManagementControllerTest {
// Start
startService(clusterName, serviceName, false, false);
-
+
ServiceComponentHostRequest req = new ServiceComponentHostRequest(clusterName, serviceName,
componentName1, host1, "INSTALLED");
@@ -10339,24 +10342,24 @@ public class AmbariManagementControllerTest {
// succeed in creating a task
assertNotNull(resp);
-
+
// manually change live state to stopped as no running action manager
for (ServiceComponentHost sch :
clusters.getCluster(clusterName).getServiceComponentHosts(host1)) {
sch.setState(State.INSTALLED);
}
-
+
// no new commands since no targeted info
resp = controller.updateHostComponents(Collections.singleton(req), new HashMap<String, String>(), false);
assertNull(resp);
-
+
// role commands added for targeted command
resp = controller.updateHostComponents(Collections.singleton(req), requestProperties, false);
assertNotNull(resp);
-
+
}
-
-
+
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
index d21df88..333f674 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
@@ -283,6 +283,7 @@ public class AlertDefinitionResourceProviderTest {
Predicate p = new PredicateBuilder().property(
AlertDefinitionResourceProvider.ALERT_DEF_ID).equals("1").and().property(
AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME).equals("c1").toPredicate();
+
// everything is mocked, there is no DB
entity.setDefinitionId(Long.valueOf(1));
@@ -295,6 +296,7 @@ public class AlertDefinitionResourceProviderTest {
replay(dao);
requestProps = new HashMap<String, Object>();
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_ID, "1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME, "c1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_INTERVAL, "1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_NAME, "my_def1");
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
index 6e587f1..d3f0e37 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/TestStageUtils.java
@@ -20,9 +20,15 @@ package org.apache.ambari.server.utils;
import static org.easymock.EasyMock.expect;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+import static org.powermock.api.easymock.PowerMock.mockStaticPartial;
+import static org.powermock.api.easymock.PowerMock.replayAll;
+import java.io.ByteArrayInputStream;
import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
import java.net.UnknownHostException;
+import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -42,8 +48,6 @@ import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
import org.apache.ambari.server.actionmanager.Stage;
import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
-import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.controller.HostsMap;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.state.Cluster;
@@ -57,19 +61,17 @@ import org.apache.commons.logging.LogFactory;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.junit.Before;
-import org.junit.Test;
import org.junit.Ignore;
+import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
-import static org.powermock.api.easymock.PowerMock.replayAll;
-import java.net.InetAddress;
-import static org.powermock.api.easymock.PowerMock.*;
import com.google.common.collect.ContiguousSet;
import com.google.common.collect.DiscreteDomain;
import com.google.common.collect.Range;
+import com.google.gson.Gson;
import com.google.inject.Guice;
import com.google.inject.Injector;
@@ -105,12 +107,12 @@ public class TestStageUtils {
Injector injector) throws AmbariException {
cl.setDesiredStackVersion(new StackId(STACK_ID));
cl.addService(serviceName);
-
+
for (Entry<String, List<Integer>> component : topology.entrySet()) {
-
+
String componentName = component.getKey();
cl.getService(serviceName).addServiceComponent(componentName);
-
+
for (Integer hostIndex : component.getValue()) {
cl.getService(serviceName)
.getServiceComponent(componentName)
@@ -151,14 +153,20 @@ public class TestStageUtils {
public void testJasonToExecutionCommand() throws JsonGenerationException,
JsonMappingException, JAXBException, IOException {
Stage s = StageUtils.getATestStage(1, 2, "host1", "clusterHostInfo");
- ExecutionCommand cmd = s.getExecutionCommands("host1").get(0).getExecutionCommand();
+ ExecutionCommand cmd = s.getExecutionCommands("host1").get(0).getExecutionCommand();
HashMap<String, Map<String,String>> configTags = new HashMap<String, Map<String,String>>();
Map<String, String> globalTag = new HashMap<String, String>();
globalTag.put("tag", "version1");
configTags.put("global", globalTag );
cmd.setConfigurationTags(configTags);
String json = StageUtils.jaxbToString(cmd);
- ExecutionCommand cmdDes = StageUtils.stringToExecutionCommand(json);
+
+ InputStream is = new ByteArrayInputStream(
+ json.getBytes(Charset.forName("UTF8")));
+
+ ExecutionCommand cmdDes = new Gson().fromJson(new InputStreamReader(is),
+ ExecutionCommand.class);
+
assertEquals(cmd.toString(), cmdDes.toString());
assertEquals(cmd, cmdDes);
}
@@ -195,20 +203,20 @@ public class TestStageUtils {
8672,
null,
8673);
-
+
fsm.addCluster("c1");
fsm.getCluster("c1").setDesiredStackVersion(new StackId(STACK_ID));
-
+
int index = 0;
-
+
for (String host: hostList) {
fsm.addHost(host);
-
+
Map<String, String> hostAttributes = new HashMap<String, String>();
hostAttributes.put("os_family", "redhat");
hostAttributes.put("os_release_version", "5.9");
fsm.getHost(host).setHostAttributes(hostAttributes);
-
+
fsm.getHost(host).setCurrentPingPort(pingPorts.get(index));
fsm.getHost(host).persist();
fsm.mapHostToCluster(host, "c1");
@@ -222,24 +230,24 @@ public class TestStageUtils {
List<Integer> datanodeIndexes = Arrays.asList(0,1,2,3,5,7,8,9);
hdfsTopology.put("DATANODE", new ArrayList<Integer>(datanodeIndexes));
addService(fsm.getCluster("c1"), hostList, hdfsTopology , "HDFS", injector);
-
+
//Add HBASE service
- Map<String, List<Integer>> hbaseTopology = new HashMap<String, List<Integer>>();
+ Map<String, List<Integer>> hbaseTopology = new HashMap<String, List<Integer>>();
hbaseTopology.put("HBASE_MASTER", Collections.singletonList(5));
List<Integer> regionServiceIndexes = Arrays.asList(1,3,5,8,9);
hbaseTopology.put("HBASE_REGIONSERVER", regionServiceIndexes);
addService(fsm.getCluster("c1"), hostList, hbaseTopology , "HBASE", injector);
-
+
//Add MAPREDUCE service
- Map<String, List<Integer>> mrTopology = new HashMap<String, List<Integer>>();
+ Map<String, List<Integer>> mrTopology = new HashMap<String, List<Integer>>();
mrTopology.put("JOBTRACKER", Collections.singletonList(5));
List<Integer> taskTrackerIndexes = Arrays.asList(1,2,3,4,5,7,9);
mrTopology.put("TASKTRACKER", taskTrackerIndexes);
addService(fsm.getCluster("c1"), hostList, mrTopology , "MAPREDUCE", injector);
-
-
+
+
//Add NONAME service
- Map<String, List<Integer>> nonameTopology = new HashMap<String, List<Integer>>();
+ Map<String, List<Integer>> nonameTopology = new HashMap<String, List<Integer>>();
nonameTopology.put("NONAME_SERVER", Collections.singletonList(7));
addService(fsm.getCluster("c1"), hostList, nonameTopology , "NONAME", injector);
@@ -259,39 +267,40 @@ public class TestStageUtils {
for (Host host: fsm.getHosts()) {
assertTrue(allHosts.contains(host.getHostName()));
}
-
-
+
+
//Check HDFS topology compression
Map<String, String> hdfsMapping = new HashMap<String, String>();
hdfsMapping.put("DATANODE", "slave_hosts");
hdfsMapping.put("NAMENODE", "namenode_host");
hdfsMapping.put("SECONDARY_NAMENODE", "snamenode_host");
checkServiceCompression(info, hdfsMapping, hdfsTopology, hostList);
-
-
+
+
//Check HBASE topology compression
Map<String, String> hbaseMapping = new HashMap<String, String>();
hbaseMapping.put("HBASE_MASTER", "hbase_master_hosts");
hbaseMapping.put("HBASE_REGIONSERVER", "hbase_rs_hosts");
checkServiceCompression(info, hbaseMapping, hbaseTopology, hostList);
-
+
//Check MAPREDUCE topology compression
Map<String, String> mrMapping = new HashMap<String, String>();
mrMapping.put("JOBTRACKER", "jtnode_host");
mrMapping.put("TASKTRACKER", "mapred_tt_hosts");
checkServiceCompression(info, mrMapping, mrTopology, hostList);
-
+
Set<String> actualPingPorts = info.get("all_ping_ports");
-
- if (pingPorts.contains(null))
+
+ if (pingPorts.contains(null)) {
assertEquals(new HashSet<Integer>(pingPorts).size(), actualPingPorts.size() + 1);
- else
+ } else {
assertEquals(new HashSet<Integer>(pingPorts).size(), actualPingPorts.size());
-
+ }
+
List<Integer> pingPortsActual = getRangeMappedDecompressedSet(actualPingPorts);
List<Integer> reindexedPorts = getReindexedList(pingPortsActual, new ArrayList<String>(allHosts), hostList);
-
+
//Treat null values
while (pingPorts.contains(null)) {
int indexOfNull = pingPorts.indexOf(null);
@@ -299,7 +308,7 @@ public class TestStageUtils {
}
assertEquals(pingPorts, reindexedPorts);
-
+
// check for no-name in the list
assertTrue(info.containsKey("noname_server_hosts"));
assertTrue(info.containsKey("decom_tt_hosts"));
@@ -316,36 +325,38 @@ public class TestStageUtils {
private void checkServiceCompression(Map<String, Set<String>> info,
Map<String, String> serviceMapping, Map<String, List<Integer>> serviceTopology,
List<String> hostList) {
-
-
+
+
for (Entry<String, List<Integer>> component: serviceTopology.entrySet()) {
-
+
String componentName = component.getKey();
-
+
List<Integer> componentIndexesExpected = component.getValue();
-
+
String roleName = serviceMapping.get(componentName);
-
+
assertTrue("No mapping for " + componentName , roleName != null);
-
+
Set<Integer> componentIndexesActual = getDecompressedSet(info.get(roleName));
-
+
Set<String> expectedComponentHosts = new HashSet<String>();
-
- for (Integer i: componentIndexesExpected)
+
+ for (Integer i: componentIndexesExpected) {
expectedComponentHosts.add(hostList.get(i));
-
+ }
+
Set<String> actualSlavesHosts = new HashSet<String>();
-
- for (Integer i: componentIndexesActual)
+
+ for (Integer i: componentIndexesActual) {
actualSlavesHosts.add(new ArrayList<String>(info.get(HOSTS_LIST)).get(i));
-
-
-
+ }
+
+
+
assertEquals(expectedComponentHosts, actualSlavesHosts);
-
+
}
-
+
}
private Set<Integer> getDecompressedSet(Set<String> set) {
@@ -379,7 +390,7 @@ public class TestStageUtils {
}
return resultSet;
}
-
+
private List<Integer> getRangeMappedDecompressedSet(Set<String> compressedSet) {
SortedMap<Integer, Integer> resultMap = new TreeMap<Integer, Integer>();
@@ -388,9 +399,10 @@ public class TestStageUtils {
String[] split = token.split(":");
- if (split.length != 2)
+ if (split.length != 2) {
throw new RuntimeException("Broken data, expected format - m:r, got - "
+ token);
+ }
Integer index = Integer.valueOf(split[0]);
@@ -401,8 +413,9 @@ public class TestStageUtils {
Set<Integer> decompressedSet = getDecompressedSet(rangeTokensSet);
- for (Integer i : decompressedSet)
+ for (Integer i : decompressedSet) {
resultMap.put(i, index);
+ }
}
@@ -411,7 +424,7 @@ public class TestStageUtils {
return resultList;
}
-
+
private List<Integer> getReindexedList(List<Integer> list,
List<String> currentIndexes, List<String> desiredIndexes) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/72ebd263/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/alerts.json b/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/alerts.json
new file mode 100644
index 0000000..85aa3ab
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/alerts.json
@@ -0,0 +1,51 @@
+{
+ "service": [
+ ],
+ "SECONDARY_NAMENODE": [
+ {
+ "name": "secondary_namenode_process",
+ "label": "Secondary NameNode process",
+ "interval": 1,
+ "scope": "service",
+ "source": {
+ "type": "PORT",
+ "config": "{{hdfs-site/dfs.namenode.secondary.http-address}}:50071"
+ }
+ }
+ ],
+ "NAMENODE": [
+ {
+ "name": "namenode_cpu",
+ "label": "NameNode host CPU Utilization",
+ "scope": "host",
+ "source": {
+ "type": "METRIC",
+ "jmx": "java.lang:type=OperatingSystem/SystemCpuLoad",
+ "host": "{{hdfs-site/dfs.namenode.secondary.http-address}}"
+ }
+ },
+ {
+ "name": "namenode_process",
+ "label": "NameNode process",
+ "interval": 1,
+ "scope": "host",
+ "source": {
+ "type": "PORT",
+ "uri": "{{hdfs-site/dfs.namenode.http-address}}:50070"
+ }
+ },
+ {
+ "name": "hdfs_last_checkpoint",
+ "label": "Last Checkpoint Time",
+ "interval": 1,
+ "scope": "service",
+ "enabled": false,
+ "source": {
+ "type": "SCRIPT",
+ "path": "scripts/alerts/last_checkpoint.py"
+ }
+ }
+ ],
+ "DATANODE": [
+ ]
+}
[08/50] [abbrv] git commit: AMBARI-6897. View Unit Test Failure.
(mahadev)
Posted by jo...@apache.org.
AMBARI-6897. View Unit Test Failure. (mahadev)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/50e79839
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/50e79839
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/50e79839
Branch: refs/heads/branch-alerts-dev
Commit: 50e798395a1ca97ce0d0f2eb4979bed657227ea4
Parents: 32caa43
Author: Mahadev Konar <ma...@apache.org>
Authored: Mon Aug 18 10:49:53 2014 -0700
Committer: Mahadev Konar <ma...@apache.org>
Committed: Mon Aug 18 10:49:57 2014 -0700
----------------------------------------------------------------------
.../apache/ambari/server/view/ViewRegistry.java | 20 ++++++++---------
.../ambari/server/view/ViewRegistryTest.java | 23 +++++++++++---------
2 files changed, 22 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/50e79839/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
index 82d84f6..65a48b4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/view/ViewRegistry.java
@@ -1053,15 +1053,6 @@ public class ViewRegistry {
throws Exception {
String viewName = view.getName();
- // get or create an admin resource type to represent this view
- ResourceTypeEntity resourceTypeEntity = resourceTypeDAO.findByName(viewName);
- if (resourceTypeEntity == null) {
- resourceTypeEntity = new ResourceTypeEntity();
- resourceTypeEntity.setName(view.getName());
- resourceTypeDAO.create(resourceTypeEntity);
- }
- view.setResourceType(resourceTypeEntity);
-
ViewEntity persistedView = viewDAO.findByName(viewName);
// if the view is not yet persisted ...
@@ -1070,11 +1061,18 @@ public class ViewRegistry {
LOG.debug("Creating View " + viewName + ".");
}
+ // get or create an admin resource type to represent this view
+ ResourceTypeEntity resourceTypeEntity = resourceTypeDAO.findByName(viewName);
+ if (resourceTypeEntity == null) {
+ resourceTypeEntity = view.getResourceType();
+ resourceTypeDAO.create(resourceTypeEntity);
+ }
+
for( ViewInstanceEntity instance : view.getInstances()) {
// create an admin resource to represent this view instance
ResourceEntity resourceEntity = new ResourceEntity();
- resourceEntity.setResourceType(resourceTypeEntity);
+ resourceEntity.setResourceType(view.getResourceType());
resourceDAO.create(resourceEntity);
instance.setResource(resourceEntity);
@@ -1146,7 +1144,7 @@ public class ViewRegistry {
for (ViewInstanceEntity instance : xmlInstanceEntityMap.values()) {
// create an admin resource to represent this view instance
ResourceEntity resourceEntity = new ResourceEntity();
- resourceEntity.setResourceType(resourceTypeEntity);
+ resourceEntity.setResourceType(view.getResourceType());
resourceDAO.create(resourceEntity);
instance.setResource(resourceEntity);
http://git-wip-us.apache.org/repos/asf/ambari/blob/50e79839/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
index 77990a7..21a00ed 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/view/ViewRegistryTest.java
@@ -173,12 +173,10 @@ public class ViewRegistryTest {
ViewDAO vDAO = createMock(ViewDAO.class);
ResourceDAO rDAO = createNiceMock(ResourceDAO.class);
- ResourceTypeDAO rtDAO = createNiceMock(ResourceTypeDAO.class);
ViewInstanceDAO viDAO = createNiceMock(ViewInstanceDAO.class);
ViewRegistry.setViewDAO(vDAO);
ViewRegistry.setResourceDAO(rDAO);
- ViewRegistry.setResourceTypeDAO(rtDAO);
ViewRegistry.setInstanceDAO(viDAO);
ViewEntity viewDefinition = ViewEntityTest.getViewEntity();
@@ -261,15 +259,11 @@ public class ViewRegistryTest {
expect(vDAO.findAll()).andReturn(Collections.<ViewEntity>emptyList());
- expect(rtDAO.findByName("MY_VIEW{1.0.0}")).andReturn(null);
- rtDAO.create(EasyMock.anyObject(ResourceTypeEntity.class));
- EasyMock.expectLastCall().anyTimes();
-
expect(viDAO.merge(EasyMock.anyObject(ViewInstanceEntity.class))).andReturn(null).times(2);
// replay mocks
replay(configuration, viewDir, extractedArchiveDir, viewArchive, archiveDir, entryFile, classesDir,
- libDir, fileEntry, viewJarFile, enumeration, jarEntry, is, fos, rDAO, rtDAO, vDAO, viDAO);
+ libDir, fileEntry, viewJarFile, enumeration, jarEntry, is, fos, rDAO, vDAO, viDAO);
ViewRegistry registry = ViewRegistry.getInstance();
registry.setHelper(new TestViewRegistryHelper(viewConfigs, files, outputStreams, jarFiles));
@@ -280,7 +274,7 @@ public class ViewRegistryTest {
// verify mocks
verify(configuration, viewDir, extractedArchiveDir, viewArchive, archiveDir, entryFile, classesDir,
- libDir, fileEntry, viewJarFile, enumeration, jarEntry, is, fos, rDAO, rtDAO, vDAO, viDAO);
+ libDir, fileEntry, viewJarFile, enumeration, jarEntry, is, fos, rDAO, vDAO, viDAO);
}
@Test
@@ -386,6 +380,7 @@ public class ViewRegistryTest {
expect(fileEntry.toURI()).andReturn(new URI("file:./"));
expect(vDAO.findAll()).andReturn(Collections.<ViewEntity>emptyList());
+ expect(vDAO.findByName("MY_VIEW{1.0.0}")).andReturn(viewDefinition);
// replay mocks
replay(configuration, viewDir, extractedArchiveDir, viewArchive, archiveDir, entryFile, classesDir,
@@ -578,7 +573,7 @@ public class ViewRegistryTest {
ViewEntity viewEntity = getViewEntity(config, ambariConfig, getClass().getClassLoader(), "");
ViewInstanceEntity viewInstanceEntity = getViewInstanceEntity(viewEntity, config.getInstances().get(0));
- replay(viewDAO, viewInstanceDAO, securityHelper);
+ replay(viewDAO, viewInstanceDAO, securityHelper, rtDAO);
registry.addDefinition(viewEntity);
try {
@@ -616,7 +611,7 @@ public class ViewRegistryTest {
ViewInstanceEntity viewInstanceEntity = getViewInstanceEntity(viewEntity, config.getInstances().get(0));
viewInstanceEntity.setViewName("BOGUS_VIEW");
- replay(viewDAO, viewInstanceDAO, securityHelper);
+ replay(viewDAO, viewInstanceDAO, securityHelper, rtDAO);
registry.addDefinition(viewEntity);
try {
@@ -844,6 +839,14 @@ public class ViewRegistryTest {
public static void clear() {
ViewRegistry.getInstance().clear();
+
+ ViewRegistry.setInstanceDAO(null);
+ ViewRegistry.setMemberDAO(null);
+ ViewRegistry.setPrivilegeDAO(null);
+ ViewRegistry.setResourceDAO(null);
+ ViewRegistry.setResourceTypeDAO(null);
+ ViewRegistry.setSecurityHelper(null);
+ ViewRegistry.setUserDAO(null);
ViewRegistry.setViewDAO(null);
}
[04/50] [abbrv] git commit: AMBARI-6895 Log in with different users
cause wrong list of available views.(Buzhor Denys via atkach)
Posted by jo...@apache.org.
AMBARI-6895 Log in with different users cause wrong list of available views.(Buzhor Denys via atkach)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/620978b6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/620978b6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/620978b6
Branch: refs/heads/branch-alerts-dev
Commit: 620978b61182d1cbacffe11ceb79014823972aee
Parents: f0be435
Author: atkach <at...@hortonworks.com>
Authored: Mon Aug 18 19:21:38 2014 +0300
Committer: atkach <at...@hortonworks.com>
Committed: Mon Aug 18 19:21:38 2014 +0300
----------------------------------------------------------------------
ambari-web/app/controllers/global/cluster_controller.js | 2 ++
ambari-web/app/views/main/menu.js | 5 +++--
2 files changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/620978b6/ambari-web/app/controllers/global/cluster_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/global/cluster_controller.js b/ambari-web/app/controllers/global/cluster_controller.js
index ba1006c..d612039 100644
--- a/ambari-web/app/controllers/global/cluster_controller.js
+++ b/ambari-web/app/controllers/global/cluster_controller.js
@@ -364,6 +364,8 @@ App.ClusterController = Em.Controller.extend({
sender: this,
success: 'loadViewInstancesSuccess'
});
+ } else {
+ this.set('ambariViews', []);
}
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/620978b6/ambari-web/app/views/main/menu.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/menu.js b/ambari-web/app/views/main/menu.js
index c6e4683..888a806 100644
--- a/ambari-web/app/views/main/menu.js
+++ b/ambari-web/app/views/main/menu.js
@@ -30,7 +30,7 @@ App.MainMenuView = Em.CollectionView.extend({
return App.router.get('clusterController.ambariViews');
}.property('App.router.clusterController.ambariViews'),
- content:function(){
+ content: function(){
var result = [];
if (App.router.get('loggedIn')) {
@@ -62,7 +62,8 @@ App.MainMenuView = Em.CollectionView.extend({
}
return result;
- }.property('App.router.loggedIn', 'App.router.clusterController.isLoaded', 'App.supports.views', 'App.supports.mirroring', 'App.supports.secureCluster', 'App.supports.highAvailability'),
+ }.property('App.router.loggedIn', 'App.router.clusterController.isLoaded', 'App.supports.views', 'App.supports.mirroring',
+ 'App.supports.secureCluster', 'App.supports.highAvailability', 'views.length'),
/**
* Adds observer on lastSetURL and calls navigation sync procedure
[34/50] [abbrv] git commit: AMBARI-6929. HDFS heatmap should be
hidden in standalone storm deployment. (jaimin)
Posted by jo...@apache.org.
AMBARI-6929. HDFS heatmap should be hidden in standalone storm deployment. (jaimin)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/356e17af
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/356e17af
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/356e17af
Branch: refs/heads/branch-alerts-dev
Commit: 356e17af4d47f37185cd91e024333194fa5714ee
Parents: b39b998
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Tue Aug 19 18:04:57 2014 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Tue Aug 19 18:05:09 2014 -0700
----------------------------------------------------------------------
.../app/controllers/main/charts/heatmap.js | 25 ++++++++++++--------
1 file changed, 15 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/356e17af/ambari-web/app/controllers/main/charts/heatmap.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/charts/heatmap.js b/ambari-web/app/controllers/main/charts/heatmap.js
index 49c6e25..8010ead 100644
--- a/ambari-web/app/controllers/main/charts/heatmap.js
+++ b/ambari-web/app/controllers/main/charts/heatmap.js
@@ -43,19 +43,24 @@ App.MainChartsHeatmapController = Em.Controller.extend({
App.MainChartHeatmapCpuWaitIOMetric.create()
/*, App.MainChartHeatmapProcessRunMetric.create()*/
]
- }),
- Em.Object.create({
- label: Em.I18n.t('charts.heatmap.category.hdfs'),
- category: 'hdfs',
- items: [
- App.MainChartHeatmapDFSBytesReadMetric.create(),
- App.MainChartHeatmapDFSBytesWrittenMetric.create(),
- App.MainChartHeatmapDFSGCTimeMillisMetric.create(),
- App.MainChartHeatmapDFSMemHeapUsedMetric.create()
- ]
})
];
+ if (App.HDFSService.find().get('length')) {
+ metrics.push(
+ Em.Object.create({
+ label: Em.I18n.t('charts.heatmap.category.hdfs'),
+ category: 'hdfs',
+ items: [
+ App.MainChartHeatmapDFSBytesReadMetric.create(),
+ App.MainChartHeatmapDFSBytesWrittenMetric.create(),
+ App.MainChartHeatmapDFSGCTimeMillisMetric.create(),
+ App.MainChartHeatmapDFSMemHeapUsedMetric.create()
+ ]
+ })
+ );
+ }
+
if (App.MapReduceService.find().get('length')) {
metrics.push(
Em.Object.create({
[14/50] [abbrv] git commit: AMBARI-6908 - Alerts: AlertDefinition
Should Have a Label (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-6908 - Alerts: AlertDefinition Should Have a Label (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bbd9179a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bbd9179a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bbd9179a
Branch: refs/heads/branch-alerts-dev
Commit: bbd9179ad9e6fdc503649bf2cde5484ae8d78571
Parents: ab128d4
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Aug 18 21:01:32 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Aug 18 22:31:04 2014 -0400
----------------------------------------------------------------------
.../AlertDefinitionResourceProvider.java | 140 +++++++++++--------
.../orm/entities/AlertDefinitionEntity.java | 22 +++
.../server/upgrade/UpgradeCatalog170.java | 1 +
.../main/resources/Ambari-DDL-MySQL-CREATE.sql | 1 +
.../main/resources/Ambari-DDL-Oracle-CREATE.sql | 1 +
.../resources/Ambari-DDL-Postgres-CREATE.sql | 1 +
.../Ambari-DDL-Postgres-EMBEDDED-CREATE.sql | 1 +
.../src/main/resources/properties.json | 1 +
.../AlertDefinitionResourceProviderTest.java | 137 +++++++++---------
.../server/upgrade/UpgradeCatalog170Test.java | 4 +-
10 files changed, 182 insertions(+), 127 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/bbd9179a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
index 6f00c27..07b033e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
@@ -57,6 +57,7 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
protected static final String ALERT_DEF_CLUSTER_NAME = "AlertDefinition/cluster_name";
protected static final String ALERT_DEF_ID = "AlertDefinition/id";
protected static final String ALERT_DEF_NAME = "AlertDefinition/name";
+ protected static final String ALERT_DEF_LABEL = "AlertDefinition/label";
protected static final String ALERT_DEF_INTERVAL = "AlertDefinition/interval";
protected static final String ALERT_DEF_SOURCE_TYPE = "AlertDefinition/source/type";
protected static final String ALERT_DEF_SOURCE = "AlertDefinition/source";
@@ -64,13 +65,13 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
protected static final String ALERT_DEF_COMPONENT_NAME = "AlertDefinition/component_name";
protected static final String ALERT_DEF_ENABLED = "AlertDefinition/enabled";
protected static final String ALERT_DEF_SCOPE = "AlertDefinition/scope";
-
+
private static Set<String> pkPropertyIds = new HashSet<String>(
Arrays.asList(ALERT_DEF_ID, ALERT_DEF_NAME));
private static AlertDefinitionDAO alertDefinitionDAO = null;
-
+
private static Gson gson = new Gson();
-
+
/**
* @param instance
*/
@@ -78,13 +79,13 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
public static void init(AlertDefinitionDAO instance) {
alertDefinitionDAO = instance;
}
-
+
AlertDefinitionResourceProvider(Set<String> propertyIds,
Map<Resource.Type, String> keyPropertyIds,
AmbariManagementController managementController) {
super(propertyIds, keyPropertyIds, managementController);
}
-
+
@Override
protected Set<String> getPKPropertyIds() {
return pkPropertyIds;
@@ -103,49 +104,55 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
}
});
notifyCreate(Resource.Type.AlertDefinition, request);
-
+
return getRequestStatus(null);
}
-
+
private void createAlertDefinitions(Set<Map<String, Object>> requestMaps)
throws AmbariException {
List<AlertDefinitionEntity> entities = new ArrayList<AlertDefinitionEntity>();
-
+
for (Map<String, Object> requestMap : requestMaps) {
entities.add(toCreateEntity(requestMap));
}
// !!! TODO multi-create in a transaction
- for (AlertDefinitionEntity entity : entities)
+ for (AlertDefinitionEntity entity : entities) {
alertDefinitionDAO.create(entity);
+ }
}
-
+
private AlertDefinitionEntity toCreateEntity(Map<String, Object> requestMap)
throws AmbariException {
String clusterName = (String) requestMap.get(ALERT_DEF_CLUSTER_NAME);
-
- if (null == clusterName || clusterName.isEmpty())
+
+ if (null == clusterName || clusterName.isEmpty()) {
throw new IllegalArgumentException("Invalid argument, cluster name is required");
-
- if (!requestMap.containsKey(ALERT_DEF_INTERVAL))
+ }
+
+ if (!requestMap.containsKey(ALERT_DEF_INTERVAL)) {
throw new IllegalArgumentException("Check interval must be specified");
-
+ }
+
Integer interval = Integer.valueOf((String) requestMap.get(ALERT_DEF_INTERVAL));
- if (!requestMap.containsKey(ALERT_DEF_NAME))
+ if (!requestMap.containsKey(ALERT_DEF_NAME)) {
throw new IllegalArgumentException("Definition name must be specified");
-
- if (!requestMap.containsKey(ALERT_DEF_SERVICE_NAME))
+ }
+
+ if (!requestMap.containsKey(ALERT_DEF_SERVICE_NAME)) {
throw new IllegalArgumentException("Service name must be specified");
-
- if (!requestMap.containsKey(ALERT_DEF_SOURCE_TYPE))
+ }
+
+ if (!requestMap.containsKey(ALERT_DEF_SOURCE_TYPE)) {
throw new IllegalArgumentException(String.format(
"Source type must be specified and one of %s", EnumSet.allOf(
SourceType.class)));
+ }
JsonObject jsonObj = new JsonObject();
-
+
for (Entry<String, Object> entry : requestMap.entrySet()) {
String propCat = PropertyHelper.getPropertyCategory(entry.getKey());
String propName = PropertyHelper.getPropertyName(entry.getKey());
@@ -155,26 +162,28 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
}
}
- if (0 == jsonObj.entrySet().size())
+ if (0 == jsonObj.entrySet().size()) {
throw new IllegalArgumentException("Source must be specified");
-
+ }
+
Cluster cluster = getManagementController().getClusters().getCluster(clusterName);
-
+
AlertDefinitionEntity entity = new AlertDefinitionEntity();
entity.setClusterId(Long.valueOf(cluster.getClusterId()));
entity.setComponentName((String) requestMap.get(ALERT_DEF_COMPONENT_NAME));
entity.setDefinitionName((String) requestMap.get(ALERT_DEF_NAME));
+ entity.setLabel((String) requestMap.get(ALERT_DEF_LABEL));
boolean enabled = requestMap.containsKey(ALERT_DEF_ENABLED) ?
Boolean.parseBoolean((String)requestMap.get(ALERT_DEF_ENABLED)) : true;
-
+
entity.setEnabled(enabled);
entity.setHash(UUID.randomUUID().toString());
entity.setScheduleInterval(interval);
entity.setServiceName((String) requestMap.get(ALERT_DEF_SERVICE_NAME));
entity.setSourceType((String) requestMap.get(ALERT_DEF_SOURCE_TYPE));
entity.setSource(jsonObj.toString());
-
+
Scope scope = null;
String desiredScope = (String) requestMap.get(ALERT_DEF_SCOPE);
if (null != desiredScope && desiredScope.length() > 0) {
@@ -190,17 +199,18 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
public Set<Resource> getResources(Request request, Predicate predicate)
throws SystemException, UnsupportedPropertyException,
NoSuchResourceException, NoSuchParentResourceException {
-
+
Set<String> requestPropertyIds = getRequestPropertyIds(request, predicate);
-
+
Set<Resource> results = new HashSet<Resource>();
-
+
for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) {
String clusterName = (String) propertyMap.get(ALERT_DEF_CLUSTER_NAME);
-
- if (null == clusterName || clusterName.isEmpty())
+
+ if (null == clusterName || clusterName.isEmpty()) {
throw new IllegalArgumentException("Invalid argument, cluster name is required");
-
+ }
+
String id = (String) propertyMap.get(ALERT_DEF_ID);
if (null != id) {
AlertDefinitionEntity entity = alertDefinitionDAO.findById(Long.parseLong(id));
@@ -208,14 +218,14 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
results.add(toResource(false, clusterName, entity, requestPropertyIds));
}
} else {
-
+
Cluster cluster = null;
try {
cluster = getManagementController().getClusters().getCluster(clusterName);
} catch (AmbariException e) {
throw new NoSuchResourceException("Parent Cluster resource doesn't exist", e);
}
-
+
List<AlertDefinitionEntity> entities = alertDefinitionDAO.findAll(
cluster.getClusterId());
@@ -224,7 +234,7 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
}
}
}
-
+
return results;
}
@@ -236,40 +246,44 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
for (Map<String, Object> requestPropMap : request.getProperties()) {
for (Map<String, Object> propertyMap : getPropertyMaps(requestPropMap, predicate)) {
Long id = (Long) propertyMap.get(ALERT_DEF_ID);
-
+
AlertDefinitionEntity entity = alertDefinitionDAO.findById(id.longValue());
- if (null == entity)
+ if (null == entity) {
continue;
+ }
- if (propertyMap.containsKey(ALERT_DEF_NAME))
+ if (propertyMap.containsKey(ALERT_DEF_NAME)) {
entity.setDefinitionName((String) propertyMap.get(ALERT_DEF_NAME));
-
+ }
+
if (propertyMap.containsKey(ALERT_DEF_ENABLED)) {
entity.setEnabled(Boolean.parseBoolean(
(String) propertyMap.get(ALERT_DEF_ENABLED)));
}
-
+
if (propertyMap.containsKey(ALERT_DEF_INTERVAL)) {
entity.setScheduleInterval(Integer.valueOf(
(String) propertyMap.get(ALERT_DEF_INTERVAL)));
}
-
+
if (propertyMap.containsKey(ALERT_DEF_SCOPE)){
Scope scope = null;
String desiredScope = (String) propertyMap.get(ALERT_DEF_SCOPE);
-
- if (null != desiredScope && desiredScope.length() > 0)
+
+ if (null != desiredScope && desiredScope.length() > 0) {
scope = Scope.valueOf((desiredScope));
-
+ }
+
entity.setScope(scope);
}
-
- if (propertyMap.containsKey(ALERT_DEF_SOURCE_TYPE))
+
+ if (propertyMap.containsKey(ALERT_DEF_SOURCE_TYPE)) {
entity.setSourceType((String) propertyMap.get(ALERT_DEF_SOURCE_TYPE));
-
+ }
+
JsonObject jsonObj = new JsonObject();
-
+
for (Entry<String, Object> entry : propertyMap.entrySet()) {
String propCat = PropertyHelper.getPropertyCategory(entry.getKey());
String propName = PropertyHelper.getPropertyName(entry.getKey());
@@ -278,16 +292,16 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
jsonObj.addProperty(propName, entry.getValue().toString());
}
}
-
+
entity.setHash(UUID.randomUUID().toString());
-
+
alertDefinitionDAO.merge(entity);
}
}
-
+
notifyUpdate(Resource.Type.AlertDefinition, request, predicate);
- return getRequestStatus(null);
+ return getRequestStatus(null);
}
@Override
@@ -297,7 +311,7 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
Set<Resource> resources = getResources(
new RequestImpl(null, null, null, null), predicate);
-
+
Set<Long> definitionIds = new HashSet<Long>();
for (final Resource resource : resources) {
@@ -307,7 +321,7 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
for (Long definitionId : definitionIds) {
LOG.info("Deleting alert definition {}", definitionId);
-
+
final AlertDefinitionEntity ad = alertDefinitionDAO.findById(definitionId.longValue());
modifyResources(new Command<Void>() {
@@ -323,11 +337,11 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
return getRequestStatus(null);
}
-
+
private Resource toResource(boolean isCollection, String clusterName,
AlertDefinitionEntity entity, Set<String> requestedIds) {
Resource resource = new ResourceImpl(Resource.Type.AlertDefinition);
-
+
setResourceProperty(resource, ALERT_DEF_CLUSTER_NAME, clusterName, requestedIds);
setResourceProperty(resource, ALERT_DEF_ID, entity.getDefinitionId(), requestedIds);
setResourceProperty(resource, ALERT_DEF_NAME, entity.getDefinitionName(), requestedIds);
@@ -337,22 +351,24 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
setResourceProperty(resource, ALERT_DEF_ENABLED, Boolean.valueOf(entity.getEnabled()), requestedIds);
setResourceProperty(resource, ALERT_DEF_SCOPE, entity.getScope(), requestedIds);
setResourceProperty(resource, ALERT_DEF_SOURCE_TYPE, entity.getSourceType(), requestedIds);
-
+ setResourceProperty(resource, ALERT_DEF_LABEL, entity.getLabel(),
+ requestedIds);
+
if (!isCollection && null != resource.getPropertyValue(ALERT_DEF_SOURCE_TYPE)) {
-
+
try {
Map<String, String> map = gson.<Map<String, String>>fromJson(entity.getSource(), Map.class);
-
+
for (Entry<String, String> entry : map.entrySet()) {
String subProp = PropertyHelper.getPropertyId(ALERT_DEF_SOURCE, entry.getKey());
- resource.setProperty(subProp, entry.getValue());
+ resource.setProperty(subProp, entry.getValue());
}
} catch (Exception e) {
LOG.error("Could not coerce alert JSON into a type");
}
}
-
+
return resource;
}
-
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/bbd9179a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
index 23ad8f4..0062388 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
@@ -74,6 +74,9 @@ public class AlertDefinitionEntity {
@Column(name = "definition_name", nullable = false, length = 255)
private String definitionName;
+ @Column(name = "label", nullable = true, length = 255)
+ private String label;
+
@Column(name = "scope", length = 255)
@Enumerated(value = EnumType.STRING)
private Scope scope;
@@ -353,6 +356,25 @@ public class AlertDefinitionEntity {
}
/**
+ * Sets a human readable label for this alert definition.
+ *
+ * @param label
+ * the label or {@code null} if none.
+ */
+ public void setLabel(String label) {
+ this.label = label;
+ }
+
+ /**
+ * Gets the label for this alert definition.
+ *
+ * @return the label or {@code null} if none.
+ */
+ public String getLabel() {
+ return label;
+ }
+
+ /**
* Called before {@link EntityManager#remove(Object)} for this entity, removes
* the non-owning relationship between definitions and groups.
*/
http://git-wip-us.apache.org/repos/asf/ambari/blob/bbd9179a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index bef01c1..144900e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -574,6 +574,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
columns.add(new DBColumnInfo("service_name", String.class, 255, null, false));
columns.add(new DBColumnInfo("component_name", String.class, 255, null, true));
columns.add(new DBColumnInfo("scope", String.class, 255, null, true));
+ columns.add(new DBColumnInfo("label", String.class, 255, null, true));
columns.add(new DBColumnInfo("enabled", Short.class, 1, 1, false));
columns.add(new DBColumnInfo("schedule_interval", Integer.class, null, null, false));
columns.add(new DBColumnInfo("source_type", String.class, 255, null, false));
http://git-wip-us.apache.org/repos/asf/ambari/blob/bbd9179a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index b39ca5d..fe39612 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -157,6 +157,7 @@ CREATE TABLE alert_definition (
service_name VARCHAR(255) NOT NULL,
component_name VARCHAR(255),
scope VARCHAR(255),
+ label VARCHAR(255),
enabled SMALLINT DEFAULT 1 NOT NULL,
schedule_interval INTEGER NOT NULL,
source_type VARCHAR(255) NOT NULL,
http://git-wip-us.apache.org/repos/asf/ambari/blob/bbd9179a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 500313b..e311646 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -148,6 +148,7 @@ CREATE TABLE alert_definition (
service_name VARCHAR2(255) NOT NULL,
component_name VARCHAR2(255),
scope VARCHAR2(255),
+ label VARCHAR2(255),
enabled NUMBER(1) DEFAULT 1 NOT NULL,
schedule_interval NUMBER(10) NOT NULL,
source_type VARCHAR2(255) NOT NULL,
http://git-wip-us.apache.org/repos/asf/ambari/blob/bbd9179a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index a13f415..25d973f 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -181,6 +181,7 @@ CREATE TABLE alert_definition (
service_name VARCHAR(255) NOT NULL,
component_name VARCHAR(255),
scope VARCHAR(255),
+ label VARCHAR(255),
enabled SMALLINT DEFAULT 1 NOT NULL,
schedule_interval INTEGER NOT NULL,
source_type VARCHAR(255) NOT NULL,
http://git-wip-us.apache.org/repos/asf/ambari/blob/bbd9179a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index ff38b24..2d5b267 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -246,6 +246,7 @@ CREATE TABLE ambari.alert_definition (
service_name VARCHAR(255) NOT NULL,
component_name VARCHAR(255),
scope VARCHAR(255),
+ label VARCHAR(255),
enabled SMALLINT DEFAULT 1 NOT NULL,
schedule_interval INTEGER NOT NULL,
source_type VARCHAR(255) NOT NULL,
http://git-wip-us.apache.org/repos/asf/ambari/blob/bbd9179a/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index 55b451e..aa2bf71 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -416,6 +416,7 @@
"AlertDefinition/component_name",
"AlertDefinition/id",
"AlertDefinition/name",
+ "AlertDefinition/label",
"AlertDefinition/interval",
"AlertDefinition/enabled",
"AlertDefinition/scope",
http://git-wip-us.apache.org/repos/asf/ambari/blob/bbd9179a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
index fc57389..bf51ecb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
@@ -56,91 +56,98 @@ import org.junit.Test;
public class AlertDefinitionResourceProviderTest {
AlertDefinitionDAO dao = null;
-
+
@Before
public void before() {
dao = createStrictMock(AlertDefinitionDAO.class);
-
+
AlertDefinitionResourceProvider.init(dao);
}
-
+
@Test
public void testGetResourcesNoPredicate() throws Exception {
AlertDefinitionResourceProvider provider = createProvider(null);
-
+
Request request = PropertyHelper.getReadRequest("AlertDefinition/cluster_name",
"AlertDefinition/id");
-
+
Set<Resource> results = provider.getResources(request, null);
-
+
assertEquals(0, results.size());
- }
+ }
@Test
public void testGetResourcesClusterPredicate() throws Exception {
Request request = PropertyHelper.getReadRequest(
AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME,
AlertDefinitionResourceProvider.ALERT_DEF_ID,
- AlertDefinitionResourceProvider.ALERT_DEF_NAME);
-
+ AlertDefinitionResourceProvider.ALERT_DEF_NAME,
+ AlertDefinitionResourceProvider.ALERT_DEF_LABEL);
+
AmbariManagementController amc = createMock(AmbariManagementController.class);
Clusters clusters = createMock(Clusters.class);
Cluster cluster = createMock(Cluster.class);
expect(amc.getClusters()).andReturn(clusters).atLeastOnce();
expect(clusters.getCluster((String) anyObject())).andReturn(cluster).atLeastOnce();
expect(cluster.getClusterId()).andReturn(Long.valueOf(1)).anyTimes();
-
+
Predicate predicate = new PredicateBuilder().property(
- AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME).equals("c1").toPredicate();
-
+ AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME).equals("c1").toPredicate();
+
expect(dao.findAll(1L)).andReturn(getMockEntities());
replay(amc, clusters, cluster, dao);
-
- AlertDefinitionResourceProvider provider = createProvider(amc);
+
+ AlertDefinitionResourceProvider provider = createProvider(amc);
Set<Resource> results = provider.getResources(request, predicate);
-
+
assertEquals(1, results.size());
-
+
Resource r = results.iterator().next();
-
+
Assert.assertEquals("my_def", r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_NAME));
-
+
+ Assert.assertEquals("Mock Label",
+ r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_LABEL));
+
verify(amc, clusters, cluster, dao);
}
-
+
@Test
public void testGetSingleResource() throws Exception {
Request request = PropertyHelper.getReadRequest(
AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME,
AlertDefinitionResourceProvider.ALERT_DEF_ID,
AlertDefinitionResourceProvider.ALERT_DEF_NAME,
+ AlertDefinitionResourceProvider.ALERT_DEF_LABEL,
AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE);
-
+
AmbariManagementController amc = createMock(AmbariManagementController.class);
Clusters clusters = createMock(Clusters.class);
Cluster cluster = createMock(Cluster.class);
expect(amc.getClusters()).andReturn(clusters).atLeastOnce();
expect(clusters.getCluster((String) anyObject())).andReturn(cluster).atLeastOnce();
expect(cluster.getClusterId()).andReturn(Long.valueOf(1)).anyTimes();
-
+
Predicate predicate = new PredicateBuilder().property(
AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME).equals("c1")
- .and().property(AlertDefinitionResourceProvider.ALERT_DEF_ID).equals("1").toPredicate();
-
+ .and().property(AlertDefinitionResourceProvider.ALERT_DEF_ID).equals("1").toPredicate();
+
expect(dao.findById(1L)).andReturn(getMockEntities().get(0));
replay(amc, clusters, cluster, dao);
-
- AlertDefinitionResourceProvider provider = createProvider(amc);
+
+ AlertDefinitionResourceProvider provider = createProvider(amc);
Set<Resource> results = provider.getResources(request, predicate);
-
+
assertEquals(1, results.size());
-
+
Resource r = results.iterator().next();
-
+
Assert.assertEquals("my_def", r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_NAME));
Assert.assertEquals("metric", r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE));
+ Assert.assertEquals("Mock Label",
+ r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_LABEL));
Assert.assertNotNull(r.getPropertyValue("AlertDefinition/source/type"));
}
@@ -156,24 +163,26 @@ public class AlertDefinitionResourceProviderTest {
Capture<AlertDefinitionEntity> entityCapture = new Capture<AlertDefinitionEntity>();
dao.create(capture(entityCapture));
expectLastCall();
-
+
replay(amc, clusters, cluster, dao);
-
+
AlertDefinitionResourceProvider provider = createProvider(amc);
-
+
Map<String, Object> requestProps = new HashMap<String, Object>();
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME, "c1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_INTERVAL, "1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_NAME, "my_def");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SERVICE_NAME, "HDFS");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE, "METRIC");
-
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_LABEL,
+ "Mock Label (Create)");
+
Request request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
provider.createResources(request);
-
+
Assert.assertTrue(entityCapture.hasCaptured());
- AlertDefinitionEntity entity = entityCapture.getValue();
+ AlertDefinitionEntity entity = entityCapture.getValue();
Assert.assertNotNull(entity);
Assert.assertEquals(Long.valueOf(1), entity.getClusterId());
@@ -186,11 +195,12 @@ public class AlertDefinitionResourceProviderTest {
Assert.assertEquals("HDFS", entity.getServiceName());
Assert.assertNotNull(entity.getSource());
Assert.assertEquals("METRIC", entity.getSourceType());
-
+ Assert.assertEquals("Mock Label (Create)", entity.getLabel());
+
verify(amc, clusters, cluster, dao);
}
-
+
@Test
public void testUpdateResources() throws Exception {
AmbariManagementController amc = createMock(AmbariManagementController.class);
@@ -203,40 +213,40 @@ public class AlertDefinitionResourceProviderTest {
Capture<AlertDefinitionEntity> entityCapture = new Capture<AlertDefinitionEntity>();
dao.create(capture(entityCapture));
expectLastCall();
-
+
replay(amc, clusters, cluster, dao);
-
+
Map<String, Object> requestProps = new HashMap<String, Object>();
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME, "c1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_INTERVAL, "1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_NAME, "my_def");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SERVICE_NAME, "HDFS");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE, "METRIC");
-
+
Request request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
AlertDefinitionResourceProvider provider = createProvider(amc);
-
+
provider.createResources(request);
-
+
Assert.assertTrue(entityCapture.hasCaptured());
- AlertDefinitionEntity entity = entityCapture.getValue();
+ AlertDefinitionEntity entity = entityCapture.getValue();
Assert.assertNotNull(entity);
-
+
Predicate p = new PredicateBuilder().property(
AlertDefinitionResourceProvider.ALERT_DEF_ID).equals("1").and().property(
AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME).equals("c1").toPredicate();
// everything is mocked, there is no DB
entity.setDefinitionId(Long.valueOf(1));
-
+
String oldName = entity.getDefinitionName();
String oldHash = entity.getHash();
-
+
resetToStrict(dao);
expect(dao.findById(1L)).andReturn(entity).anyTimes();
expect(dao.merge((AlertDefinitionEntity) anyObject())).andReturn(entity).anyTimes();
replay(dao);
-
+
requestProps = new HashMap<String, Object>();
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME, "c1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_INTERVAL, "1");
@@ -244,15 +254,15 @@ public class AlertDefinitionResourceProviderTest {
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SERVICE_NAME, "HDFS");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE, "METRIC");
request = PropertyHelper.getUpdateRequest(requestProps, null);
-
+
provider.updateResources(request, p);
Assert.assertFalse(oldHash.equals(entity.getHash()));
Assert.assertFalse(oldName.equals(entity.getDefinitionName()));
-
+
verify(amc, clusters, cluster, dao);
}
-
+
@Test
public void testDeleteResources() throws Exception {
AmbariManagementController amc = createMock(AmbariManagementController.class);
@@ -265,9 +275,9 @@ public class AlertDefinitionResourceProviderTest {
Capture<AlertDefinitionEntity> entityCapture = new Capture<AlertDefinitionEntity>();
dao.create(capture(entityCapture));
expectLastCall();
-
+
replay(amc, clusters, cluster, dao);
-
+
AlertDefinitionResourceProvider provider = createProvider(amc);
Map<String, Object> requestProps = new HashMap<String, Object>();
@@ -276,57 +286,58 @@ public class AlertDefinitionResourceProviderTest {
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_NAME, "my_def");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SERVICE_NAME, "HDFS");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE, "METRIC");
-
+
Request request = PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
provider.createResources(request);
Assert.assertTrue(entityCapture.hasCaptured());
- AlertDefinitionEntity entity = entityCapture.getValue();
+ AlertDefinitionEntity entity = entityCapture.getValue();
Assert.assertNotNull(entity);
-
+
Predicate p = new PredicateBuilder().property(
AlertDefinitionResourceProvider.ALERT_DEF_ID).equals("1").and().property(
AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME).equals("c1").toPredicate();
// everything is mocked, there is no DB
entity.setDefinitionId(Long.valueOf(1));
-
+
resetToStrict(dao);
expect(dao.findById(1L)).andReturn(entity).anyTimes();
dao.remove(capture(entityCapture));
expectLastCall();
replay(dao);
-
+
provider.deleteResources(p);
-
+
AlertDefinitionEntity entity1 = entityCapture.getValue();
Assert.assertEquals(Long.valueOf(1), entity1.getDefinitionId());
-
+
verify(amc, clusters, cluster, dao);
-
+
}
-
+
private AlertDefinitionResourceProvider createProvider(AmbariManagementController amc) {
return new AlertDefinitionResourceProvider(
PropertyHelper.getPropertyIds(Resource.Type.AlertDefinition),
PropertyHelper.getKeyPropertyIds(Resource.Type.AlertDefinition),
amc);
}
-
+
private List<AlertDefinitionEntity> getMockEntities() {
AlertDefinitionEntity entity = new AlertDefinitionEntity();
entity.setClusterId(Long.valueOf(1L));
entity.setComponentName(null);
entity.setDefinitionId(Long.valueOf(1L));
entity.setDefinitionName("my_def");
+ entity.setLabel("Mock Label");
entity.setEnabled(true);
entity.setHash("tmphash");
entity.setScheduleInterval(Integer.valueOf(2));
entity.setServiceName(null);
entity.setSourceType("metric");
entity.setSource("{'jmx': 'beanName/attributeName', 'host': '{{aa:123445}}'}");
-
+
return Arrays.asList(entity);
}
-
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/bbd9179a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
index 4033f03..6262a2b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
@@ -197,7 +197,7 @@ public class UpgradeCatalog170Test {
assertViewColumns(maskColumnCapture);
assertViewParameterColumns(maskedColumnCapture);
- assertEquals(11, alertDefinitionColumnCapture.getValue().size());
+ assertEquals(12, alertDefinitionColumnCapture.getValue().size());
assertEquals(11, alertHistoryColumnCapture.getValue().size());
assertEquals(6, alertCurrentColumnCapture.getValue().size());
assertEquals(4, alertGroupColumnCapture.getValue().size());
@@ -282,7 +282,7 @@ public class UpgradeCatalog170Test {
upgradeCatalog.updateConfigurationProperties("hbase-env",
Collections.singletonMap("hbase_regionserver_xmn_ratio", "0.2"), false, false);
expectLastCall();
-
+
upgradeCatalog.updateConfigurationProperties("yarn-env",
Collections.singletonMap("min_user_id", "1000"), false, false);
expectLastCall();
[27/50] [abbrv] git commit: AMBARI-6898. Config History: the version
history box are in wrong sorting sequence sometimes.(xiwang)
Posted by jo...@apache.org.
AMBARI-6898. Config History: the version history box are in wrong sorting sequence sometimes.(xiwang)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/40dc5b75
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/40dc5b75
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/40dc5b75
Branch: refs/heads/branch-alerts-dev
Commit: 40dc5b75ee63358765d52a60d5751c6152100704
Parents: 30f8a87
Author: Xi Wang <xi...@apache.org>
Authored: Mon Aug 18 11:15:16 2014 -0700
Committer: Xi Wang <xi...@apache.org>
Committed: Tue Aug 19 11:36:34 2014 -0700
----------------------------------------------------------------------
ambari-web/app/views/common/configs/config_history_flow.js | 5 ++++-
ambari-web/app/views/main/dashboard/config_history_view.js | 1 -
2 files changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/40dc5b75/ambari-web/app/views/common/configs/config_history_flow.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/config_history_flow.js b/ambari-web/app/views/common/configs/config_history_flow.js
index 5f7bfad..1dcb0e0 100644
--- a/ambari-web/app/views/common/configs/config_history_flow.js
+++ b/ambari-web/app/views/common/configs/config_history_flow.js
@@ -281,7 +281,10 @@ App.ConfigHistoryFlowView = Em.View.extend({
});
},
serviceVersions: function () {
- return App.ServiceConfigVersion.find().filterProperty('serviceName', this.get('serviceName'));
+ var allServiceVersions = App.ServiceConfigVersion.find().filterProperty('serviceName', this.get('serviceName'));
+ return allServiceVersions.sort(function (a, b) {
+ return Em.get(a, 'createTime') - Em.get(b, 'createTime');
+ });
}.property('serviceName'),
/**
* move back to the previous service version
http://git-wip-us.apache.org/repos/asf/ambari/blob/40dc5b75/ambari-web/app/views/main/dashboard/config_history_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/config_history_view.js b/ambari-web/app/views/main/dashboard/config_history_view.js
index 5f9a943..d45ed9e 100644
--- a/ambari-web/app/views/main/dashboard/config_history_view.js
+++ b/ambari-web/app/views/main/dashboard/config_history_view.js
@@ -139,7 +139,6 @@ App.MainConfigHistoryView = App.TableView.extend({
emptyValue: Em.I18n.t('common.all')
}),
-
modifiedFilterView: filters.createSelectView({
column: 3,
fieldType: 'filter-input-width',
[36/50] [abbrv] git commit: AMBARI-6933. Rolling restart not working.
Posted by jo...@apache.org.
AMBARI-6933. Rolling restart not working.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f72b323f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f72b323f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f72b323f
Branch: refs/heads/branch-alerts-dev
Commit: f72b323f7e468da16067607221a08d9d0926bf30
Parents: 068cafa
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Tue Aug 19 22:46:34 2014 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Tue Aug 19 22:46:34 2014 -0700
----------------------------------------------------------------------
.../internal/InternalAuthenticationToken.java | 17 ++++++++++++++---
1 file changed, 14 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f72b323f/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
index 1fc86a2..8752278 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
@@ -17,26 +17,37 @@
*/
package org.apache.ambari.server.security.authorization.internal;
+
+import org.apache.ambari.server.orm.entities.PermissionEntity;
+import org.apache.ambari.server.orm.entities.PrivilegeEntity;
+import org.apache.ambari.server.security.authorization.AmbariGrantedAuthority;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
-import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.User;
-
import java.util.Collection;
import java.util.Collections;
public class InternalAuthenticationToken implements Authentication {
private static final String INTERNAL_NAME = "internal";
+ private static final PrivilegeEntity ADMIN_PRIV_ENTITY = new PrivilegeEntity();
// used in ClustersImpl, checkPermissions
private static final Collection<? extends GrantedAuthority> AUTHORITIES =
- Collections.singleton(new SimpleGrantedAuthority("AMBARI.ADMIN"));
+ Collections.singleton(new AmbariGrantedAuthority(ADMIN_PRIV_ENTITY));
private static final User INTERNAL_USER = new User(INTERNAL_NAME, "empty", AUTHORITIES);
private String token;
private boolean authenticated = false;
+ static{
+ PermissionEntity pe = new PermissionEntity();
+ pe.setId(PermissionEntity.AMBARI_ADMIN_PERMISSION);
+ pe.setPermissionName(PermissionEntity.AMBARI_ADMIN_PERMISSION_NAME);
+
+ ADMIN_PRIV_ENTITY.setPermission(pe);
+ }
+
public InternalAuthenticationToken(String tokenString) {
this.token = tokenString;
}
[24/50] [abbrv] git commit: AMBARI-6917. DataNode didn't start
recommissioning after decommission,
turning on and off Maintenance Mode (dlysnichenko)
Posted by jo...@apache.org.
AMBARI-6917. DataNode didn't start recommissioning after decommission, turning on and off Maintenance Mode (dlysnichenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1c5ceb27
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1c5ceb27
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1c5ceb27
Branch: refs/heads/branch-alerts-dev
Commit: 1c5ceb273a81b0e553cf3c7585db2c0be670cec0
Parents: 0c46e95
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Tue Aug 19 21:13:39 2014 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Tue Aug 19 21:14:35 2014 +0300
----------------------------------------------------------------------
.../internal/InternalAuthenticationToken.java | 6 +++---
.../apache/ambari/server/state/cluster/ClustersImpl.java | 10 ++++++++++
2 files changed, 13 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/1c5ceb27/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
index 5d865ea..1fc86a2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
@@ -17,7 +17,6 @@
*/
package org.apache.ambari.server.security.authorization.internal;
-
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
@@ -29,14 +28,15 @@ import java.util.Collections;
public class InternalAuthenticationToken implements Authentication {
private static final String INTERNAL_NAME = "internal";
+
+ // used in ClustersImpl, checkPermissions
private static final Collection<? extends GrantedAuthority> AUTHORITIES =
- Collections.singleton(new SimpleGrantedAuthority("ADMIN"));
+ Collections.singleton(new SimpleGrantedAuthority("AMBARI.ADMIN"));
private static final User INTERNAL_USER = new User(INTERNAL_NAME, "empty", AUTHORITIES);
private String token;
private boolean authenticated = false;
-
public InternalAuthenticationToken(String tokenString) {
this.token = tokenString;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/1c5ceb27/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
index e67ea45..c27797e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
@@ -55,6 +55,7 @@ import org.apache.ambari.server.state.host.HostFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
import javax.persistence.RollbackException;
import java.util.ArrayList;
@@ -736,6 +737,15 @@ public class ClustersImpl implements Clusters {
}
}
}
+
+ // SimpleGrantedAuthority is required by InternalAuthenticationToken for internal authorization by token
+ if (grantedAuthority instanceof SimpleGrantedAuthority){
+ SimpleGrantedAuthority authority = (SimpleGrantedAuthority) grantedAuthority;
+ if ("AMBARI.ADMIN".equals(authority.getAuthority())) {
+ return true;
+ }
+
+ }
}
// TODO : should we log this?
return false;
[49/50] [abbrv] git commit: AMBARI-6915 - Alerts: Change
AlertDefinition to Support a Reporting Member (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-6915 - Alerts: Change AlertDefinition to Support a Reporting Member (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0ac9cb3f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0ac9cb3f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0ac9cb3f
Branch: refs/heads/branch-alerts-dev
Commit: 0ac9cb3facaff211c5c679d609f59aaca633d267
Parents: 72ebd26
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Aug 19 10:14:30 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Aug 20 10:54:28 2014 -0400
----------------------------------------------------------------------
.../server/api/services/AmbariMetaInfo.java | 16 +---
.../AlertDefinitionResourceProvider.java | 63 ++++++++++++--
.../server/state/alert/AggregateSource.java | 38 ++++++++
.../server/state/alert/AlertDefinition.java | 17 ++++
.../state/alert/AlertDefinitionFactory.java | 84 +++++++++++++++---
.../server/state/alert/PercentSource.java | 76 ++++++++++++++++
.../ambari/server/state/alert/PortSource.java | 46 ++++++++++
.../ambari/server/state/alert/Reporting.java | 92 ++++++++++++++++++++
.../ambari/server/state/alert/ScriptSource.java | 36 ++++++++
.../ambari/server/state/alert/Source.java | 13 ++-
.../ambari/server/state/alert/SourceType.java | 9 +-
.../server/api/services/AmbariMetaInfoTest.java | 51 +++++++++++
.../AlertDefinitionResourceProviderTest.java | 89 +++++++++++++++++--
.../stacks/HDP/2.0.5/services/HDFS/alerts.json | 61 ++++++++-----
14 files changed, 626 insertions(+), 65 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
index 3347a77..2eec33b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
@@ -31,7 +31,6 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Scanner;
import java.util.Set;
import java.util.concurrent.ExecutorService;
@@ -1093,19 +1092,6 @@ public class AmbariMetaInfo {
return null;
}
- Set<AlertDefinition> defs = new HashSet<AlertDefinition>();
- Map<String, List<AlertDefinition>> map = alertDefinitionFactory.getAlertDefinitions(alertsFile);
-
- for (Entry<String, List<AlertDefinition>> entry : map.entrySet()) {
- for (AlertDefinition ad : entry.getValue()) {
- ad.setServiceName(serviceName);
- if (!entry.getKey().equals("service")) {
- ad.setComponentName(entry.getKey());
- }
- }
- defs.addAll(entry.getValue());
- }
-
- return defs;
+ return alertDefinitionFactory.getAlertDefinitions(alertsFile, serviceName);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
index f20a9a9..bed25e7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
@@ -61,18 +61,26 @@ import com.google.inject.Injector;
*/
public class AlertDefinitionResourceProvider extends AbstractControllerResourceProvider {
+ protected static final String ALERT_DEF = "AlertDefinition";
+
protected static final String ALERT_DEF_CLUSTER_NAME = "AlertDefinition/cluster_name";
protected static final String ALERT_DEF_ID = "AlertDefinition/id";
protected static final String ALERT_DEF_NAME = "AlertDefinition/name";
protected static final String ALERT_DEF_LABEL = "AlertDefinition/label";
protected static final String ALERT_DEF_INTERVAL = "AlertDefinition/interval";
- protected static final String ALERT_DEF_SOURCE_TYPE = "AlertDefinition/source/type";
- protected static final String ALERT_DEF_SOURCE = "AlertDefinition/source";
protected static final String ALERT_DEF_SERVICE_NAME = "AlertDefinition/service_name";
protected static final String ALERT_DEF_COMPONENT_NAME = "AlertDefinition/component_name";
protected static final String ALERT_DEF_ENABLED = "AlertDefinition/enabled";
protected static final String ALERT_DEF_SCOPE = "AlertDefinition/scope";
+ protected static final String ALERT_DEF_SOURCE = "AlertDefinition/source";
+ protected static final String ALERT_DEF_SOURCE_TYPE = "AlertDefinition/source/type";
+ protected static final String ALERT_DEF_SOURCE_REPORTING = "AlertDefinition/source/reporting";
+ protected static final String ALERT_DEF_SOURCE_REPORTING_OK = "AlertDefinition/source/reporting/ok";
+ protected static final String ALERT_DEF_SOURCE_REPORTING_WARNING = "AlertDefinition/source/reporting/warning";
+ protected static final String ALERT_DEF_SOURCE_REPORTING_CRITICAL = "AlertDefinition/source/reporting/critical";
+
+
private static Set<String> pkPropertyIds = new HashSet<String>(
Arrays.asList(ALERT_DEF_ID, ALERT_DEF_NAME));
@@ -176,21 +184,64 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
SourceType.class)));
}
- JsonObject jsonObj = new JsonObject();
+ // !!! Alert structures contain nested objects; reconstruct a valid
+ // JSON from the flat, exploded properties so that a Source instance can
+ // be properly persisted
+ JsonObject source = new JsonObject();
+ JsonObject reporting = new JsonObject();
+ JsonObject reportingOk = new JsonObject();
+ JsonObject reportingWarning = new JsonObject();
+ JsonObject reportingCritical = new JsonObject();
for (Entry<String, Object> entry : requestMap.entrySet()) {
String propCat = PropertyHelper.getPropertyCategory(entry.getKey());
String propName = PropertyHelper.getPropertyName(entry.getKey());
+ if (propCat.equals(ALERT_DEF) && "source".equals(propName)) {
+ source.addProperty(propName, entry.getValue().toString());
+ }
+
if (propCat.equals(ALERT_DEF_SOURCE)) {
- jsonObj.addProperty(propName, entry.getValue().toString());
+ source.addProperty(propName, entry.getValue().toString());
+ }
+
+ if (propCat.equals(ALERT_DEF_SOURCE_REPORTING)) {
+ reporting.addProperty(propName, entry.getValue().toString());
+ }
+
+ if (propCat.equals(ALERT_DEF_SOURCE_REPORTING_OK)) {
+ reportingOk.addProperty(propName, entry.getValue().toString());
+ }
+
+ if (propCat.equals(ALERT_DEF_SOURCE_REPORTING_WARNING)) {
+ reportingWarning.addProperty(propName, entry.getValue().toString());
+ }
+
+ if (propCat.equals(ALERT_DEF_SOURCE_REPORTING_CRITICAL)) {
+ reportingCritical.addProperty(propName, entry.getValue().toString());
}
}
- if (0 == jsonObj.entrySet().size()) {
+ if (0 == source.entrySet().size()) {
throw new IllegalArgumentException("Source must be specified");
}
+ if (reportingOk.entrySet().size() > 0) {
+ reporting.add("ok", reportingOk);
+ }
+
+ if (reportingWarning.entrySet().size() > 0) {
+ reporting.add("warning", reportingWarning);
+ }
+
+ if (reportingCritical.entrySet().size() > 0) {
+ reporting.add("critical", reportingCritical);
+ }
+
+ if (reporting.entrySet().size() > 0) {
+ source.add("reporting", reporting);
+ }
+
Cluster cluster = getManagementController().getClusters().getCluster(clusterName);
AlertDefinitionEntity entity = new AlertDefinitionEntity();
@@ -207,7 +258,7 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
entity.setScheduleInterval(interval);
entity.setServiceName((String) requestMap.get(ALERT_DEF_SERVICE_NAME));
entity.setSourceType((String) requestMap.get(ALERT_DEF_SOURCE_TYPE));
- entity.setSource(jsonObj.toString());
+ entity.setSource(source.toString());
Scope scope = null;
String desiredScope = (String) requestMap.get(ALERT_DEF_SCOPE);
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AggregateSource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AggregateSource.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AggregateSource.java
new file mode 100644
index 0000000..d056c40
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AggregateSource.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.alert;
+
+import com.google.gson.annotations.SerializedName;
+
+/**
+ * Alert when the source type is defined as {@link SourceType#AGGREGATE}.
+ * Aggregate alerts are alerts that are triggered by collecting the states of
+ * all instances of the defined alert and calculating the overall state.
+ */
+public class AggregateSource extends Source {
+
+ @SerializedName("alert_name")
+ private String m_alertName = null;
+
+ /**
+ * @return the unique name of the alert that will have its values aggregated.
+ */
+ public String getAlertName() {
+ return m_alertName;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinition.java
index 8d9b3c2..15f4bfe 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinition.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinition.java
@@ -31,6 +31,7 @@ public class AlertDefinition {
private int interval = 1;
private boolean enabled = true;
private Source source = null;
+ private String label = null;
/**
* @return the service name
@@ -117,6 +118,22 @@ public class AlertDefinition {
source = definitionSource;
}
+ /**
+ * @return the label for the definition or {@code null} if none.
+ */
+ public String getLabel() {
+ return label;
+ }
+
+ /**
+ * Sets the label for this definition.
+ *
+ * @param definitionLabel
+ */
+ public void setLabel(String definitionLabel) {
+ label = definitionLabel;
+ }
+
@Override
public boolean equals(Object obj) {
if (null == obj || !obj.getClass().equals(AlertDefinition.class)) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionFactory.java
index 1775f88..5ddb521 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionFactory.java
@@ -20,8 +20,11 @@ package org.apache.ambari.server.state.alert;
import java.io.File;
import java.io.FileReader;
import java.lang.reflect.Type;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
@@ -52,34 +55,58 @@ public class AlertDefinitionFactory {
/**
* Builder used for type adapter registration.
*/
- private final GsonBuilder m_builder = new GsonBuilder().registerTypeAdapter(
- Source.class, new AlertDefinitionSourceAdapter());
+ private final GsonBuilder m_builder = new GsonBuilder();
/**
* Thread safe deserializer.
*/
- private final Gson m_gson = m_builder.create();
+ private final Gson m_gson;
+ /**
+ * Constructor.
+ */
+ public AlertDefinitionFactory() {
+ m_builder.registerTypeAdapter(Source.class,
+ new AlertDefinitionSourceAdapter());
+
+ m_gson = m_builder.create();
+ }
/**
* Gets a list of all of the alert definitions defined in the specified JSON
- * {@link File}.
+ * {@link File} for the given service.
*
* @param alertDefinitionFile
+ * @param serviceName
* @return
* @throws AmbariException
* if there was a problem reading the file or parsing the JSON.
*/
- public Map<String, List<AlertDefinition>> getAlertDefinitions(
- File alertDefinitionFile) throws AmbariException {
+ public Set<AlertDefinition> getAlertDefinitions(File alertDefinitionFile,
+ String serviceName) throws AmbariException {
+ Map<String,List<AlertDefinition>> definitionMap = null;
+
try {
Type type = new TypeToken<Map<String, List<AlertDefinition>>>(){}.getType();
- return m_gson.fromJson(new FileReader(alertDefinitionFile), type);
+ definitionMap = m_gson.fromJson(new FileReader(alertDefinitionFile), type);
} catch (Exception e) {
LOG.error("Could not read the alert definition file", e);
throw new AmbariException("Could not read alert definition file", e);
}
+
+ Set<AlertDefinition> definitions = new HashSet<AlertDefinition>();
+ for (Entry<String, List<AlertDefinition>> entry : definitionMap.entrySet()) {
+ for (AlertDefinition ad : entry.getValue()) {
+ ad.setServiceName(serviceName);
+ if (!entry.getKey().equals("service")) {
+ ad.setComponentName(entry.getKey());
+ }
+ }
+ definitions.addAll(entry.getValue());
+ }
+
+ return definitions;
}
/**
@@ -103,6 +130,7 @@ public class AlertDefinitionFactory {
definition.setName(entity.getDefinitionName());
definition.setScope(entity.getScope());
definition.setServiceName(entity.getServiceName());
+ definition.setLabel(entity.getLabel());
try{
String sourceJson = entity.getSource();
@@ -118,6 +146,16 @@ public class AlertDefinitionFactory {
}
/**
+ * Gets an instance of {@link Gson} that can correctly serialize and
+ * deserialize an {@link AlertDefinition}.
+ *
+ * @return a {@link Gson} instance (not {@code null}).
+ */
+ public Gson getGson() {
+ return m_gson;
+ }
+
+ /**
* Deserializes {@link Source} implementations.
*/
private static final class AlertDefinitionSourceAdapter implements JsonDeserializer<Source>{
@@ -130,21 +168,41 @@ public class AlertDefinitionFactory {
JsonObject jsonObj = (JsonObject) json;
SourceType type = SourceType.valueOf(jsonObj.get("type").getAsString());
- Class<? extends Source> cls = null;
+ Class<? extends Source> clazz = null;
switch (type) {
- case METRIC:
- cls = MetricSource.class;
+ case METRIC:{
+ clazz = MetricSource.class;
+ break;
+ }
+ case PORT:{
+ clazz = PortSource.class;
break;
+ }
+ case SCRIPT: {
+ clazz = ScriptSource.class;
+ break;
+ }
+ case AGGREGATE: {
+ clazz = AggregateSource.class;
+ break;
+ }
+ case PERCENT: {
+ clazz = PercentSource.class;
+ break;
+ }
default:
break;
}
- if (null != cls) {
- return context.deserialize(json, cls);
- } else {
+ if (null == clazz) {
+ LOG.warn(
+ "Unable to deserialize an alert definition with source type {}",
+ type);
return null;
}
+
+ return context.deserialize(json, clazz);
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/main/java/org/apache/ambari/server/state/alert/PercentSource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/PercentSource.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/PercentSource.java
new file mode 100644
index 0000000..ef79cfd
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/PercentSource.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.alert;
+
+import com.google.gson.annotations.SerializedName;
+
+/**
+ * Alert when the source type is defined as {@link SourceType#PERCENT}
+ */
+public class PercentSource extends Source {
+
+ @SerializedName("numerator")
+ private MetricFractionPart m_numerator = null;
+
+ @SerializedName("denominator")
+ private MetricFractionPart m_denominator = null;
+
+ /**
+ * Gets the numerator for the percent calculation.
+ *
+ * @return a metric value representing the numerator (never {@code null}).
+ */
+ public MetricFractionPart getNumerator() {
+ return m_numerator;
+ }
+
+ /**
+ * Gets the denominator for the percent calculation.
+ *
+ * @return a metric value representing the denominator (never {@code null}).
+ */
+ public MetricFractionPart getDenominator() {
+ return m_denominator;
+ }
+
+ /**
+ * The {@link MetricFractionPart} class represents either the numerator or the
+ * denominator of a fraction.
+ */
+ public static final class MetricFractionPart {
+ @SerializedName("jmx")
+ private String m_jmxInfo = null;
+
+ @SerializedName("ganglia")
+ private String m_gangliaInfo = null;
+
+ /**
+ * @return the jmx info, if this metric is jmx-based
+ */
+ public String getJmxInfo() {
+ return m_jmxInfo;
+ }
+
+ /**
+ * @return the ganglia info, if this metric is ganglia-based
+ */
+ public String getGangliaInfo() {
+ return m_gangliaInfo;
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/main/java/org/apache/ambari/server/state/alert/PortSource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/PortSource.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/PortSource.java
new file mode 100644
index 0000000..afb60dd
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/PortSource.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.alert;
+
+import com.google.gson.annotations.SerializedName;
+
+/**
+ * Alert when the source type is defined as {@link SourceType#PORT}
+ */
+public class PortSource extends Source {
+
+ @SerializedName("uri")
+ private String m_uri = null;
+
+ @SerializedName("port")
+ private int m_port = 0;
+
+ /**
+ * @return the URI to check for a valid port
+ */
+ public String getUri() {
+ return m_uri;
+ }
+
+ /**
+ * @return the port to check on the given URI.
+ */
+ public int getPort() {
+ return m_port;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/main/java/org/apache/ambari/server/state/alert/Reporting.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/Reporting.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/Reporting.java
new file mode 100644
index 0000000..7e63d43
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/Reporting.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.alert;
+
+import com.google.gson.annotations.SerializedName;
+
+/**
+ * The {@link Reporting} class represents the OK/WARNING/CRITICAL structures in
+ * an {@link AlertDefinition}.
+ */
+public class Reporting {
+
+ /**
+ *
+ */
+ @SerializedName("ok")
+ private ReportTemplate m_ok;
+
+ /**
+ *
+ */
+ @SerializedName("warning")
+ private ReportTemplate m_warning;
+
+ /**
+ *
+ */
+ @SerializedName("critical")
+ private ReportTemplate m_critical;
+
+ /**
+ * @return the WARNING structure or {@code null} if none.
+ */
+ public ReportTemplate getWarning() {
+ return m_warning;
+ }
+
+ /**
+ * @return the CRITICAL structure or {@code null} if none.
+ */
+ public ReportTemplate getCritical() {
+ return m_critical;
+ }
+
+ /**
+ * @return the OK structure or {@code null} if none.
+ */
+ public ReportTemplate getOk() {
+ return m_ok;
+ }
+
+ /**
+ * The {@link ReportTemplate} class is used to pair a label and threshold
+ * value.
+ */
+ public static final class ReportTemplate {
+ @SerializedName("text")
+ private String m_text;
+
+ @SerializedName("value")
+ private Double m_value = null;
+
+ /**
+ * @return the parameterized text of this template or {@code null} if none.
+ */
+ public String getText() {
+ return m_text;
+ }
+
+ /**
+ * @return the threshold value for this template or {@code null} if none.
+ */
+ public Double getValue() {
+ return m_value;
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/main/java/org/apache/ambari/server/state/alert/ScriptSource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/ScriptSource.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/ScriptSource.java
new file mode 100644
index 0000000..13a6057
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/ScriptSource.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.alert;
+
+import com.google.gson.annotations.SerializedName;
+
+/**
+ * Alert when the source type is defined as {@link SourceType#SCRIPT}
+ */
+public class ScriptSource extends Source {
+
+ @SerializedName("path")
+ private String m_path = null;
+
+ /**
+ * @return the path to the script file.
+ */
+ public String getPath() {
+ return m_path;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/main/java/org/apache/ambari/server/state/alert/Source.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/Source.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/Source.java
index f64b7d2..cdce41c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/Source.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/Source.java
@@ -17,6 +17,8 @@
*/
package org.apache.ambari.server.state.alert;
+import com.google.gson.annotations.SerializedName;
+
/**
* Abstract class that all known alert sources should extend.
*/
@@ -24,11 +26,20 @@ public abstract class Source {
private SourceType type;
+ @SerializedName("reporting")
+ private Reporting reporting;
+
/**
* @return the type
*/
public SourceType getType() {
return type;
}
-
+
+ /**
+ * @return
+ */
+ public Reporting getReporting() {
+ return reporting;
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/main/java/org/apache/ambari/server/state/alert/SourceType.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/SourceType.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/SourceType.java
index 8289d6f..18c13bd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/SourceType.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/SourceType.java
@@ -18,7 +18,7 @@
package org.apache.ambari.server.state.alert;
/**
- * Source type refers to how the alert is to be collected.
+ * Source type refers to how the alert is to be collected.
*/
public enum SourceType {
/**
@@ -36,5 +36,10 @@ public enum SourceType {
/**
* Source is an aggregate of a collection of other alert states
*/
- AGGREGATE
+ AGGREGATE,
+
+ /**
+ * Source is a ratio of two {@link #METRIC} values.
+ */
+ PERCENT;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
index 68cbc92..6b50e16 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
@@ -29,6 +29,7 @@ import java.io.File;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
@@ -54,6 +55,8 @@ import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.Stack;
import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.alert.AlertDefinition;
+import org.apache.ambari.server.state.alert.Reporting;
+import org.apache.ambari.server.state.alert.Source;
import org.apache.ambari.server.state.stack.MetricDefinition;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
@@ -1420,5 +1423,53 @@ public class AmbariMetaInfoTest {
Assert.assertNotNull(set);
Assert.assertTrue(set.size() > 0);
+ // find two different definitions and test each one
+ AlertDefinition nameNodeProcess = null;
+ AlertDefinition nameNodeCpu = null;
+
+ Iterator<AlertDefinition> iterator = set.iterator();
+ while (iterator.hasNext()) {
+ AlertDefinition definition = iterator.next();
+ if (definition.getName().equals("namenode_process")) {
+ nameNodeProcess = definition;
+ }
+
+ if (definition.getName().equals("namenode_cpu")) {
+ nameNodeCpu = definition;
+ }
+ }
+
+ assertNotNull(nameNodeProcess);
+ assertNotNull(nameNodeCpu);
+
+ assertEquals("NameNode host CPU Utilization", nameNodeCpu.getLabel());
+
+ Source source = nameNodeProcess.getSource();
+ assertNotNull(source);
+
+ // test namenode_process
+ Reporting reporting = source.getReporting();
+ assertNotNull(reporting);
+ assertNotNull(reporting.getOk());
+ assertNotNull(reporting.getOk().getText());
+ assertNull(reporting.getOk().getValue());
+ assertNotNull(reporting.getCritical());
+ assertNotNull(reporting.getCritical().getText());
+ assertNull(reporting.getCritical().getValue());
+ assertNull(reporting.getWarning());
+
+ // test namenode_cpu
+ source = nameNodeCpu.getSource();
+ reporting = source.getReporting();
+ assertNotNull(reporting);
+ assertNotNull(reporting.getOk());
+ assertNotNull(reporting.getOk().getText());
+ assertNull(reporting.getOk().getValue());
+ assertNotNull(reporting.getCritical());
+ assertNotNull(reporting.getCritical().getText());
+ assertNotNull(reporting.getCritical().getValue());
+ assertNotNull(reporting.getWarning());
+ assertNotNull(reporting.getWarning().getText());
+ assertNotNull(reporting.getWarning().getValue());
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
index 333f674..7df999e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
@@ -29,10 +29,12 @@ import static org.easymock.EasyMock.resetToStrict;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
+import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -44,18 +46,24 @@ import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.utilities.PredicateBuilder;
import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.metadata.ActionMetadata;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.alert.AlertDefinition;
+import org.apache.ambari.server.state.alert.AlertDefinitionFactory;
import org.apache.ambari.server.state.alert.AlertDefinitionHash;
+import org.apache.ambari.server.state.alert.Source;
+import org.apache.ambari.server.state.alert.SourceType;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
+import com.google.gson.Gson;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
@@ -69,6 +77,7 @@ public class AlertDefinitionResourceProviderTest {
private AlertDefinitionDAO dao = null;
private AlertDefinitionHash definitionHash = null;
+ private AlertDefinitionFactory m_factory = new AlertDefinitionFactory();
private Injector m_injector;
private static String DEFINITION_UUID = UUID.randomUUID().toString();
@@ -82,6 +91,7 @@ public class AlertDefinitionResourceProviderTest {
new InMemoryDefaultTestModule()).with(new MockModule()));
AlertDefinitionResourceProvider.init(m_injector);
+ m_injector.injectMembers(m_factory);
}
/**
@@ -149,6 +159,7 @@ public class AlertDefinitionResourceProviderTest {
AlertDefinitionResourceProvider.ALERT_DEF_ID,
AlertDefinitionResourceProvider.ALERT_DEF_NAME,
AlertDefinitionResourceProvider.ALERT_DEF_LABEL,
+ AlertDefinitionResourceProvider.ALERT_DEF_SOURCE,
AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE);
AmbariManagementController amc = createMock(AmbariManagementController.class);
@@ -174,9 +185,20 @@ public class AlertDefinitionResourceProviderTest {
Resource r = results.iterator().next();
Assert.assertEquals("my_def", r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_NAME));
- Assert.assertEquals("metric", r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE));
+
+ Assert.assertEquals(
+ SourceType.METRIC.name(),
+ r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE));
+
+ Source source = getMockSource();
+ String okJson = source.getReporting().getOk().getText();
+ Object reporting = r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_REPORTING);
+
+ Assert.assertTrue(reporting.toString().contains(okJson));
+
Assert.assertEquals("Mock Label",
r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_LABEL));
+
Assert.assertNotNull(r.getPropertyValue("AlertDefinition/source/type"));
}
@@ -203,14 +225,25 @@ public class AlertDefinitionResourceProviderTest {
replay(amc, clusters, cluster, dao, definitionHash);
+ Gson gson = m_factory.getGson();
+ Source source = getMockSource();
+ String sourceJson = gson.toJson(source);
AlertDefinitionResourceProvider provider = createProvider(amc);
Map<String, Object> requestProps = new HashMap<String, Object>();
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME, "c1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_INTERVAL, "1");
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_NAME, "my_def");
- requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SERVICE_NAME, "HDFS");
- requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE, "METRIC");
+
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SERVICE_NAME,
+ "HDFS");
+
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE,
+ sourceJson);
+
+ requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_SOURCE_TYPE,
+ SourceType.METRIC.name());
+
requestProps.put(AlertDefinitionResourceProvider.ALERT_DEF_LABEL,
"Mock Label (Create)");
@@ -230,10 +263,23 @@ public class AlertDefinitionResourceProviderTest {
Assert.assertEquals(Integer.valueOf(1), entity.getScheduleInterval());
Assert.assertNull(entity.getScope());
Assert.assertEquals("HDFS", entity.getServiceName());
- Assert.assertNotNull(entity.getSource());
Assert.assertEquals("METRIC", entity.getSourceType());
Assert.assertEquals("Mock Label (Create)", entity.getLabel());
+ // verify Source
+ Assert.assertNotNull(entity.getSource());
+ Source actualSource = gson.fromJson(entity.getSource(), Source.class);
+ Assert.assertNotNull(actualSource);
+
+ assertEquals(source.getReporting().getOk().getText(),
+ source.getReporting().getOk().getText());
+
+ assertEquals(source.getReporting().getWarning().getText(),
+ source.getReporting().getWarning().getText());
+
+ assertEquals(source.getReporting().getCritical().getText(),
+ source.getReporting().getCritical().getText());
+
verify(amc, clusters, cluster, dao);
}
@@ -386,7 +432,10 @@ public class AlertDefinitionResourceProviderTest {
/**
* @return
*/
- private List<AlertDefinitionEntity> getMockEntities() {
+ private List<AlertDefinitionEntity> getMockEntities() throws Exception {
+ Source source = getMockSource();
+ String sourceJson = new Gson().toJson(source);
+
AlertDefinitionEntity entity = new AlertDefinitionEntity();
entity.setClusterId(Long.valueOf(1L));
entity.setComponentName(null);
@@ -397,13 +446,36 @@ public class AlertDefinitionResourceProviderTest {
entity.setHash(DEFINITION_UUID);
entity.setScheduleInterval(Integer.valueOf(2));
entity.setServiceName(null);
- entity.setSourceType("metric");
- entity.setSource("{'jmx': 'beanName/attributeName', 'host': '{{aa:123445}}'}");
-
+ entity.setSourceType(SourceType.METRIC.name());
+ entity.setSource(sourceJson);
return Arrays.asList(entity);
}
/**
+ * @return
+ */
+ private Source getMockSource() throws Exception {
+ File alertsFile = new File(
+ "src/test/resources/stacks/HDP/2.0.5/services/HDFS/alerts.json");
+
+ Assert.assertTrue(alertsFile.exists());
+
+ Set<AlertDefinition> set = m_factory.getAlertDefinitions(alertsFile, "HDFS");
+ AlertDefinition nameNodeCpu = null;
+ Iterator<AlertDefinition> definitions = set.iterator();
+ while (definitions.hasNext()) {
+ AlertDefinition definition = definitions.next();
+
+ if (definition.getName().equals("namenode_cpu")) {
+ nameNodeCpu = definition;
+ }
+ }
+
+ Assert.assertNotNull(nameNodeCpu.getSource());
+ return nameNodeCpu.getSource();
+ }
+
+ /**
*
*/
private class MockModule implements Module {
@@ -418,6 +490,7 @@ public class AlertDefinitionResourceProviderTest {
EasyMock.createNiceMock(Clusters.class));
binder.bind(Cluster.class).toInstance(
EasyMock.createNiceMock(Cluster.class));
+ binder.bind(ActionMetadata.class);
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac9cb3f/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/alerts.json b/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/alerts.json
index 85aa3ab..02a9a58 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/alerts.json
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/alerts.json
@@ -1,18 +1,5 @@
{
- "service": [
- ],
- "SECONDARY_NAMENODE": [
- {
- "name": "secondary_namenode_process",
- "label": "Secondary NameNode process",
- "interval": 1,
- "scope": "service",
- "source": {
- "type": "PORT",
- "config": "{{hdfs-site/dfs.namenode.secondary.http-address}}:50071"
- }
- }
- ],
+ "service": [],
"NAMENODE": [
{
"name": "namenode_cpu",
@@ -21,7 +8,20 @@
"source": {
"type": "METRIC",
"jmx": "java.lang:type=OperatingSystem/SystemCpuLoad",
- "host": "{{hdfs-site/dfs.namenode.secondary.http-address}}"
+ "host": "{{hdfs-site/dfs.namenode.secondary.http-address}}",
+ "reporting": {
+ "ok": {
+ "text": "System CPU Load is OK"
+ },
+ "warning": {
+ "text": "System CPU Load is Nearing Critical",
+ "value": 70
+ },
+ "critical": {
+ "text": "System CPU Load is Critical",
+ "value": 80
+ }
+ }
}
},
{
@@ -31,8 +31,17 @@
"scope": "host",
"source": {
"type": "PORT",
- "uri": "{{hdfs-site/dfs.namenode.http-address}}:50070"
- }
+ "uri": "{{hdfs-site/dfs.namenode.http-address}}",
+ "port": 50070,
+ "reporting": {
+ "ok": {
+ "text": "TCP OK - {0:.4f} response on port {1}"
+ },
+ "critical": {
+ "text": "TCP FAIL - {0:.4f} response on port {1}"
+ }
+ }
+ }
},
{
"name": "hdfs_last_checkpoint",
@@ -46,6 +55,18 @@
}
}
],
- "DATANODE": [
- ]
-}
+ "SECONDARY_NAMENODE": [
+ {
+ "name": "secondary_namenode_process",
+ "label": "Secondary NameNode process",
+ "interval": 1,
+ "scope": "service",
+ "source": {
+ "type": "PORT",
+ "uri": "{{hdfs-site/dfs.namenode.secondary.http-address}}",
+ "port": 50070
+ }
+ }
+ ],
+ "DATANODE": []
+}
\ No newline at end of file
[13/50] [abbrv] git commit: AMBARI-6904. Admin View: need to handle
mask property attribute. (jaimin)
Posted by jo...@apache.org.
AMBARI-6904. Admin View: need to handle mask property attribute. (jaimin)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ab128d4a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ab128d4a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ab128d4a
Branch: refs/heads/branch-alerts-dev
Commit: ab128d4ac99e1e6742ea49a3064923556a23167b
Parents: b8c8326
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Mon Aug 18 19:15:48 2014 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Mon Aug 18 19:15:48 2014 -0700
----------------------------------------------------------------------
.../main/resources/ui/admin-web/app/index.html | 4 +--
.../app/scripts/controllers/NavbarCtrl.js | 5 +++-
.../controllers/ambariViews/ViewsEditCtrl.js | 16 ++++++++++--
.../controllers/groups/GroupsEditCtrl.js | 1 -
.../controllers/groups/GroupsListCtrl.js | 12 ++++++++-
.../scripts/controllers/users/UsersListCtrl.js | 11 +++++++-
.../resources/ui/admin-web/app/styles/main.css | 27 +++++++++++++++++++-
.../admin-web/app/views/ambariViews/create.html | 22 +++++++++++-----
.../admin-web/app/views/ambariViews/edit.html | 8 +++---
.../ui/admin-web/app/views/groups/create.html | 4 +--
.../ui/admin-web/app/views/groups/edit.html | 6 ++++-
.../ui/admin-web/app/views/groups/list.html | 2 +-
.../ui/admin-web/app/views/users/create.html | 4 +--
.../ui/admin-web/app/views/users/show.html | 4 +--
14 files changed, 98 insertions(+), 28 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/index.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/index.html b/ambari-admin/src/main/resources/ui/admin-web/app/index.html
index 7b03087..2bc520f 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/index.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/index.html
@@ -19,9 +19,10 @@
<html class="no-js">
<head>
<meta charset="utf-8">
- <title>AdminConsole</title>
+ <title>Ambari</title>
<meta name="description" content="">
<meta name="viewport" content="width=device-width, initial-scale=1">
+ <link rel="shortcut icon" href="/img/logo.png" type="image/x-icon">
<!-- Place favicon.ico and apple-touch-icon.png in the root directory -->
<!-- build:css styles/vendor.css -->
@@ -47,7 +48,6 @@
<div class="container">
<a href="#/" class="logo"><img src="/img/logo-white.png" alt="Apache Ambari" title="Apache Ambari"></a>
<a href="#/" class="brand" title="Apache Ambari">Ambari</a>
- <a href="" class="brand cluster-name">Admin Console</a>
<ul class="nav navbar-nav navbar-right">
<li>
<div class="btn-group" dropdown is-open="status.isopen">
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/NavbarCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/NavbarCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/NavbarCtrl.js
index b839feb..8c7ec9e 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/NavbarCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/NavbarCtrl.js
@@ -18,7 +18,7 @@
'use strict';
angular.module('ambariAdminConsole')
-.controller('NavbarCtrl',['$scope', 'Cluster', '$location', 'uiAlert', 'ROUTES', 'LDAP', 'ConfirmationModal', function($scope, Cluster, $location, uiAlert, ROUTES, LDAP, ConfirmationModal) {
+.controller('NavbarCtrl',['$scope', 'Cluster', '$location', 'uiAlert', 'ROUTES', 'LDAP', 'ConfirmationModal', '$rootScope', function($scope, Cluster, $location, uiAlert, ROUTES, LDAP, ConfirmationModal, $rootScope) {
$scope.cluster = null;
Cluster.getStatus().then(function(cluster) {
$scope.cluster = cluster;
@@ -46,6 +46,9 @@ angular.module('ambariAdminConsole')
ConfirmationModal.show('Sync LDAP', 'Are you sure you want to sync LDAP?').then(function() {
LDAP.sync($scope.ldapData['LDAP'].groups, $scope.ldapData['LDAP'].users).then(function() {
uiAlert.success('LDAP synced successful');
+ $rootScope.$evalAsync(function() {
+ $rootScope.LDAPSynced = true;
+ });
}).catch(function(data) {
uiAlert.danger(data.data.status, data.data.message);
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js
index 42318a7..6d251a5 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/ambariViews/ViewsEditCtrl.js
@@ -38,6 +38,17 @@ angular.module('ambariAdminConsole')
});
}
+
+ // Get META for properties
+ View.getMeta($routeParams.viewId, $routeParams.version).then(function(data) {
+ var meta = {};
+ angular.forEach(data.data.ViewVersionInfo.parameters, function(parameter) {
+ meta[parameter.name] = parameter;
+ });
+ $scope.configurationMeta = meta;
+ reloadViewInfo();
+ });
+
function reloadViewPrivilegies(){
PermissionLoader.getViewPermissions({
viewName: $routeParams.viewId,
@@ -57,7 +68,7 @@ angular.module('ambariAdminConsole')
$scope.permissions = [];
- reloadViewInfo();
+ // reloadViewInfo();
reloadViewPrivilegies();
$scope.editSettingsDisabled = true;
@@ -71,6 +82,7 @@ angular.module('ambariAdminConsole')
}
})
.success(function() {
+ reloadViewInfo();
$scope.editSettingsDisabled = true;
})
.catch(function(data) {
@@ -140,4 +152,4 @@ angular.module('ambariAdminConsole')
});
});
};
-}]);
\ No newline at end of file
+}]);
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js
index b5bcded..0896de1 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsEditCtrl.js
@@ -84,7 +84,6 @@ angular.module('ambariAdminConsole')
privilegies.views[privilegie.instance_name].version = privilegie.version;
privilegies.views[privilegie.instance_name].view_name = privilegie.view_name;
privilegies.views[privilegie.instance_name].privileges += privilegies.views[privilegie.instance_name].privileges ? ', ' + privilegie.permission_name : privilegie.permission_name;
-
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsListCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsListCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsListCtrl.js
index 1ae4c44..82eb029 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsListCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/groups/GroupsListCtrl.js
@@ -18,7 +18,7 @@
'use strict';
angular.module('ambariAdminConsole')
-.controller('GroupsListCtrl',['$scope', 'Group', '$modal', 'ConfirmationModal', function($scope, Group, $modal, ConfirmationModal) {
+.controller('GroupsListCtrl',['$scope', 'Group', '$modal', 'ConfirmationModal', '$rootScope', function($scope, Group, $modal, ConfirmationModal, $rootScope) {
$scope.groups = [];
$scope.groupsPerPage = 10;
@@ -62,4 +62,14 @@ angular.module('ambariAdminConsole')
$scope.currentTypeFilter = $scope.typeFilterOptions[0];
loadGroups();
+
+ $rootScope.$watch(function(scope) {
+ return scope.LDAPSynced;
+ }, function(LDAPSynced) {
+ if(LDAPSynced === true){
+ $rootScope.LDAPSynced = false;
+ loadGroups();
+ }
+ });
+
}]);
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersListCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersListCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersListCtrl.js
index 4fee376..2ecf8e7 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersListCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/users/UsersListCtrl.js
@@ -18,7 +18,7 @@
'use strict';
angular.module('ambariAdminConsole')
-.controller('UsersListCtrl',['$scope', 'User', '$modal', function($scope, User, $modal) {
+.controller('UsersListCtrl',['$scope', 'User', '$modal', '$rootScope', function($scope, User, $modal, $rootScope) {
$scope.users = [];
$scope.usersPerPage = 10;
$scope.currentPage = 1;
@@ -67,4 +67,13 @@ angular.module('ambariAdminConsole')
$scope.currentTypeFilter = $scope.typeFilterOptions[0];
$scope.loadUsers();
+
+ $rootScope.$watch(function(scope) {
+ return scope.LDAPSynced;
+ }, function(LDAPSynced) {
+ if(LDAPSynced === true){
+ $rootScope.LDAPSynced = false;
+ $scope.loadUsers();
+ }
+ });
}]);
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
index 9b1b31d..f6db020 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/styles/main.css
@@ -98,6 +98,10 @@
border-radius: 0;
}
+.views-list-table .panel-group .panel:nth-child(even) .panel-heading{
+ background: #f9f9f9;
+}
+
.users-pane table .glyphicon{
width: 14px;
}
@@ -238,7 +242,12 @@
top: 5px;
z-index: 100;
}
-
+.groups-pane table thead th{
+ border-top: 0;
+}
+.groups-pane table thead tr:first-child th{
+ border: 0;
+}
.container{
padding-left: 0;
@@ -679,3 +688,19 @@ input[type="submit"].btn.btn-mini {
*padding-top: 1px;
*padding-bottom: 1px;
}
+
+.alert-info {
+ background-color: #E6F1F6;
+ border-color: #D2D9DD;
+ color: #4E575B;
+ text-shadow: none;
+}
+.alert-info .link {
+ padding: 0 15px;
+}
+.alert-info .link-left-pad {
+ padding-left: 15px;
+}
+.breadcrumb > .active {
+ color: #666;
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/create.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/create.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/create.html
index 6c5ded8..b7e3606 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/create.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/create.html
@@ -22,9 +22,17 @@
<hr>
<form class="form-horizontal create-view-form" role="form" name="form.isntanceCreateForm" novalidate>
<div class="view-header">
- <div class="description">
- <h4>View: <span>{{view.ViewVersionInfo.view_name}}</span></h4>
- <h4>Version: <select ng-model="version" class="instanceversion-input" ng-change="versionChanged()" ng-options="o as o for o in versions"></select></h4>
+ <div class="form-group">
+ <div class="col-sm-2">
+ <label for="" class="control-label">View</label>
+ </div>
+ <div class="col-sm-10"><label for="" class="control-label">{{view.ViewVersionInfo.view_name}}</label></div>
+ </div>
+ <div class="form-group">
+ <div class="col-sm-2"><label for="" class="control-label">Version</label></div>
+ <div class="col-sm-2">
+ <select ng-model="version" class="instanceversion-input form-control" ng-change="versionChanged()" ng-options="o as o for o in versions"></select>
+ </div>
</div>
</div>
@@ -53,7 +61,7 @@
</div>
<div class="form-group"
ng-class="{'has-error' : ( (form.isntanceCreateForm.displayLabel.$error.required || form.isntanceCreateForm.displayLabel.$error.pattern) && form.isntanceCreateForm.submitted)}">
- <label for="" class="control-labe col-sm-2">Display Label</label>
+ <label for="" class="control-label col-sm-2">Display Label</label>
<div class="col-sm-10">
<input type="text" class="form-control instancelabel-input" name="displayLabel" ng-model="instance.label" required ng-pattern="nameValidationPattern" autocomplete="off">
@@ -83,11 +91,11 @@
<div class="panel-body">
<div class="form-group" ng-repeat="parameter in instance.properties"
ng-class="{'has-error' : (form.isntanceCreateForm[parameter.name].$error.required && form.isntanceCreateForm.submitted)}" >
- <label for="" class="col-sm-3 control-label" ng-class="{'not-required': !parameter.required}">{{parameter.description}}{{parameter.required ? '*' : ''}}</label>
+ <label for="" class="col-sm-3 control-label" ng-class="{'not-required': !parameter.required}" tooltip="{{parameter.description}}">{{parameter.name}}{{parameter.required ? '*' : ''}}</label>
<div class="col-sm-9">
- <input type="text" class="form-control viewproperty-input" name="{{parameter.name}}" ng-required="parameter.required" ng-model="parameter.value" autocomplete="off">
+ <input type="{{parameter.masked ? 'password' : 'text'}}" class="form-control viewproperty-input" name="{{parameter.name}}" ng-required="parameter.required" ng-model="parameter.value" autocomplete="off">
<div class="alert alert-danger no-margin-bottom top-margin" ng-show='form.isntanceCreateForm[parameter.name].$error.required && form.isntanceCreateForm.submitted'>
- Field requried!
+ This field is required.
</div>
</div>
</div>
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html
index 92fbecb..7296824 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/ambariViews/edit.html
@@ -130,9 +130,9 @@
<div class="panel-body">
<form action="" class="form-horizontal" ng-hide="isConfigurationEmpty">
<fieldset ng-disabled="editConfigurationDisabled">
- <div class="form-group" ng-repeat="(propertyName, propertyValue) in configuration">
- <label for="" class="control-label col-sm-3">{{propertyName}}</label>
- <div class="col-sm-9"><input type="text" class="form-control propertie-input" ng-model="configuration[propertyName]"></div>
+ <div class="form-group" ng-repeat="(propertyName, propertyValue) in configurationMeta">
+ <label for="" class="control-label col-sm-3" ng-class="{'not-required': !propertyValue.required}" tooltip="{{propertyValue.description}}">{{propertyName}}{{propertyValue.required ? '*' : ''}}</label>
+ <div class="col-sm-9"><input type="{{propertyValue.masked ? 'password' : 'text'}}" class="form-control propertie-input" ng-model="configuration[propertyName]"></div>
</div>
<div class="form-group" ng-hide="editConfigurationDisabled">
<div class="col-sm-offset-2 col-sm-10">
@@ -143,7 +143,7 @@
</fieldset>
</form>
<div ng-show="isConfigurationEmpty">
- <div class="alert alert-info">There are no configuration defined for this view.</div>
+ <div class="alert alert-info">There are no properties defined for this view.</div>
</div>
</div>
</div>
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/create.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/create.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/create.html
index 2395da7..9e3bc47 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/create.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/create.html
@@ -20,13 +20,13 @@
<li class="active">Create Local Group</li>
</ol>
<hr>
-<form class="form-horizontal" role="form" novalidate name="form">
+<form class="form-horizontal" role="form" novalidate name="form" autocomplete="off">
<div class="form-group" ng-class="{'has-error' : (form.group_name.$error.required || form.group_name.$error.pattern) && form.submitted}">
<label for="groupname" class="col-sm-2 control-label">Group name</label>
<div class="col-sm-10">
<input type="text" id="groupname" class="form-control groupname-input" name="group_name" placeholder="Group name" ng-model="group.group_name" required ng-pattern="/^([a-zA-Z0-9._\s]+)$/" autocomplete="off">
<div class="alert alert-danger top-margin" ng-show="form.group_name.$error.required && form.submitted">
- Required
+ This field is required.
</div>
<div class="alert alert-danger top-margin" ng-show="form.group_name.$error.pattern && form.submitted">
Must contain only simple characters.
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html
index 7e399c0..d82caf6 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/edit.html
@@ -22,7 +22,11 @@
<li class="active">{{group.group_name}}</li>
</ol>
<div class="pull-right top-margin-4">
- <button class="btn btn-danger deletegroup-btn" ng-click="deleteGroup(group)">Delete Group</button>
+ <div ng-switch="group.ldap_group">
+ <button ng-switch-when="true" class="btn disabled deletegroup-btn deleteuser-btn" tooltip="Cannot Delete Group">Delete Group</button>
+ <button ng-switch-when="false" class="btn btn-danger deletegroup-btn" ng-click="deleteGroup(group)">Delete Group</button>
+ </div>
+
</div>
</div>
<hr>
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/list.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/list.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/list.html
index bfb2b87..f5fd1dd 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/list.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/groups/list.html
@@ -29,7 +29,7 @@
<thead>
<tr>
<th>
- <label for="">Group name</label>
+ <label for="">Group Name</label>
</th>
<th>
<label for="">Type</label>
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html
index fb379ce..912a26d 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/users/create.html
@@ -20,13 +20,13 @@
<li class="active">Create Local User</li>
</ol>
<hr>
-<form class="form-horizontal create-user-form" role="form" novalidate name="form">
+<form class="form-horizontal create-user-form" role="form" novalidate name="form" autocomplete="off">
<div class="form-group" ng-class="{'has-error' : form.user_name.$error.required && form.submitted}">
<label for="username" class="col-sm-2 control-label">Username</label>
<div class="col-sm-10">
<input type="text" id="username" class="form-control username-input" name="user_name" placeholder="User name" ng-model="user.user_name" required autocomplete="off">
<div class="alert alert-danger top-margin" ng-show="form.user_name.$error.required && form.submitted">
- Required
+ This field is required.
</div>
</div>
</div>
http://git-wip-us.apache.org/repos/asf/ambari/blob/ab128d4a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html
index cbd0092..5530031 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/users/show.html
@@ -23,7 +23,7 @@
<li class="active"><span class="glyphicon glyphicon-flash" ng-show="user.admin"></span>{{user.user_name}}</li>
</ol>
<div class="pull-right top-margin-4">
- <div ng-switch="isCurrentUser">
+ <div ng-switch="isCurrentUser || user.ldap_user">
<button class="btn deleteuser-btn disabled btn-default" ng-switch-when="true" tooltip="Cannot Delete User">Delete User</button>
<button class="btn deleteuser-btn btn-danger" ng-switch-when="false" ng-click="deleteUser()">Delete User</button>
</div>
@@ -52,7 +52,7 @@
<div class="form-group">
<label for="password" class="col-sm-2 control-label">Password</label>
<div class="col-sm-10">
- <a href ng-click="openChangePwdDialog()" class="btn btn-default changepassword">Change Password</a>
+ <a href ng-click="openChangePwdDialog()" ng-disabled="user.ldap_user" class="btn btn-default changepassword">Change Password</a>
</div>
</div>
<div class="form-group">
[03/50] [abbrv] git commit: AMBARI-6891 Ambari upgrade: Accommodate
renaming pig-content to content in 1.7.0 upgradeCatalog (dsen)
Posted by jo...@apache.org.
AMBARI-6891 Ambari upgrade: Accommodate renaming pig-content to content in 1.7.0 upgradeCatalog (dsen)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f0be435e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f0be435e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f0be435e
Branch: refs/heads/branch-alerts-dev
Commit: f0be435e07349e53fb2b9ecf33952efd25d0c90d
Parents: 1730dc5
Author: Dmytro Sen <ds...@hortonworks.com>
Authored: Mon Aug 18 19:08:04 2014 +0300
Committer: Dmytro Sen <ds...@hortonworks.com>
Committed: Mon Aug 18 19:08:04 2014 +0300
----------------------------------------------------------------------
.../server/upgrade/UpgradeCatalog170.java | 33 ++++++++++++++++++++
.../server/upgrade/UpgradeCatalog170Test.java | 9 +++++-
2 files changed, 41 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f0be435e/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index 9888177..33a87a7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -83,8 +83,11 @@ import com.google.inject.Injector;
*/
public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
private static final String CONTENT_FIELD_NAME = "content";
+ private static final String PIG_CONTENT_FIELD_NAME = "pig-content";
private static final String ENV_CONFIGS_POSTFIX = "-env";
+ private static final String PIG_PROPERTIES_CONFIG_TYPE = "pig-properties";
+
private static final String ALERT_TABLE_DEFINITION = "alert_definition";
private static final String ALERT_TABLE_HISTORY = "alert_history";
private static final String ALERT_TABLE_CURRENT = "alert_current";
@@ -560,6 +563,7 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
moveGlobalsToEnv();
addEnvContentFields();
addMissingConfigs();
+ renamePigProperties();
upgradePermissionModel();
}
@@ -744,6 +748,35 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
false);
}
+ /**
+ * Rename pig-content to content in pig-properties config
+ * @throws AmbariException
+ */
+ protected void renamePigProperties() throws AmbariException {
+ ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
+ AmbariManagementController ambariManagementController = injector.getInstance(
+ AmbariManagementController.class);
+
+ Clusters clusters = ambariManagementController.getClusters();
+ if (clusters == null) {
+ return;
+ }
+
+ Map<String, Cluster> clusterMap = clusters.getClusters();
+
+ if (clusterMap != null && !clusterMap.isEmpty()) {
+ for (final Cluster cluster : clusterMap.values()) {
+ Config oldConfig = cluster.getDesiredConfigByType(PIG_PROPERTIES_CONFIG_TYPE);
+ if (oldConfig != null) {
+ Map<String, String> properties = oldConfig.getProperties();
+ String value = properties.remove(PIG_CONTENT_FIELD_NAME);
+ properties.put(CONTENT_FIELD_NAME, value);
+ configHelper.createConfigType(cluster, ambariManagementController, PIG_PROPERTIES_CONFIG_TYPE, properties, "ambari-upgrade");
+ }
+ }
+ }
+ }
+
protected void addEnvContentFields() throws AmbariException {
ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
AmbariManagementController ambariManagementController = injector.getInstance(
http://git-wip-us.apache.org/repos/asf/ambari/blob/f0be435e/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
index 9325e9f..4033f03 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
@@ -219,6 +219,7 @@ public class UpgradeCatalog170Test {
Cluster cluster = createStrictMock(Cluster.class);
Clusters clusters = createStrictMock(Clusters.class);
Config config = createStrictMock(Config.class);
+ Config pigConfig = createStrictMock(Config.class);
UserDAO userDAO = createNiceMock(UserDAO.class);
PrincipalDAO principalDAO = createNiceMock(PrincipalDAO.class);
@@ -256,6 +257,9 @@ public class UpgradeCatalog170Test {
globalConfigs.put("prop1", "val1");
globalConfigs.put("smokeuser_keytab", "val2");
+ Map<String, String> pigSettings = new HashMap<String, String>();
+ pigSettings.put("pig-content", "foo");
+
Set<String> envDicts = new HashSet<String>();
envDicts.add("hadoop-env");
envDicts.add("global");
@@ -331,8 +335,11 @@ public class UpgradeCatalog170Test {
expect(permissionDAO.findClusterOperatePermission()).andReturn(null);
expect(permissionDAO.findClusterReadPermission()).andReturn(null);
+ expect(cluster.getDesiredConfigByType("pig-properties")).andReturn(pigConfig).anyTimes();
+ expect(pigConfig.getProperties()).andReturn(pigSettings).anyTimes();
+
replay(entityManager, trans, upgradeCatalog, cb, cq, hrc, q);
- replay(dbAccessor, configuration, injector, cluster, clusters, amc, config, configHelper);
+ replay(dbAccessor, configuration, injector, cluster, clusters, amc, config, configHelper, pigConfig);
replay(userDAO, clusterDAO, viewDAO, viewInstanceDAO, permissionDAO);
Class<?> c = AbstractUpgradeCatalog.class;
[46/50] [abbrv] AMBARI-6887. Alerts: groundwork for alert collection
(ncole)
Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/simple.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/simple.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/simple.py
new file mode 100644
index 0000000..ea61b3f
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/triggers/simple.py
@@ -0,0 +1,17 @@
+from apscheduler.util import convert_to_datetime
+
+
+class SimpleTrigger(object):
+ def __init__(self, run_date):
+ self.run_date = convert_to_datetime(run_date)
+
+ def get_next_fire_time(self, start_date):
+ if self.run_date >= start_date:
+ return self.run_date
+
+ def __str__(self):
+ return 'date[%s]' % str(self.run_date)
+
+ def __repr__(self):
+ return '<%s (run_date=%s)>' % (
+ self.__class__.__name__, repr(self.run_date))
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/main/python/ambari_agent/apscheduler/util.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/apscheduler/util.py b/ambari-agent/src/main/python/ambari_agent/apscheduler/util.py
new file mode 100644
index 0000000..dcede4c
--- /dev/null
+++ b/ambari-agent/src/main/python/ambari_agent/apscheduler/util.py
@@ -0,0 +1,230 @@
+"""
+This module contains several handy functions primarily meant for internal use.
+"""
+
+from datetime import date, datetime, timedelta
+from time import mktime
+import re
+import sys
+
+__all__ = ('asint', 'asbool', 'convert_to_datetime', 'timedelta_seconds',
+ 'time_difference', 'datetime_ceil', 'combine_opts',
+ 'get_callable_name', 'obj_to_ref', 'ref_to_obj', 'maybe_ref',
+ 'to_unicode', 'iteritems', 'itervalues', 'xrange')
+
+
def asint(text):
    """
    Safely converts a string to an integer, returning None if the string
    is None.

    :type text: str
    :rtype: int
    """
    return int(text) if text is not None else None
+
+
def asbool(obj):
    """
    Interprets an object as a boolean value.

    :rtype: bool
    """
    if not isinstance(obj, str):
        # Non-strings fall back to ordinary Python truthiness.
        return bool(obj)
    normalized = obj.strip().lower()
    if normalized in ('true', 'yes', 'on', 'y', 't', '1'):
        return True
    if normalized in ('false', 'no', 'off', 'n', 'f', '0'):
        return False
    raise ValueError('Unable to interpret value "%s" as boolean' % normalized)
+
+
# Matches "YYYY-MM-DD" optionally followed by " HH:MM:SS[.micro]".
# The named groups feed datetime(**values) in convert_to_datetime().
_DATE_REGEX = re.compile(
    r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
    r'(?: (?P<hour>\d{1,2}):(?P<minute>\d{1,2}):(?P<second>\d{1,2})'
    r'(?:\.(?P<microsecond>\d{1,6}))?)?')
+
+
def convert_to_datetime(input):
    """
    Converts the given object to a datetime object, if possible.
    If an actual datetime object is passed, it is returned unmodified.
    If the input is a string, it is parsed as a datetime.

    Date strings are accepted in three different forms: date only (Y-m-d),
    date with time (Y-m-d H:M:S) or with date+time with microseconds
    (Y-m-d H:M:S.micro).

    :rtype: datetime
    """
    # Note: the datetime check must precede the date check because
    # datetime is a subclass of date.
    if isinstance(input, datetime):
        return input
    if isinstance(input, date):
        return datetime.fromordinal(input.toordinal())
    if isinstance(input, basestring):
        match = _DATE_REGEX.match(input)
        if match is None:
            raise ValueError('Invalid date string')
        fields = dict((key, int(val or 0))
                      for key, val in match.groupdict().items())
        return datetime(**fields)
    raise TypeError('Unsupported input type: %s' % type(input))
+
+
def timedelta_seconds(delta):
    """
    Converts the given timedelta to seconds.

    :type delta: timedelta
    :rtype: float
    """
    whole_seconds = delta.days * 86400 + delta.seconds
    return whole_seconds + delta.microseconds / 1000000.0
+
+
def time_difference(date1, date2):
    """
    Returns the time difference in seconds between the given two
    datetime objects. The difference is calculated as: date1 - date2.

    :param date1: the later datetime
    :type date1: datetime
    :param date2: the earlier datetime
    :type date2: datetime
    :rtype: float
    """
    def to_epoch(d):
        # Epoch seconds with sub-second precision restored from microseconds.
        return mktime(d.timetuple()) + d.microsecond / 1000000.0

    return to_epoch(date1) - to_epoch(date2)
+
+
def datetime_ceil(dateval):
    """
    Rounds the given datetime object upwards to the next whole second.

    :type dateval: datetime
    """
    micros = dateval.microsecond
    if micros == 0:
        # Already on a whole second; nothing to round.
        return dateval
    return dateval + timedelta(seconds=1, microseconds=-micros)
+
+
def combine_opts(global_config, prefix, local_config=None):
    """
    Returns a subdictionary from keys and values of ``global_config`` where
    the key starts with the given prefix, combined with options from
    local_config. The keys in the subdictionary have the prefix removed.

    :type global_config: dict
    :type prefix: str
    :type local_config: dict
    :rtype: dict
    """
    # Use a None sentinel instead of a mutable {} default: a shared
    # mutable default dict is a classic Python pitfall. Behavior for
    # existing callers is unchanged.
    prefixlen = len(prefix)
    subconf = {}
    for key, value in global_config.items():
        if key.startswith(prefix):
            # Strip the prefix so callers see bare option names.
            subconf[key[prefixlen:]] = value
    if local_config:
        # Local options override prefixed globals.
        subconf.update(local_config)
    return subconf
+
+
+def get_callable_name(func):
+ """
+ Returns the best available display name for the given function/callable.
+ """
+ f_self = getattr(func, '__self__', None) or getattr(func, 'im_self', None)
+
+ if f_self and hasattr(func, '__name__'):
+ if isinstance(f_self, type):
+ # class method
+ clsname = getattr(f_self, '__qualname__', None) or f_self.__name__
+ return '%s.%s' % (clsname, func.__name__)
+ # bound method
+ return '%s.%s' % (f_self.__class__.__name__, func.__name__)
+
+ if hasattr(func, '__call__'):
+ if hasattr(func, '__name__'):
+ # function, unbound method or a class with a __call__ method
+ return func.__name__
+ # instance of a class with a __call__ method
+ return func.__class__.__name__
+
+ raise TypeError('Unable to determine a name for %s -- '
+ 'maybe it is not a callable?' % repr(func))
+
+
def obj_to_ref(obj):
    """
    Returns the path to the given object.
    """
    ref = '%s:%s' % (obj.__module__, get_callable_name(obj))
    try:
        # Round-trip the reference to make sure it resolves back to
        # the very same object (lambdas and nested functions won't).
        resolved = ref_to_obj(ref)
        if resolved != obj:
            raise ValueError
    except Exception:
        raise ValueError('Cannot determine the reference to %s' % repr(obj))

    return ref
+
+
def ref_to_obj(ref):
    """
    Returns the object pointed to by ``ref``.
    """
    if not isinstance(ref, basestring):
        raise TypeError('References must be strings')
    if ':' not in ref:
        raise ValueError('Invalid reference')

    modulename, rest = ref.split(':', 1)
    try:
        obj = __import__(modulename)
    except ImportError:
        raise LookupError('Error resolving reference %s: '
                          'could not import module' % ref)

    try:
        # __import__ returns the top-level package, so walk down the
        # sub-packages first, then the attribute path after the colon.
        for part in modulename.split('.')[1:] + rest.split('.'):
            obj = getattr(obj, part)
        return obj
    except Exception:
        raise LookupError('Error resolving reference %s: '
                          'error looking up object' % ref)
+
+
def maybe_ref(ref):
    """
    Returns the object that the given reference points to, if it is indeed
    a reference. If it is not a reference, the object is returned as-is.
    """
    if isinstance(ref, str):
        return ref_to_obj(ref)
    return ref
+
+
def to_unicode(string, encoding='ascii'):
    """
    Safely converts a string to a unicode representation on any
    Python version.
    """
    decode = getattr(string, 'decode', None)
    if decode is not None:
        return decode(encoding, 'ignore')
    return string  # pragma: nocover
+
+
# Python 2/3 compatibility shims: expose uniform names for the dict
# iteration helpers, xrange and basestring regardless of interpreter
# version, so the rest of this module never branches on sys.version_info.
if sys.version_info < (3, 0):  # pragma: nocover
    iteritems = lambda d: d.iteritems()
    itervalues = lambda d: d.itervalues()
    xrange = xrange
    basestring = basestring
else:  # pragma: nocover
    iteritems = lambda d: d.items()
    itervalues = lambda d: d.values()
    xrange = range
    basestring = str
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/test/python/ambari_agent/TestAlerts.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestAlerts.py b/ambari-agent/src/test/python/ambari_agent/TestAlerts.py
new file mode 100644
index 0000000..51c3af9
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/TestAlerts.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import os
+import sys
+from ambari_agent.AlertSchedulerHandler import AlertSchedulerHandler
+from ambari_agent.apscheduler.scheduler import Scheduler
+from ambari_agent.alerts.port_alert import PortAlert
+from mock.mock import patch
+from unittest import TestCase
+
class TestAlerts(TestCase):
  """Unit tests for the agent-side alert scheduling groundwork."""

  def setUp(self):
    pass

  def tearDown(self):
    # Restore stdout in case a test redirected it. The original code
    # used "==" (a no-op comparison) instead of "=", so a redirected
    # stdout was never actually reset between tests.
    sys.stdout = sys.__stdout__

  @patch.object(Scheduler, "add_interval_job")
  def test_build(self, aps_add_interval_job_mock):
    # Building the handler from the dummy definitions file should
    # register at least one interval job with the scheduler.
    test_file_path = os.path.join('ambari_agent', 'dummy_files', 'alert_definitions.json')

    ash = AlertSchedulerHandler(test_file_path)

    self.assertTrue(aps_add_interval_job_mock.called)

  def test_port_alert(self):
    # Minimal PORT-type alert definition mirroring
    # dummy_files/alert_definitions.json.
    json = { "name": "namenode_process",
      "service": "HDFS",
      "component": "NAMENODE",
      "label": "NameNode process",
      "interval": 6,
      "scope": "host",
      "source": {
        "type": "PORT",
        "uri": "http://c6401.ambari.apache.org:50070",
        "default_port": 50070,
        "reporting": {
          "ok": {
            "text": "TCP OK - {0:.4f} response time on port {1}"
          },
          "critical": {
            "text": "Could not load process info: {0}"
          }
        }
      }
    }

    pa = PortAlert(json, json['source'])
    self.assertEquals(6, pa.interval())

    # collect() result is not asserted yet; this exercises the code path.
    res = pa.collect()
http://git-wip-us.apache.org/repos/asf/ambari/blob/14e79ed1/ambari-agent/src/test/python/ambari_agent/dummy_files/alert_definitions.json
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/dummy_files/alert_definitions.json b/ambari-agent/src/test/python/ambari_agent/dummy_files/alert_definitions.json
new file mode 100644
index 0000000..6c55966
--- /dev/null
+++ b/ambari-agent/src/test/python/ambari_agent/dummy_files/alert_definitions.json
@@ -0,0 +1,46 @@
+{
+ "c1": [
+ {
+ "name": "namenode_cpu",
+ "label": "NameNode host CPU Utilization",
+ "scope": "host",
+ "source": {
+ "type": "METRIC",
+ "jmx": "java.lang:type=OperatingSystem/SystemCpuLoad",
+ "host": "{{hdfs-site/dfs.namenode.secondary.http-address}}"
+ }
+ },
+ {
+ "name": "namenode_process",
+ "service": "HDFS",
+ "component": "NAMENODE",
+ "label": "NameNode process",
+ "interval": 6,
+ "scope": "host",
+ "source": {
+ "type": "PORT",
+ "uri": "http://c6401.ambari.apache.org:50070",
+ "default_port": 50070,
+ "reporting": {
+ "ok": {
+ "text": "TCP OK - {0:.4f} response time on port {1}"
+ },
+ "critical": {
+ "text": "Could not load process info: {0}"
+ }
+ }
+ }
+ },
+ {
+ "name": "hdfs_last_checkpoint",
+ "label": "Last Checkpoint Time",
+ "interval": 1,
+ "scope": "service",
+ "enabled": false,
+ "source": {
+ "type": "SCRIPT",
+ "path": "scripts/alerts/last_checkpoint.py"
+ }
+ }
+ ]
+}
[22/50] [abbrv] git commit: AMBARI-6879. Hadoop env generated via
tarball on client config download is incorrect and missing java properties.
(mpapirkovskyy)
Posted by jo...@apache.org.
AMBARI-6879. Hadoop env generated via tarball on client config download is incorrect and missing java properties. (mpapirkovskyy)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c9fbc849
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c9fbc849
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c9fbc849
Branch: refs/heads/branch-alerts-dev
Commit: c9fbc849f8a617a834d979c66c185cf0c364c694
Parents: 2745675
Author: Myroslav Papirkovskyy <mp...@hortonworks.com>
Authored: Tue Aug 19 20:16:52 2014 +0300
Committer: Myroslav Papirkovskyy <mp...@hortonworks.com>
Committed: Tue Aug 19 20:18:13 2014 +0300
----------------------------------------------------------------------
.../services/HDFS/package/scripts/params.py | 27 ++++++++++++++++++
.../services/HDFS/package/scripts/params.py | 29 ++++++++++++++++++++
2 files changed, 56 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c9fbc849/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
index 82b9cc4..c68c982 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
@@ -150,3 +150,30 @@ if not "com.hadoop.compression.lzo" in io_compression_codecs:
exclude_packages = ["lzo", "hadoop-lzo", "hadoop-lzo-native"]
else:
exclude_packages = []
+
+
+java_home = config['hostLevelParams']['java_home']
+#hadoop params
+
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
+
+#hadoop-env.sh
+if System.get_instance().os_family == "suse":
+ jsvc_path = "/usr/lib/bigtop-utils"
+else:
+ jsvc_path = "/usr/libexec/bigtop-utils"
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = default("/configurations/mapred-env/jtnode_opt_newsize","200m")
+jtnode_opt_maxnewsize = default("/configurations/mapred-env/jtnode_opt_maxnewsize","200m")
+jtnode_heapsize = default("/configurations/mapred-env/jtnode_heapsize","1024m")
+ttnode_heapsize = default("/configurations/mapred-env/ttnode_heapsize","1024m")
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+
+mapred_pid_dir_prefix = default("/configurations/hadoop-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/c9fbc849/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 9a5e393..6ad04b3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -185,3 +185,32 @@ if not "com.hadoop.compression.lzo" in io_compression_codecs:
else:
exclude_packages = []
name_node_params = default("/commandParams/namenode", None)
+
+#hadoop params
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+
+hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
+
+#hadoop-env.sh
+java_home = config['hostLevelParams']['java_home']
+
+if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+ # deprecated rhel jsvc_path
+ jsvc_path = "/usr/libexec/bigtop-utils"
+else:
+ jsvc_path = "/usr/lib/bigtop-utils"
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = "200m"
+jtnode_opt_maxnewsize = "200m"
+jtnode_heapsize = "1024m"
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
[25/50] [abbrv] git commit: AMBARI-6808 - Ambari Agent DataCleaner
should delete log files when a max size in MB is reached (Alejandro Fernandez
via jonathanhurley)
Posted by jo...@apache.org.
AMBARI-6808 - Ambari Agent DataCleaner should delete log files when a max size in MB is reached (Alejandro Fernandez via jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/30f8a87a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/30f8a87a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/30f8a87a
Branch: refs/heads/branch-alerts-dev
Commit: 30f8a87a657993f2b0632289142fd513e7948e75
Parents: 1c5ceb2
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Aug 19 14:21:16 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Aug 19 14:21:16 2014 -0400
----------------------------------------------------------------------
ambari-agent/conf/unix/ambari-agent.ini | 1 +
.../main/python/ambari_agent/AmbariConfig.py | 1 +
.../src/main/python/ambari_agent/DataCleaner.py | 60 +++++++++++++++++---
.../test/python/ambari_agent/TestDataCleaner.py | 39 ++++++++-----
4 files changed, 79 insertions(+), 22 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/30f8a87a/ambari-agent/conf/unix/ambari-agent.ini
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/ambari-agent.ini b/ambari-agent/conf/unix/ambari-agent.ini
index 9bbef26..162041a 100644
--- a/ambari-agent/conf/unix/ambari-agent.ini
+++ b/ambari-agent/conf/unix/ambari-agent.ini
@@ -24,6 +24,7 @@ tmp_dir=/var/lib/ambari-agent/data/tmp
loglevel=INFO
data_cleanup_interval=86400
data_cleanup_max_age=2592000
+data_cleanup_max_size_MB = 100
ping_port=8670
cache_dir=/var/lib/ambari-agent/cache
tolerate_download_failures=true
http://git-wip-us.apache.org/repos/asf/ambari/blob/30f8a87a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
index 4330eb3..4453161 100644
--- a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
+++ b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
@@ -35,6 +35,7 @@ prefix=/tmp/ambari-agent
tmp_dir=/tmp/ambari-agent/tmp
data_cleanup_interval=86400
data_cleanup_max_age=2592000
+data_cleanup_max_size_MB = 100
ping_port=8670
cache_dir=/var/lib/ambari-agent/cache
http://git-wip-us.apache.org/repos/asf/ambari/blob/30f8a87a/ambari-agent/src/main/python/ambari_agent/DataCleaner.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/DataCleaner.py b/ambari-agent/src/main/python/ambari_agent/DataCleaner.py
index e42caac..0102eef 100644
--- a/ambari-agent/src/main/python/ambari_agent/DataCleaner.py
+++ b/ambari-agent/src/main/python/ambari_agent/DataCleaner.py
@@ -28,23 +28,34 @@ import logging
logger = logging.getLogger()
class DataCleaner(threading.Thread):
- FILE_NAME_PATTERN = 'errors-\d+.txt|output-\d+.txt|site-\d+.pp'
+ FILE_NAME_PATTERN = 'errors-\d+.txt|output-\d+.txt|site-\d+.pp|structured-out-\d+.json|command-\d+.json'
def __init__(self, config):
threading.Thread.__init__(self)
self.daemon = True
logger.info('Data cleanup thread started')
self.config = config
- self.file_max_age = int(config.get('agent','data_cleanup_max_age'))
- if self.file_max_age < 86400:
+
+ self.file_max_age = config.get('agent','data_cleanup_max_age')
+ self.file_max_age = int(self.file_max_age) if self.file_max_age else None
+ if self.file_max_age is None or self.file_max_age < 86400: # keep for at least 24h
logger.warn('The minimum value allowed for data_cleanup_max_age is 1 '
'day. Setting data_cleanup_max_age to 86400.')
self.file_max_age = 86400
- self.cleanup_interval = int(config.get('agent','data_cleanup_interval'))
- if self.cleanup_interval < 3600:
+
+ self.cleanup_interval = config.get('agent','data_cleanup_interval')
+ self.cleanup_interval = int(self.cleanup_interval) if self.cleanup_interval else None
+ if self.cleanup_interval is None or self.cleanup_interval < 3600: # wait at least 1 hour between runs
logger.warn('The minimum value allowed for data_cleanup_interval is 1 '
'hour. Setting data_cleanup_interval to 3600.')
- self.file_max_age = 3600
+ self.cleanup_interval = 3600
+
+ self.cleanup_max_size_MB = config.get('agent', 'data_cleanup_max_size_MB')
+ self.cleanup_max_size_MB = int(self.cleanup_max_size_MB) if self.cleanup_max_size_MB else None
+ if self.cleanup_max_size_MB is None or self.cleanup_max_size_MB > 10000: # no more than 10 GBs
+ logger.warn('The maximum value allowed for cleanup_max_size_MB is 10000 MB (10 GB). '
+ 'Setting cleanup_max_size_MB to 10000.')
+ self.cleanup_max_size_MB = 10000
self.data_dir = config.get('agent','prefix')
self.compiled_pattern = re.compile(self.FILE_NAME_PATTERN)
@@ -54,17 +65,52 @@ class DataCleaner(threading.Thread):
logger.info('Data cleanup thread killed.')
def cleanup(self):
+ logger.debug("Cleaning up inside directory " + self.data_dir)
+ now = time.time()
+ total_size_bytes = 0
+ file_path_to_timestamp = {}
+ file_path_to_size = {}
+
for root, dirs, files in os.walk(self.data_dir):
for f in files:
file_path = os.path.join(root, f)
if self.compiled_pattern.match(f):
try:
- if time.time() - os.path.getmtime(file_path) > self.file_max_age:
+ file_age = now - os.path.getmtime(file_path)
+ if file_age > self.file_max_age:
os.remove(os.path.join(file_path))
logger.debug('Removed file: ' + file_path)
+ else:
+ # Since file wasn't deleted in first pass, consider it for the second one with oldest files first
+ file_size = os.path.getsize(file_path)
+ total_size_bytes += file_size
+ file_path_to_timestamp[file_path] = file_age
+ file_path_to_size[file_path] = file_size
except Exception:
logger.error('Error when removing file: ' + file_path)
+ target_size_bytes = self.cleanup_max_size_MB * 1000000
+ if len(file_path_to_timestamp) and total_size_bytes > target_size_bytes:
+ logger.info("DataCleaner values need to be more aggressive. Current size in bytes for all log files is %d, "
+ "and will try to clean to reach %d bytes." % (total_size_bytes, target_size_bytes))
+ # Prune oldest files first
+ count = 0
+ file_path_oldest_first_list = sorted(file_path_to_timestamp, key=file_path_to_timestamp.get, reverse=True)
+ for file_path in file_path_oldest_first_list:
+ try:
+ os.remove(os.path.join(file_path))
+ total_size_bytes -= file_path_to_size[file_path]
+ count += 1
+ if total_size_bytes <= target_size_bytes:
+ # Finally reached below the cap
+ break
+ except Exception:
+ pass
+ else:
+ # Did not reach below cap.
+ logger.warn("DataCleaner deleted an additional %d files, currently log files occupy %d bytes." %
+ (count, total_size_bytes))
+ pass
def run(self):
while not self.stopped:
http://git-wip-us.apache.org/repos/asf/ambari/blob/30f8a87a/ambari-agent/src/test/python/ambari_agent/TestDataCleaner.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestDataCleaner.py b/ambari-agent/src/test/python/ambari_agent/TestDataCleaner.py
index d385697..b64dc44 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestDataCleaner.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestDataCleaner.py
@@ -28,14 +28,15 @@ class TestDataCleaner(unittest.TestCase):
def setUp(self):
self.test_dir = [('/test_path', [],
- ['errors-12.txt','output-12.txt','site-12.pp','site-13.pp','site-15.pp','version'])]
+ ['errors-12.txt', 'output-12.txt', 'site-12.pp', 'site-13.pp', 'site-15.pp',
+ 'structured-out-13.json', 'command-13.json', 'version'])]
self.config = MagicMock()
- self.config.get.side_effect = [2592000,3600 + 1,"/test_path"]
+ self.config.get.side_effect = [2592000, (3600 + 1), 10000, "/test_path"]
DataCleaner.logger = MagicMock()
def test_init_success(self):
config = MagicMock()
- config.get.return_value = 2592000
+ config.get.side_effect = [2592000, (3600 + 1), 10000, "/test_path"]
DataCleaner.logger.reset_mock()
cleaner = DataCleaner.DataCleaner(config)
self.assertFalse(DataCleaner.logger.warn.called)
@@ -43,45 +44,53 @@ class TestDataCleaner(unittest.TestCase):
def test_init_warn(self):
config = MagicMock()
- config.get.return_value = 10
+ config.get.side_effect = [1, (3600 - 1), (10000 + 1), "/test_path"]
DataCleaner.logger.reset_mock()
cleaner = DataCleaner.DataCleaner(config)
self.assertTrue(DataCleaner.logger.warn.called)
- self.assertTrue(cleaner.file_max_age == 3600)
+ self.assertTrue(cleaner.file_max_age == 86400)
+ self.assertTrue(cleaner.cleanup_interval == 3600)
+ self.assertTrue(cleaner.cleanup_max_size_MB == 10000)
@patch('os.walk')
@patch('time.time')
@patch('os.path.getmtime')
@patch('os.remove')
- def test_cleanup_success(self,remMock,mtimeMock,timeMock,walkMock):
+ @patch('os.path.getsize')
+ def test_cleanup_success(self, sizeMock, remMock, mtimeMock, timeMock, walkMock):
self.config.reset_mock()
DataCleaner.logger.reset_mock()
walkMock.return_value = iter(self.test_dir)
timeMock.return_value = 2592000 + 2
- mtimeMock.side_effect = [1,1,1,2,1,1]
+ mtimeMock.side_effect = [1, 1, 1, 2, 1, 1, 1, 1]
+ sizeMock.return_value = 100
cleaner = DataCleaner.DataCleaner(self.config)
cleaner.cleanup()
- self.assertTrue(len(remMock.call_args_list) == 4)
- remMock.assert_any_call('/test_path/errors-12.txt');
- remMock.assert_any_call('/test_path/output-12.txt');
- remMock.assert_any_call('/test_path/site-12.pp');
- remMock.assert_any_call('/test_path/site-15.pp');
+ self.assertTrue(len(remMock.call_args_list) == 6)
+ remMock.assert_any_call('/test_path/errors-12.txt')
+ remMock.assert_any_call('/test_path/output-12.txt')
+ remMock.assert_any_call('/test_path/site-12.pp')
+ remMock.assert_any_call('/test_path/site-15.pp')
+ remMock.assert_any_call('/test_path/structured-out-13.json')
+ remMock.assert_any_call('/test_path/command-13.json')
pass
@patch('os.walk')
@patch('time.time')
@patch('os.path.getmtime')
@patch('os.remove')
- def test_cleanup_remove_error(self,remMock,mtimeMock,timeMock,walkMock):
+ @patch('os.path.getsize')
+ def test_cleanup_remove_error(self, sizeMock, remMock, mtimeMock, timeMock, walkMock):
self.config.reset_mock()
DataCleaner.logger.reset_mock()
walkMock.return_value = iter(self.test_dir)
timeMock.return_value = 2592000 + 2
- mtimeMock.side_effect = [1,1,1,2,1,1]
+ mtimeMock.side_effect = [1, 1, 1, 2, 1, 1, 1, 1]
+ sizeMock.return_value = 100
def side_effect(arg):
if arg == '/test_path/site-15.pp':
@@ -92,7 +101,7 @@ class TestDataCleaner(unittest.TestCase):
cleaner = DataCleaner.DataCleaner(self.config)
cleaner.cleanup()
- self.assertTrue(len(remMock.call_args_list) == 4)
+ self.assertTrue(len(remMock.call_args_list) == 6)
self.assertTrue(DataCleaner.logger.error.call_count == 1)
pass
[02/50] [abbrv] git commit: AMBARI-6894. UpgradeTest broken on
trunk.(vbrodetskyi)
Posted by jo...@apache.org.
AMBARI-6894. UpgradeTest broken on trunk.(vbrodetskyi)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1730dc5b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1730dc5b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1730dc5b
Branch: refs/heads/branch-alerts-dev
Commit: 1730dc5b87bd3bf9f3ded68a47c7bf2d33725997
Parents: ec1707f
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Aug 18 18:17:49 2014 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Mon Aug 18 18:17:49 2014 +0300
----------------------------------------------------------------------
.../server/upgrade/UpgradeCatalog170.java | 101 ++++++++-----------
1 file changed, 44 insertions(+), 57 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/1730dc5b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index fa47428..9888177 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -348,7 +348,50 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
}
dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('service_config_id_seq', 1)", false);
+ + valueColumnName + ") " + "VALUES('alert_definition_id_seq', 0)",
+ false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('alert_group_id_seq', 0)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('alert_target_id_seq', 0)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('alert_history_id_seq', 0)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('alert_notice_id_seq', 0)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('alert_current_id_seq', 0)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('group_id_seq', 1)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('member_id_seq', 1)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('resource_type_id_seq', 4)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('resource_id_seq', 2)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('principal_type_id_seq', 3)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('principal_id_seq', 2)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('permission_id_seq', 5)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('privilege_id_seq', 1)", false);
+
+ dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ + valueColumnName + ") " + "VALUES('service_config_id_seq', 1)", false);
dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
+ valueColumnName + ") " + "VALUES('service_config_application_id_seq', 1)", false);
@@ -457,62 +500,6 @@ public class UpgradeCatalog170 extends AbstractUpgradeCatalog {
protected void executeDMLUpdates() throws AmbariException, SQLException {
String dbType = getDbType();
- // add new sequences for view entity
- String valueColumnName = "\"value\"";
- if (Configuration.ORACLE_DB_NAME.equals(dbType)
- || Configuration.MYSQL_DB_NAME.equals(dbType)) {
- valueColumnName = "value";
- }
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_definition_id_seq', 0)",
- false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_group_id_seq', 0)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_target_id_seq', 0)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_history_id_seq', 0)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_notice_id_seq', 0)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('alert_current_id_seq', 0)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('group_id_seq', 1)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('member_id_seq', 1)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('resource_type_id_seq', 4)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('resource_id_seq', 2)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('principal_type_id_seq', 3)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('principal_id_seq', 2)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('permission_id_seq', 5)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('privilege_id_seq', 1)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('service_config_id_seq', 1)", false);
-
- dbAccessor.executeQuery("INSERT INTO ambari_sequences(sequence_name, "
- + valueColumnName + ") " + "VALUES('config_id_seq', 1)", false);
-
// Update historic records with the log paths, but only enough so as to not prolong the upgrade process
executeInTransaction(new Runnable() {
@Override
[30/50] [abbrv] git commit: AMBARI-6922 FE: Ambari installer and
service config page should validate configs by calling /validations.
(ababiichuk)
Posted by jo...@apache.org.
AMBARI-6922 FE: Ambari installer and service config page should validate configs by calling /validations. (ababiichuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/818dc161
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/818dc161
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/818dc161
Branch: refs/heads/branch-alerts-dev
Commit: 818dc161fe41b7f9b92e67042b3840e4a8158737
Parents: 4644a82
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Tue Aug 19 22:03:23 2014 +0300
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Tue Aug 19 22:10:08 2014 +0300
----------------------------------------------------------------------
ambari-web/app/controllers/installer.js | 32 ++-
.../controllers/main/service/info/configs.js | 20 +-
.../app/controllers/wizard/step5_controller.js | 18 +-
.../app/controllers/wizard/step6_controller.js | 13 +-
.../app/controllers/wizard/step7_controller.js | 56 +----
ambari-web/app/messages.js | 4 +
ambari-web/app/mixins.js | 1 +
ambari-web/app/mixins/common/serverValidator.js | 239 +++++++++++++++++++
ambari-web/app/models/service_config.js | 15 +-
ambari-web/app/models/stack_service.js | 3 +
ambari-web/app/routes/installer.js | 11 +-
ambari-web/app/utils/blueprint.js | 81 +++++++
ambari-web/app/utils/config.js | 20 +-
ambari-web/test/utils/blueprint_test.js | 137 +++++++++++
14 files changed, 560 insertions(+), 90 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/controllers/installer.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/installer.js b/ambari-web/app/controllers/installer.js
index 03c5afb..8b1fa1f 100644
--- a/ambari-web/app/controllers/installer.js
+++ b/ambari-web/app/controllers/installer.js
@@ -38,6 +38,15 @@ App.InstallerController = App.WizardController.extend({
slaveGroupProperties: null,
stacks: null,
clients:[],
+ /**
+ * recommendations for host groups loaded from server
+ */
+ recommendations: null,
+ /**
+ * recommendationsHostGroups - current component assignment after 5 and 6 steps
+ * (uses for host groups validation and to load recommended configs)
+ */
+ recommendationsHostGroups: null,
controllerName: 'installerController'
}),
@@ -64,7 +73,10 @@ App.InstallerController = App.WizardController.extend({
'stacksVersions',
'currentStep',
'serviceInfo',
- 'hostInfo'
+ 'hostInfo',
+ 'recommendations',
+ 'recommendationsHostGroups',
+ 'recommendationsConfigs'
],
init: function () {
@@ -467,6 +479,18 @@ App.InstallerController = App.WizardController.extend({
this.set("content.masterComponentHosts", masterComponentHosts);
},
+ loadRecommendations: function() {
+ this.set("content.recommendations", this.getDBProperty('recommendations'));
+ },
+
+ loadCurrentHostGroups: function() {
+ this.set("content.recommendationsHostGroups", this.getDBProperty('recommendationsHostGroups'));
+ },
+
+ loadRecommendationsConfigs: function() {
+ App.router.set("wizardStep7Controller.recommendationsConfigs", this.getDBProperty('recommendationsConfigs'));
+ },
+
/**
* Load master component hosts data for using in required step controllers
*/
@@ -685,6 +709,7 @@ App.InstallerController = App.WizardController.extend({
callback: function () {
this.loadMasterComponentHosts();
this.loadConfirmedHosts();
+ this.loadRecommendations();
}
}
],
@@ -694,6 +719,7 @@ App.InstallerController = App.WizardController.extend({
callback: function () {
this.loadSlaveComponentHosts();
this.loadClients();
+ this.loadRecommendations();
}
}
],
@@ -703,6 +729,8 @@ App.InstallerController = App.WizardController.extend({
callback: function () {
this.loadServiceConfigGroups();
this.loadServiceConfigProperties();
+ this.loadCurrentHostGroups();
+ this.loadRecommendationsConfigs();
}
}
]
@@ -739,7 +767,7 @@ App.InstallerController = App.WizardController.extend({
* Clear loaded recommendations
*/
clearRecommendations: function() {
- this.set('recommendations', undefined)
+ this.set('content.recommendations', undefined)
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/controllers/main/service/info/configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/configs.js b/ambari-web/app/controllers/main/service/info/configs.js
index a856d1e..a1b3677 100644
--- a/ambari-web/app/controllers/main/service/info/configs.js
+++ b/ambari-web/app/controllers/main/service/info/configs.js
@@ -21,7 +21,7 @@ require('controllers/wizard/slave_component_groups_controller');
var batchUtils = require('utils/batch_scheduled_requests');
var lazyLoading = require('utils/lazy_loading');
-App.MainServiceInfoConfigsController = Em.Controller.extend({
+App.MainServiceInfoConfigsController = Em.Controller.extend(App.ServerValidatorMixin, {
name: 'mainServiceInfoConfigsController',
isHostsConfigsPage: false,
forceTransition: false,
@@ -1091,15 +1091,17 @@ App.MainServiceInfoConfigsController = Em.Controller.extend({
(serviceName !== 'HDFS' && this.get('content.isStopped') === true) ||
((serviceName === 'HDFS') && this.get('content.isStopped') === true && (!App.Service.find().someProperty('id', 'MAPREDUCE') || App.Service.find('MAPREDUCE').get('isStopped')))) {
- if (this.isDirChanged()) {
- App.showConfirmationPopup(function () {
+ this.serverSideValidation().done(function() {
+ if (self.isDirChanged()) {
+ App.showConfirmationPopup(function () {
+ self.saveConfigs();
+ }, Em.I18n.t('services.service.config.confirmDirectoryChange').format(displayName), function () {
+ self.set('isApplyingChanges', false)
+ });
+ } else {
self.saveConfigs();
- }, Em.I18n.t('services.service.config.confirmDirectoryChange').format(displayName), function () {
- self.set('isApplyingChanges', false)
- });
- } else {
- this.saveConfigs();
- }
+ }
+ });
} else {
status = 'started';
if (this.get('content.serviceName') !== 'HDFS' || (this.get('content.serviceName') === 'HDFS' && !App.Service.find().someProperty('id', 'MAPREDUCE'))) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/controllers/wizard/step5_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step5_controller.js b/ambari-web/app/controllers/wizard/step5_controller.js
index b82fa1c..7df1b8b 100644
--- a/ambari-web/app/controllers/wizard/step5_controller.js
+++ b/ambari-web/app/controllers/wizard/step5_controller.js
@@ -212,11 +212,11 @@ App.WizardStep5Controller = Em.Controller.extend({
return false;
}
- if (App.supports.serverRecommendValidate) {
+ if (App.get('supports.serverRecommendValidate')) {
self.set('submitDisabled', true);
// reset previous recommendations
- App.router.set('installerController.recommendations', null);
+ this.set('content.recommendations', null);
if (self.get('servicesMasters').length === 0) {
return;
@@ -375,7 +375,7 @@ App.WizardStep5Controller = Em.Controller.extend({
console.log("WizardStep5Controller: Loading step5: Assign Masters");
this.clearStep();
this.renderHostInfo();
- if (App.supports.serverRecommendValidate ) {
+ if (App.get('supports.serverRecommendValidate')) {
this.loadComponentsRecommendationsFromServer(this.loadStepCallback);
} else {
this.loadComponentsRecommendationsLocally(this.loadStepCallback);
@@ -478,7 +478,7 @@ App.WizardStep5Controller = Em.Controller.extend({
loadComponentsRecommendationsFromServer: function(callback, includeMasters) {
var self = this;
- if (App.router.get('installerController.recommendations')) {
+ if (this.get('content.recommendations')) {
// Don't do AJAX call if recommendations has been already received
// But if user returns to previous step (selecting services), stored recommendations will be cleared in routers' next handler and AJAX call will be made again
callback(self.createComponentInstallationObjects(), self);
@@ -523,7 +523,7 @@ App.WizardStep5Controller = Em.Controller.extend({
/**
* Create components for displaying component-host comboboxes in UI assign dialog
- * expects installerController.recommendations will be filled with recommendations API call result
+ * expects content.recommendations will be filled with recommendations API call result
* @return {Object[]}
*/
createComponentInstallationObjects: function() {
@@ -538,7 +538,7 @@ App.WizardStep5Controller = Em.Controller.extend({
var masterHosts = self.get('content.masterComponentHosts'); //saved to local storage info
var selectedNotInstalledServices = self.get('content.services').filterProperty('isSelected').filterProperty('isInstalled', false).mapProperty('serviceName');
- var recommendations = App.router.get('installerController.recommendations');
+ var recommendations = this.get('content.recommendations');
var resultComponents = [];
var multipleComponentHasBeenAdded = {};
@@ -616,7 +616,7 @@ App.WizardStep5Controller = Em.Controller.extend({
* @method loadRecommendationsSuccessCallback
*/
loadRecommendationsSuccessCallback: function (data) {
- App.router.set('installerController.recommendations', data.resources[0].recommendations);
+ this.set('content.recommendations', data.resources[0].recommendations);
},
/**
@@ -1025,7 +1025,7 @@ App.WizardStep5Controller = Em.Controller.extend({
// load recommendations with partial request
self.loadComponentsRecommendationsFromServer(function() {
// For validation use latest received recommendations because it contains current master layout and recommended slave/client layout
- self.validate(App.router.get('installerController.recommendations'), function() {
+ self.validate(self.get('content.recommendations'), function() {
if (callback) {
callback();
}
@@ -1047,7 +1047,7 @@ App.WizardStep5Controller = Em.Controller.extend({
}
};
- if (App.supports.serverRecommendValidate ) {
+ if (App.get('supports.serverRecommendValidate')) {
self.recommendAndValidate(function() {
goNextStepIfValid();
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/controllers/wizard/step6_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step6_controller.js b/ambari-web/app/controllers/wizard/step6_controller.js
index a74f020..78c1254 100644
--- a/ambari-web/app/controllers/wizard/step6_controller.js
+++ b/ambari-web/app/controllers/wizard/step6_controller.js
@@ -377,7 +377,7 @@ App.WizardStep6Controller = Em.Controller.extend({
var clientHeaders = headers.findProperty('name', 'CLIENT');
var slaveComponents = this.get('content.slaveComponentHosts');
if (!slaveComponents) { // we are at this page for the first time
- if (!App.supports.serverRecommendValidate) {
+ if (!App.get('supports.serverRecommendValidate')) {
hostsObj.forEach(function (host) {
var checkboxes = host.get('checkboxes');
checkboxes.setEach('checked', !host.hasMaster);
@@ -393,7 +393,7 @@ App.WizardStep6Controller = Em.Controller.extend({
lastHost.get('checkboxes').setEach('checked', true);
}
} else {
- var recommendations = App.router.get('installerController.recommendations');
+ var recommendations = this.get('content.recommendations');
// Get all host-component pairs from recommendations
var componentHostPairs = recommendations.blueprint.host_groups.map(function (group) {
return group.components.map(function (component) {
@@ -495,7 +495,7 @@ App.WizardStep6Controller = Em.Controller.extend({
callValidation: function (successCallback) {
var self = this;
- if (App.supports.serverRecommendValidate) {
+ if (App.get('supports.serverRecommendValidate')) {
self.callServerSideValidation(successCallback);
} else {
var res = self.callClientSideValidation();
@@ -540,7 +540,7 @@ App.WizardStep6Controller = Em.Controller.extend({
var invisibleComponents = invisibleMasters.concat(invisibleSlaves).concat(alreadyInstalledClients);
- var invisibleBlueprint = blueprintUtils.filterByComponents(App.router.get('installerController.recommendations'), invisibleComponents);
+ var invisibleBlueprint = blueprintUtils.filterByComponents(this.get('content.recommendations'), invisibleComponents);
masterBlueprint = blueprintUtils.mergeBlueprints(masterBlueprint, invisibleBlueprint);
} else if (this.get('isAddHostWizard')) {
masterBlueprint = self.getMasterSlaveBlueprintForAddHostWizard();
@@ -548,6 +548,9 @@ App.WizardStep6Controller = Em.Controller.extend({
slaveBlueprint = blueprintUtils.addComponentsToBlueprint(slaveBlueprint, invisibleSlaves);
}
+ var bluePrintsForValidation = blueprintUtils.mergeBlueprints(masterBlueprint, slaveBlueprint);
+ this.set('content.recommendationsHostGroups', bluePrintsForValidation);
+
App.ajax.send({
name: 'config.validations',
sender: self,
@@ -556,7 +559,7 @@ App.WizardStep6Controller = Em.Controller.extend({
hosts: hostNames,
services: services,
validate: 'host_groups',
- recommendations: blueprintUtils.mergeBlueprints(masterBlueprint, slaveBlueprint)
+ recommendations: bluePrintsForValidation
},
success: 'updateValidationsSuccessCallback'
}).
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/controllers/wizard/step7_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step7_controller.js b/ambari-web/app/controllers/wizard/step7_controller.js
index bb222b5..6bf145c 100644
--- a/ambari-web/app/controllers/wizard/step7_controller.js
+++ b/ambari-web/app/controllers/wizard/step7_controller.js
@@ -29,7 +29,7 @@ var stringUtils = require('utils/string_utils');
*
*/
-App.WizardStep7Controller = Em.Controller.extend({
+App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, {
name: 'wizardStep7Controller',
@@ -113,8 +113,6 @@ App.WizardStep7Controller = Em.Controller.extend({
serviceConfigsData: require('data/service_configs'),
- recommendedConfigs: null,
-
/**
* Are advanced configs loaded
* @type {bool}
@@ -600,7 +598,7 @@ App.WizardStep7Controller = Em.Controller.extend({
var s = App.StackService.find(component.get('serviceName')),
defaultGroupSelected = component.get('selectedConfigGroup.isDefault');
- if(!App.supports.serverRecommendValidate) {
+ if(!App.get('supports.serverRecommendValidate')) {
if (s && s.get('configsValidator')) {
var recommendedDefaults = this._getRecommendedDefaultsForComponent(component.get('serviceName'));
s.get('configsValidator').set('recommendedDefaults', recommendedDefaults);
@@ -723,8 +721,8 @@ App.WizardStep7Controller = Em.Controller.extend({
}
//STEP 6: Distribute configs by service and wrap each one in App.ServiceConfigProperty (configs -> serviceConfigs)
var self = this;
- if (App.supports.serverRecommendValidate) {
- this.loadDefaultConfigs(function() {
+ if (App.get('supports.serverRecommendValidate')) {
+ this.loadServerSideConfigsRecommendations().complete(function() {
self.setStepConfigs(configs, storedConfigs);
self.checkHostOverrideInstaller();
self.activateSpecialConfigs();
@@ -771,7 +769,7 @@ App.WizardStep7Controller = Em.Controller.extend({
masterComponentHosts: this.get('wizardController.content.masterComponentHosts'),
slaveComponentHosts: this.get('wizardController.content.slaveComponentHosts')
};
- var serviceConfigs = App.config.renderConfigs(configs, storedConfigs, this.get('allSelectedServiceNames'), this.get('installedServiceNames'), localDB, this.get('recommendedConfigs'));
+ var serviceConfigs = App.config.renderConfigs(configs, storedConfigs, this.get('allSelectedServiceNames'), this.get('installedServiceNames'), localDB, this.get('recommendationsConfigs'));
if (this.get('wizardController.name') === 'addServiceController') {
serviceConfigs.setEach('showConfig', true);
serviceConfigs.setEach('selected', false);
@@ -930,41 +928,6 @@ App.WizardStep7Controller = Em.Controller.extend({
}
},
- loadDefaultConfigs: function(callback) {
- var selectedServices = App.StackService.find().filterProperty('isSelected').mapProperty('serviceName');
- var installedServices = App.StackService.find().filterProperty('isInstalled').mapProperty('serviceName');
- var services = installedServices.concat(selectedServices).uniq();
- this.set('isDefaultsLoaded', false);
- var hostNames = Object.keys(this.get('content.hosts'));
- App.ajax.send({
- 'name': 'wizard.step7.loadrecommendations.configs',
- 'sender': this,
- 'data': {
- stackVersionUrl: App.get('stackVersionURL'),
- hosts: hostNames,
- services: services,
- recommendations: App.router.get('installerController.recommendations')
- },
- 'success': 'loadDefaultConfigsSuccess'
- })
- .retry({
- times: App.maxRetries,
- timeout: App.timeout
- })
- .then(function () {
- callback();
- }, function () {
- App.showReloadPopup();
- console.log('Load recommendations failed');
- });
- },
-
- loadDefaultConfigsSuccess: function(data) {
- if (!data) {
- console.warn('error while loading default config values');
- }
- this.set("recommendedConfigs", Em.get(data.resources[0] , "recommendations.blueprint.configurations"));
- },
/**
* Check if Oozie or Hive use existing database then need
* to restore missed properties
@@ -1360,12 +1323,15 @@ App.WizardStep7Controller = Em.Controller.extend({
* @method submit
*/
submit: function () {
+ if (this.get('isSubmitDisabled')) {
+ return;
+ }
var _this = this;
- if (!this.get('isSubmitDisabled')) {
- this.checkDatabaseConnectionTest().done(function () {
+ this.serverSideValidation().done(function () {
+ _this.checkDatabaseConnectionTest().done(function () {
_this.resolveHiveMysqlDatabase();
});
- }
+ });
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index cd25878..989a657 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -648,6 +648,10 @@ Em.I18n.translations = {
'installer.step7.popup.mySQLWarning.confirmation.body': 'You will be brought back to the \"Assign Masters\" step and will lose all your current customizations. Are you sure?',
'installer.step7.popup.database.connection.header': 'Database Connectivity Warning',
'installer.step7.popup.database.connection.body': 'You have not run or passed the database connection test for: {0}. It is highly recommended that you pass the connection test before proceeding to prevent failures during deployment.',
+ 'installer.step7.popup.validation.failed.header': 'Validation failed.',
+ 'installer.step7.popup.validation.failed.body': 'Some services are not properly configured. You have to change the highlighted configs according to the recommended values.',
+ 'installer.step7.popup.validation.request.failed.body': 'Config validation failed.',
+ 'installer.step7.popup.validation.warning.body': 'Some services are not properly configured. Recommended to change the highlighted configs. Are you sure you want to proceed without changing configs?',
'installer.step7.oozie.database.new': 'New Derby Database',
'installer.step7.hive.database.new': 'New MySQL Database',
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/mixins.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins.js b/ambari-web/app/mixins.js
index 3384391..f087569 100644
--- a/ambari-web/app/mixins.js
+++ b/ambari-web/app/mixins.js
@@ -21,6 +21,7 @@
require('mixins/common/localStorage');
require('mixins/common/userPref');
+require('mixins/common/serverValidator');
require('mixins/models/service_mixin');
require('mixins/common/tableServerProvider');
require('mixins/common/table_server_mixin');
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/mixins/common/serverValidator.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/serverValidator.js b/ambari-web/app/mixins/common/serverValidator.js
new file mode 100644
index 0000000..a4be042
--- /dev/null
+++ b/ambari-web/app/mixins/common/serverValidator.js
@@ -0,0 +1,239 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+var blueprintUtils = require('utils/blueprint');
+
+App.ServerValidatorMixin = Em.Mixin.create({
+
+ /**
+ * @type {bool} set true if at leasst one config has error
+ */
+ configValidationError: false,
+
+ /**
+ * @type {bool} set true if at leasst one config has warning
+ */
+ configValidationWarning: false,
+
+ /**
+ * @type {bool} set true if at leasst one config has warning
+ */
+ configValidationFailed: false,
+
+ /**
+ * recommendation configs loaded from server
+ * (used only during install)
+ * @type {Object}
+ */
+ recommendationsConfigs: null,
+
+ /**
+ * by default loads data from model otherwise must be overridden as computed property
+ * refer to \assets\data\stacks\HDP-2.1\recommendations_configs.json to learn structure
+ * (shouldn't contain configurations filed)
+ * @type {Object}
+ */
+ hostNames: function() {
+ return this.get('content.hosts')
+ ? Object.keys(this.get('content.hosts'))
+ : App.HostComponent.find().mapProperty('hostName').uniq();
+ }.property('content.hosts'),
+
+
+ /**
+ * by default loads data from model otherwise must be overridden as computed property
+ * @type {Array} - of strings (serviceNames)
+ */
+ serviceNames: function() {
+ return this.get('content.serviceName')
+ ? [this.get('content.serviceName')]
+ : App.StackService.find().filter(function(s){return s.get('isSelected') || s.get('isInstalled')}).mapProperty('serviceName');
+ }.property('content.serviceName'),
+
+ /**
+ * by default loads data from model otherwise must be overridden as computed property
+ * filter services that support server validation and concat with misc configs if Installer or current service
+ * @type {Array} - of objects (services)
+ */
+ services: function() {
+ return this.get('content.serviceName')
+ ? [App.StackService.find(this.get('content.serviceName'))]
+ : App.StackService.find().filter(function(s){
+ return s.get('allowServerValidator') && (s.get('isSelected') || s.get('isInsalled'))
+ }).concat(require("data/service_configs"));
+ }.property('content.serviceName'),
+
+ /**
+ * by default loads data from model otherwise must be overridden as computed property
+ * can be used for service|host configs pages
+ * @type {Array} of strings (hostNames)
+ */
+ hostGroups: function() {
+ return this.get('content.recommendationsHostGroups') || blueprintUtils.generateHostGroups(this.get('hostNames'), App.HostComponent.find());
+ }.property('content.recommendationsHostGroups', 'hostNames'),
+
+ /**
+ * controller that is child of this mixis has to contain stepConfigs
+ * @type {Array}
+ */
+ stepConfigs: null,
+
+ /**
+ * @method loadServerSideConfigsRecommendations
+ * laod recommendations from server
+ * (used only during install)
+ * @returns {*}
+ */
+ loadServerSideConfigsRecommendations: function() {
+ if (this.get('recommendationsConfigs') || !App.get('supports.serverRecommendValidate')) {
+ return $.Deferred().resolve();
+ }
+ return App.ajax.send({
+ 'name': 'wizard.step7.loadrecommendations.configs',
+ 'sender': this,
+ 'data': {
+ stackVersionUrl: App.get('stackVersionURL'),
+ hosts: this.get('hostNames'),
+ services: this.get('serviceNames'),
+ recommendations: this.get('hostGroups')
+ },
+ 'success': 'loadRecommendationsSuccess',
+ 'error': 'loadRecommendationsError'
+ });
+ },
+
+ /**
+ * @method loadRecommendationsSuccess
+ * success callback after loading recommendations
+ * (used only during install)
+ * @param data
+ */
+ loadRecommendationsSuccess: function(data) {
+ if (!data) {
+ console.warn('error while loading default config values');
+ }
+ this.set("recommendationsConfigs", Em.get(data.resources[0] , "recommendations.blueprint.configurations"));
+ },
+
+ loadRecommendationsError: function() {
+ console.error('Load recommendations failed');
+ },
+
+ /**
+ * @method serverSideValidation
+ * send request to validate configs
+ * @returns {*}
+ */
+ serverSideValidation: function() {
+ var self = this;
+ var deferred = $.Deferred();
+ if (!App.get('supports.serverRecommendValidate')) {
+ deferred.resolve();
+ return deferred;
+ }
+ var recommendations = this.get('hostGroups');
+ recommendations.blueprint.configurations = blueprintUtils.buildConfisJSON(this.get('services'), this.get('stepConfigs'));
+ App.ajax.send({
+ name: 'config.validations',
+ sender: this,
+ data: {
+ stackVersionUrl: App.get('stackVersionURL'),
+ hosts: this.get('hostNames'),
+ services: this.get('serviceNames'),
+ validate: 'configurations',
+ recommendations: recommendations
+ },
+ success: 'validationSuccess',
+ error: 'validationError'
+ }).complete(function() {
+ self.warnUser(deferred);
+ });
+ return deferred;
+ },
+
+
+ /**
+ * @method validationSuccess
+ * success callback after getting responce from server
+ * go through the step configs and set warn and error messages
+ * @param data
+ */
+ validationSuccess: function(data) {
+ var self = this;
+ self.set('configValidationError', false);
+ self.set('configValidationWarning', false);
+ self.set('configValidationFailed', false);
+ data.resources.forEach(function(r) {
+ r.items.forEach(function(item){
+ if (item.type == "configuration") {
+ self.get('stepConfigs').forEach(function(service) {
+ service.get('configs').forEach(function(property) {
+ if ((property.get('filename') == item['config-type'] + '.xml') && (property.get('name') == item['config-name'])) {
+ if (item.level == "ERROR") {
+ self.set('configValidationError', true);
+ property.set('errorMessage', item.message);
+ property.set('error', true);
+ } else if (item.level == "ERROR") {
+ self.set('configValidationWarning', true);
+ property.set('warnMessage', item.message);
+ property.set('warn', true);
+ }
+ }
+ });
+ })
+ }
+ });
+ });
+ },
+
+ validationError: function() {
+ this.set('configValidationFailed', true);
+ console.error('config validation failed');
+ },
+
+
+ /**
+ * warn user if some errors or warning were
+ * in seting up configs otherwise go to the nex operation
+ * @param deferred
+ * @returns {*}
+ */
+ warnUser: function(deferred) {
+ var self = this;
+ if (this.get('configValidationFailed')) {
+ this.set('isSubmitDisabled', false);
+ this.set("isApplyingChanges", false);
+ return App.showAlertPopup(Em.I18n.t('installer.step7.popup.validation.failed.header'), Em.I18n.t('installer.step7.popup.validation.request.failed.body'));
+ } else if (this.get('configValidationError')) {
+ this.set("isApplyingChanges", false);
+ this.set('isSubmitDisabled', true);
+ return App.showAlertPopup(Em.I18n.t('installer.step7.popup.validation.failed.header'), Em.I18n.t('installer.step7.popup.validation.failed.body'));
+ } else if (this.get('configValidationWarning')) {
+ this.set('isSubmitDisabled', true);
+ this.set("isApplyingChanges", false);
+ return App.showConfirmationPopup(function () {
+ self.set('isSubmitDisabled', false);
+ self.set("isApplyingChanges", true);
+ deferred.resolve();
+ }, Em.I18n.t('installer.step7.popup.validation.warning.body'));
+ } else {
+ deferred.resolve();
+ }
+ }
+});
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/models/service_config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/service_config.js b/ambari-web/app/models/service_config.js
index 7ef67f3..2df2ad7 100644
--- a/ambari-web/app/models/service_config.js
+++ b/ambari-web/app/models/service_config.js
@@ -844,13 +844,14 @@ App.ServiceConfigProperty = Ember.Object.extend({
}
}
}
-
- var serviceValidator = this.get('serviceValidator');
- if (serviceValidator!=null) {
- var validationIssue = serviceValidator.validateConfig(this);
- if (validationIssue) {
- this.set('warnMessage', validationIssue);
- isWarn = true;
+ if (!App.get('supports.serverRecommendValidate')) {
+ var serviceValidator = this.get('serviceValidator');
+ if (serviceValidator!=null) {
+ var validationIssue = serviceValidator.validateConfig(this);
+ if (validationIssue) {
+ this.set('warnMessage', validationIssue);
+ isWarn = true;
+ }
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/models/stack_service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_service.js b/ambari-web/app/models/stack_service.js
index c599c33..0b489f8 100644
--- a/ambari-web/app/models/stack_service.js
+++ b/ambari-web/app/models/stack_service.js
@@ -171,6 +171,9 @@ App.StackService = DS.Model.extend(App.ServiceModelMixin, {
return defaultConfigsHandler && defaultConfigsHandler.configsValidator;
}.property('serviceName'),
+ allowServerValidator: function() {
+ return ["YARN", "STORM", "MAPREDUCE2", "HIVE", "TEZ"].contains(this.get('serviceName'));
+ }.property('serviceName'),
/**
* configCategories are fetched from App.StackService.configCategories.
* Also configCategories that does not match any serviceComponent of a service and not included in the permissible default pattern are omitted
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/routes/installer.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/installer.js b/ambari-web/app/routes/installer.js
index 7abfe96..0a5e46e 100644
--- a/ambari-web/app/routes/installer.js
+++ b/ambari-web/app/routes/installer.js
@@ -250,6 +250,7 @@ module.exports = Em.Route.extend({
controller.saveClients(wizardStep4Controller);
controller.clearRecommendations(); // Force reload recommendation between steps 4 and 5
+ controller.setDBProperty('recommendations', undefined);
controller.setDBProperty('masterComponentHosts', undefined);
router.transitionTo('step5');
}
@@ -273,6 +274,7 @@ module.exports = Em.Route.extend({
var wizardStep5Controller = router.get('wizardStep5Controller');
controller.saveMasterComponentHosts(wizardStep5Controller);
controller.setDBProperty('slaveComponentHosts', undefined);
+ controller.setDBProperty('recommendations', wizardStep5Controller.get('content.recommendations'));
router.transitionTo('step6');
}
}),
@@ -302,6 +304,8 @@ module.exports = Em.Route.extend({
controller.setDBProperty('serviceConfigProperties', null);
controller.setDBProperty('advancedServiceConfig', null);
controller.setDBProperty('serviceConfigGroups', null);
+ controller.setDBProperty('recommendationsHostGroups', wizardStep6Controller.get('content.recommendationsHostGroups'));
+ controller.setDBProperty('recommendationsConfigs', null);
controller.loadAdvancedConfigs(wizardStep7Controller);
router.transitionTo('step7');
}
@@ -326,12 +330,13 @@ module.exports = Em.Route.extend({
},
back: Em.Router.transitionTo('step6'),
next: function (router) {
- var installerController = router.get('installerController');
+ var controller = router.get('installerController');
var wizardStep7Controller = router.get('wizardStep7Controller');
- installerController.saveServiceConfigProperties(wizardStep7Controller);
+ controller.saveServiceConfigProperties(wizardStep7Controller);
if (App.supports.hostOverridesInstaller) {
- installerController.saveServiceConfigGroups(wizardStep7Controller);
+ controller.saveServiceConfigGroups(wizardStep7Controller);
}
+ controller.setDBProperty('recommendationsConfigs', wizardStep7Controller.get('recommendationsConfigs'));
router.transitionTo('step8');
}
}),
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/utils/blueprint.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/blueprint.js b/ambari-web/app/utils/blueprint.js
index b56ceca..f8ec9c2 100644
--- a/ambari-web/app/utils/blueprint.js
+++ b/ambari-web/app/utils/blueprint.js
@@ -185,5 +185,86 @@ module.exports = {
});
return res;
+ },
+
+ /**
+ * @method buildConfisJSON - generate JSON according to blueprint format
+ * @param {Em.Array} stepConfigs - array of Ember Objects
+ * @param {Array} services
+ * @returns {Object}
+ * Example:
+ * {
+ * "yarn-env": {
+ * "properties": {
+ * "content": "some value",
+ * "yarn_heapsize": "1024",
+ * "resourcemanager_heapsize": "1024",
+ * }
+ * },
+ * "yarn-log4j": {
+ * "properties": {
+ * "content": "some other value"
+ * }
+ * }
+ * }
+ */
+ buildConfisJSON: function(services, stepConfigs) {
+ var configurations = {};
+ services.forEach(function(service) {
+ var config = stepConfigs.findProperty('serviceName', service.get('serviceName'));
+ if (config && service.get('configTypes')) {
+ Object.keys(service.get('configTypes')).forEach(function(type) {
+ configurations[type] = {
+ properties: {}
+ }
+ });
+ config.get('configs').forEach(function(property){
+ if (configurations[property.get('filename').replace('.xml','')]){
+ configurations[property.get('filename').replace('.xml','')]['properties'][property.get('name')] = property.get('value');
+ } else {
+ console.warn(property.get('name') + " from " + property.get('filename') + " can't be validate");
+ }
+ });
+ }
+ });
+ return configurations;
+ },
+
+ /**
+ * @method generateHostGroups
+ * @param {Array} hostNames - list of all hostNames
+ * @param {Array} hostComponents - list of all hostComponents
+ * @returns {{blueprint: {host_groups: Array}, blueprint_cluster_binding: {host_groups: Array}}}
+ */
+ generateHostGroups: function(hostNames, hostComponents) {
+ var recommendations = {
+ blueprint: {
+ host_groups: []
+ },
+ blueprint_cluster_binding: {
+ host_groups: []
+ }
+ };
+
+ for (var i = 1; i <= hostNames.length; i++) {
+ var host_group = {
+ name: "host-group-" + i,
+ components: []
+ };
+ var hcFiltered = hostComponents.filterProperty('hostName', hostNames[i-1]).mapProperty('componentName');
+ for (var j = 0; j < hcFiltered.length; j++) {
+ host_group.components.push({
+ name: hcFiltered[j]
+ });
+ }
+ recommendations.blueprint.host_groups.push(host_group);
+ recommendations.blueprint_cluster_binding.host_groups.push({
+ name: "host-group-" + i,
+ hosts: [{
+ fqdn: hostNames[i-1]
+ }]
+ });
+ }
+ return recommendations;
}
};
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/app/utils/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js
index f576f38..62db148 100644
--- a/ambari-web/app/utils/config.js
+++ b/ambari-web/app/utils/config.js
@@ -690,8 +690,8 @@ App.config = Em.Object.create({
// Use calculated default values for some configs
var recommendedDefaults = {};
- if (App.supports.serverRecommendValidate) {
- if (!storedConfigs && service.get('configTypes')) {
+ if (App.get('supports.serverRecommendValidate')) {
+ if (!storedConfigs && service.get('configTypes') && service.get('allowServerValidator')) {
Object.keys(service.get('configTypes')).forEach(function (type) {
if (!recommended || !recommended[type]) {
return;
@@ -727,14 +727,14 @@ App.config = Em.Object.create({
}
});
}
- }
- if (service.get('configsValidator')) {
- service.get('configsValidator').set('recommendedDefaults', recommendedDefaults);
- var validators = service.get('configsValidator').get('configValidators');
- for (var validatorName in validators) {
- var c = configsByService.findProperty('name', validatorName);
- if (c) {
- c.set('serviceValidator', service.get('configsValidator'));
+ if (service.get('configsValidator')) {
+ service.get('configsValidator').set('recommendedDefaults', recommendedDefaults);
+ var validators = service.get('configsValidator').get('configValidators');
+ for (var validatorName in validators) {
+ var c = configsByService.findProperty('name', validatorName);
+ if (c) {
+ c.set('serviceValidator', service.get('configsValidator'));
+ }
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/818dc161/ambari-web/test/utils/blueprint_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/blueprint_test.js b/ambari-web/test/utils/blueprint_test.js
index e527a09..1f8d803 100644
--- a/ambari-web/test/utils/blueprint_test.js
+++ b/ambari-web/test/utils/blueprint_test.js
@@ -276,4 +276,141 @@ describe('utils/blueprint', function() {
);
});
});
+
+ describe('#buildConfisJSON', function () {
+ var tests = [
+ {
+ "services": [
+ Em.Object.create({
+ serviceName: "YARN",
+ configTypes: {
+ "yarn-site": {},
+ "yarn-env": {}
+ },
+ allowServerValidator: true,
+ isInstalled: true
+ })
+ ],
+ "stepConfigs": [
+ Em.Object.create({
+ serviceName: "YARN",
+ configs: [
+ Em.Object.create({
+ name: "p1",
+ value: "v1",
+ filename: "yarn-site.xml"
+ }),
+ Em.Object.create({
+ name: "p2",
+ value: "v2",
+ filename: "yarn-site.xml"
+ }),
+ Em.Object.create({
+ name: "p3",
+ value: "v3",
+ filename: "yarn-env.xml"
+ })
+ ]
+ })
+ ],
+ "configurations": {
+ "yarn-site": {
+ "properties": {
+ "p1": "v1",
+ "p2": "v2"
+ }
+ },
+ "yarn-env": {
+ "properties": {
+ "p3": "v3"
+ }
+ }
+ }
+ }
+ ];
+ tests.forEach(function (test) {
+ it("generate configs for request (use in validation)", function () {
+ expect(blueprintUtils.buildConfisJSON(test.services, test.stepConfigs)).to.eql(test.configurations);
+ });
+ });
+ });
+
+ describe('#generateHostGroups', function () {
+ var tests = [
+ {
+ "hostNames": ["host1", "host2"],
+ "hostComponents": [
+ Em.Object.create({
+ componentName: "C1",
+ hostName: "host1"
+ }),
+ Em.Object.create({
+ componentName: "C2",
+ hostName: "host1"
+ }),
+ Em.Object.create({
+ componentName: "C1",
+ hostName: "host2"
+ }),
+ Em.Object.create({
+ componentName: "C3",
+ hostName: "host2"
+ })
+ ],
+ result: {
+ blueprint: {
+ host_groups: [
+ {
+ name: "host-group-1",
+ "components": [
+ {
+ "name": "C1"
+ },
+ {
+ "name": "C2"
+ }
+ ]
+ },
+ {
+ name: "host-group-2",
+ "components": [
+ {
+ "name": "C1"
+ },
+ {
+ "name": "C3"
+ }
+ ]
+ }
+ ]
+ },
+ blueprint_cluster_binding: {
+ host_groups: [
+ {
+ "name": "host-group-1",
+ "hosts": [
+ {
+ "fqdn": "host1"
+ }
+ ]
+ },
+ {
+ "name": "host-group-2",
+ "hosts": [
+ {
+ "fqdn": "host2"
+ }
+ ]
+ },
+ ]
+ }
+ }
+ }
+ ];
+ tests.forEach(function (test) {
+ it("generate host groups", function () {
+ expect(blueprintUtils.generateHostGroups(test.hostNames, test.hostComponents)).to.eql(test.result);
+ });
+ });
+ });
});
\ No newline at end of file
[42/50] [abbrv] git commit: AMBARI-6853 - Alerts: Calculate Hash
Based On Alert Definitions (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-6853 - Alerts: Calculate Hash Based On Alert Definitions (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/93e61c0b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/93e61c0b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/93e61c0b
Branch: refs/heads/branch-alerts-dev
Commit: 93e61c0badb99995b7cbac5e8a6c159eab446735
Parents: 84c4b43
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Aug 13 15:39:35 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Aug 20 10:44:36 2014 -0400
----------------------------------------------------------------------
.../ambari/server/agent/HeartBeatHandler.java | 93 ++++---
.../ambari/server/agent/HeartBeatResponse.java | 54 ++--
.../AlertDefinitionResourceProvider.java | 4 +
.../server/orm/dao/AlertDefinitionDAO.java | 129 ++++++++-
.../orm/entities/AlertDefinitionEntity.java | 9 +-
.../server/state/alert/AlertDefinitionHash.java | 272 +++++++++++++++++++
.../src/main/resources/properties.json | 3 +-
.../AlertDefinitionResourceProviderTest.java | 9 +-
.../server/orm/dao/AlertDefinitionDAOTest.java | 117 +++++++-
.../state/alerts/AlertDefinitionHashTest.java | 224 +++++++++++++++
10 files changed, 834 insertions(+), 80 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/93e61c0b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
index fa633c1..8a818a6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
@@ -17,20 +17,16 @@
*/
package org.apache.ambari.server.agent;
-import com.google.gson.Gson;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Singleton;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
+
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.HostNotFoundException;
import org.apache.ambari.server.RoleCommand;
@@ -46,7 +42,6 @@ import org.apache.ambari.server.controller.MaintenanceStateHelper;
import org.apache.ambari.server.metadata.ActionMetadata;
import org.apache.ambari.server.state.AgentVersion;
import org.apache.ambari.server.state.Alert;
-import org.apache.ambari.server.state.AlertState;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ComponentInfo;
@@ -63,6 +58,7 @@ import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.alert.AlertDefinitionHash;
import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
import org.apache.ambari.server.state.host.HostHealthyHeartbeatEvent;
import org.apache.ambari.server.state.host.HostRegistrationRequestEvent;
@@ -78,6 +74,11 @@ import org.apache.ambari.server.utils.VersionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import com.google.gson.Gson;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Singleton;
+
/**
* This class handles the heartbeats coming from the agent, passes on the information
@@ -90,29 +91,40 @@ public class HeartBeatHandler {
private final Clusters clusterFsm;
private final ActionQueue actionQueue;
private final ActionManager actionManager;
+ private HeartbeatMonitor heartbeatMonitor;
+
@Inject
- Injector injector;
+ private Injector injector;
+
@Inject
- Configuration config;
+ private Configuration config;
+
@Inject
- AmbariMetaInfo ambariMetaInfo;
+ private AmbariMetaInfo ambariMetaInfo;
+
@Inject
- ActionMetadata actionMetadata;
- private HeartbeatMonitor heartbeatMonitor;
+ private ActionMetadata actionMetadata;
+
@Inject
private Gson gson;
+
@Inject
- ConfigHelper configHelper;
+ private ConfigHelper configHelper;
+
+ @Inject
+ private AlertDefinitionHash alertDefinitionHash;
+
private Map<String, Long> hostResponseIds = new ConcurrentHashMap<String, Long>();
+
private Map<String, HeartBeatResponse> hostResponses = new ConcurrentHashMap<String, HeartBeatResponse>();
@Inject
public HeartBeatHandler(Clusters fsm, ActionQueue aq, ActionManager am,
Injector injector) {
- this.clusterFsm = fsm;
- this.actionQueue = aq;
- this.actionManager = am;
- this.heartbeatMonitor = new HeartbeatMonitor(fsm, aq, am, 60000, injector);
+ clusterFsm = fsm;
+ actionQueue = aq;
+ actionManager = am;
+ heartbeatMonitor = new HeartbeatMonitor(fsm, aq, am, 60000, injector);
injector.injectMembers(this);
}
@@ -130,14 +142,17 @@ public class HeartBeatHandler {
if(heartbeat.getAgentEnv() != null && heartbeat.getAgentEnv().getHostHealth() != null) {
heartbeat.getAgentEnv().getHostHealth().setServerTimeStampAtReporting(now);
}
+
String hostname = heartbeat.getHostname();
Long currentResponseId = hostResponseIds.get(hostname);
HeartBeatResponse response;
+
if (currentResponseId == null) {
//Server restarted, or unknown host.
LOG.error("CurrentResponseId unknown for " + hostname + " - send register command");
return createRegisterCommand();
}
+
LOG.debug("Received heartbeat from host"
+ ", hostname=" + hostname
+ ", currentResponseId=" + currentResponseId
@@ -195,18 +210,23 @@ public class HeartBeatHandler {
// Examine heartbeat for component live status reports
processStatusReports(heartbeat, hostname, clusterFsm);
-
+
// Calculate host status
// NOTE: This step must be after processing command/status reports
processHostStatus(heartbeat, hostname);
-
+
calculateHostAlerts(heartbeat, hostname);
// Send commands if node is active
if (hostObject.getState().equals(HostState.HEALTHY)) {
sendCommands(hostname, response);
annotateResponse(hostname, response);
- }
+ }
+
+ // send the alert definition hash for this host
+ Map<String, String> alertDefinitionHashes = alertDefinitionHash.getHashes(hostname);
+ response.setAlertDefinitionHash(alertDefinitionHashes);
+
return response;
}
@@ -218,7 +238,7 @@ public class HeartBeatHandler {
}
}
}
-
+
protected void processHostStatus(HeartBeat heartbeat, String hostname) throws AmbariException {
Host host = clusterFsm.getHost(hostname);
@@ -268,7 +288,7 @@ public class HeartBeatHandler {
StackId stackId;
Cluster cluster = clusterFsm.getCluster(clusterName);
stackId = cluster.getDesiredStackVersion();
-
+
MaintenanceStateHelper psh = injector.getInstance(MaintenanceStateHelper.class);
List<ServiceComponentHost> scHosts = cluster.getServiceComponentHosts(heartbeat.getHostname());
@@ -347,7 +367,7 @@ public class HeartBeatHandler {
"STOP".equals(report.getCustomCommand())))) {
continue;
}
-
+
Cluster cl = clusterFsm.getCluster(report.getClusterName());
String service = report.getServiceName();
if (service == null || service.isEmpty()) {
@@ -439,7 +459,7 @@ public class HeartBeatHandler {
if (status.getClusterName().equals(cl.getClusterName())) {
try {
Service svc = cl.getService(status.getServiceName());
-
+
String componentName = status.getComponentName();
if (svc.getServiceComponents().containsKey(componentName)) {
ServiceComponent svcComp = svc.getServiceComponent(
@@ -470,7 +490,7 @@ public class HeartBeatHandler {
if (null != status.getConfigTags()) {
scHost.updateActualConfigs(status.getConfigTags());
}
-
+
Map<String, Object> extra = status.getExtra();
if (null != extra && !extra.isEmpty()) {
try {
@@ -479,7 +499,7 @@ public class HeartBeatHandler {
List<Map<String, String>> list = (List<Map<String, String>>) extra.get("processes");
scHost.setProcesses(list);
}
-
+
} catch (Exception e) {
LOG.error("Could not access extra JSON for " +
scHost.getServiceComponentName() + " from " +
@@ -487,7 +507,7 @@ public class HeartBeatHandler {
" (" + e.getMessage() + ")");
}
}
-
+
if (null != status.getAlerts()) {
List<Alert> clusterAlerts = new ArrayList<Alert>();
for (AgentAlert aa : status.getAlerts()) {
@@ -496,14 +516,15 @@ public class HeartBeatHandler {
scHost.getHostName(), aa.getState());
alert.setLabel(aa.getLabel());
alert.setText(aa.getText());
-
+
clusterAlerts.add(alert);
}
-
- if (0 != clusterAlerts.size())
- cl.addAlerts(clusterAlerts);
+
+ if (0 != clusterAlerts.size()) {
+ cl.addAlerts(clusterAlerts);
}
-
+ }
+
} else {
// TODO: What should be done otherwise?
@@ -563,7 +584,7 @@ public class HeartBeatHandler {
throw new AmbariException("Could not get jaxb string for command", e);
}
switch (ac.getCommandType()) {
- case BACKGROUND_EXECUTION_COMMAND:
+ case BACKGROUND_EXECUTION_COMMAND:
case EXECUTION_COMMAND: {
response.addExecutionCommand((ExecutionCommand) ac);
break;
@@ -699,7 +720,7 @@ public class HeartBeatHandler {
* @throws org.apache.ambari.server.AmbariException
*/
private void annotateResponse(String hostname, HeartBeatResponse response) throws AmbariException {
- for (Cluster cl : this.clusterFsm.getClustersForHost(hostname)) {
+ for (Cluster cl : clusterFsm.getClustersForHost(hostname)) {
List<ServiceComponentHost> scHosts = cl.getServiceComponentHosts(hostname);
if (scHosts != null && scHosts.size() > 0) {
response.setHasMappedComponents(true);
@@ -718,19 +739,19 @@ public class HeartBeatHandler {
throws AmbariException {
ComponentsResponse response = new ComponentsResponse();
- Cluster cluster = this.clusterFsm.getCluster(clusterName);
+ Cluster cluster = clusterFsm.getCluster(clusterName);
StackId stackId = cluster.getCurrentStackVersion();
if (stackId == null) {
throw new AmbariException("Cannot provide stack components map. " +
"Stack hasn't been selected yet.");
}
- StackInfo stack = this.ambariMetaInfo.getStackInfo(stackId.getStackName(),
+ StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
stackId.getStackVersion());
response.setClusterName(clusterName);
response.setStackName(stackId.getStackName());
response.setStackVersion(stackId.getStackVersion());
- response.setComponents(this.getComponentsMap(stack));
+ response.setComponents(getComponentsMap(stack));
return response;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/93e61c0b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
index 1670beb..24bd8a2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatResponse.java
@@ -20,26 +20,34 @@ package org.apache.ambari.server.agent;
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
import org.codehaus.jackson.annotate.JsonProperty;
/**
- *
* Controller to Agent response data model.
- *
*/
public class HeartBeatResponse {
private long responseId;
-
- List<ExecutionCommand> executionCommands = new ArrayList<ExecutionCommand>();
- List<StatusCommand> statusCommands = new ArrayList<StatusCommand>();
- List<CancelCommand> cancelCommands = new ArrayList<CancelCommand>();
- RegistrationCommand registrationCommand;
+ private List<ExecutionCommand> executionCommands = new ArrayList<ExecutionCommand>();
+ private List<StatusCommand> statusCommands = new ArrayList<StatusCommand>();
+ private List<CancelCommand> cancelCommands = new ArrayList<CancelCommand>();
+
+ private RegistrationCommand registrationCommand;
+
+ private boolean restartAgent = false;
+ private boolean hasMappedComponents = false;
- boolean restartAgent = false;
- boolean hasMappedComponents = false;
+ /**
+ * A mapping between cluster name and the alert definition hash for that
+ * cluster. The alert definition hash for a cluster is a hashed value of all
+ * of the UUIDs for each alert definition that the agent host should be
+ * scheduling. If any of the alert definitions change, their UUID will change
+ * which will cause this hash value to change.
+ */
+ private Map<String, String> alertDefinitionHashes = null;
@JsonProperty("responseId")
public long getResponseId() {
@@ -111,6 +119,16 @@ public class HeartBeatResponse {
this.hasMappedComponents = hasMappedComponents;
}
+ @JsonProperty("alertDefinitionHashes")
+ public Map<String, String> getAlertDefinitionHash() {
+ return alertDefinitionHashes;
+ }
+
+ @JsonProperty("alertDefinitionHashes")
+ public void setAlertDefinitionHash(Map<String, String> alertDefinitionHashes) {
+ this.alertDefinitionHashes = alertDefinitionHashes;
+ }
+
public void addExecutionCommand(ExecutionCommand execCmd) {
executionCommands.add(execCmd);
}
@@ -125,13 +143,15 @@ public class HeartBeatResponse {
@Override
public String toString() {
- return "HeartBeatResponse{" +
- "responseId=" + responseId +
- ", executionCommands=" + executionCommands +
- ", statusCommands=" + statusCommands +
- ", cancelCommands=" + cancelCommands +
- ", registrationCommand=" + registrationCommand +
- ", restartAgent=" + restartAgent +
- '}';
+ StringBuilder buffer = new StringBuilder("HeartBeatResponse{");
+ buffer.append("responseId=").append(responseId);
+ buffer.append(", executionCommands=").append(executionCommands);
+ buffer.append(", statusCommands=").append(statusCommands);
+ buffer.append(", cancelCommands=").append(cancelCommands);
+ buffer.append(", registrationCommand=").append(registrationCommand);
+ buffer.append(", restartAgent=").append(restartAgent);
+ buffer.append(", alertDefinitionHashes=").append(alertDefinitionHashes);
+ buffer.append('}');
+ return buffer.toString();
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/93e61c0b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
index 07b033e..5ea6d3b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProvider.java
@@ -65,6 +65,7 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
protected static final String ALERT_DEF_COMPONENT_NAME = "AlertDefinition/component_name";
protected static final String ALERT_DEF_ENABLED = "AlertDefinition/enabled";
protected static final String ALERT_DEF_SCOPE = "AlertDefinition/scope";
+ protected static final String ALERT_DEF_UUID = "AlertDefinition/uuid";
private static Set<String> pkPropertyIds = new HashSet<String>(
Arrays.asList(ALERT_DEF_ID, ALERT_DEF_NAME));
@@ -354,6 +355,9 @@ public class AlertDefinitionResourceProvider extends AbstractControllerResourceP
setResourceProperty(resource, ALERT_DEF_LABEL, entity.getLabel(),
requestedIds);
+ setResourceProperty(resource, ALERT_DEF_UUID, entity.getHash(),
+ requestedIds);
+
if (!isCollection && null != resource.getPropertyValue(ALERT_DEF_SOURCE_TYPE)) {
try {
http://git-wip-us.apache.org/repos/asf/ambari/blob/93e61c0b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
index 05881e4..db5c63f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAO.java
@@ -17,12 +17,16 @@
*/
package org.apache.ambari.server.orm.dao;
+import java.util.Collections;
import java.util.List;
+import java.util.Set;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
+import org.apache.ambari.server.controller.RootServiceResponseFactory;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
+import org.apache.ambari.server.state.alert.Scope;
import com.google.inject.Inject;
import com.google.inject.Provider;
@@ -61,7 +65,7 @@ public class AlertDefinitionDAO {
/**
* Gets an alert definition with the specified ID.
- *
+ *
* @param definitionId
* the ID of the definition to retrieve.
* @return the alert definition or {@code null} if none exists.
@@ -74,7 +78,7 @@ public class AlertDefinitionDAO {
/**
* Gets an alert definition with the specified name. Alert definition names
* are unique within a cluster.
- *
+ *
* @param clusterId
* the ID of the cluster.
* @param definitionName
@@ -93,7 +97,7 @@ public class AlertDefinitionDAO {
/**
* Gets all alert definitions stored in the database.
- *
+ *
* @return all alert definitions or an empty list if none exist (never
* {@code null}).
*/
@@ -106,7 +110,7 @@ public class AlertDefinitionDAO {
/**
* Gets all alert definitions stored in the database.
- *
+ *
* @return all alert definitions or empty list if none exist (never
* {@code null}).
*/
@@ -120,8 +124,114 @@ public class AlertDefinitionDAO {
}
/**
+ * Gets all alert definitions for the given service in the specified cluster.
+ *
+ * @param clusterId
+ * the ID of the cluster.
+ * @param serviceName
+ * the name of the service.
+ *
+ * @return all alert definitions for the service or empty list if none exist
+ * (never {@code null}).
+ */
+ public List<AlertDefinitionEntity> findByService(long clusterId,
+ String serviceName) {
+ TypedQuery<AlertDefinitionEntity> query = entityManagerProvider.get().createNamedQuery(
+ "AlertDefinitionEntity.findByService", AlertDefinitionEntity.class);
+
+ query.setParameter("clusterId", clusterId);
+ query.setParameter("serviceName", serviceName);
+
+ return daoUtils.selectList(query);
+ }
+
+ /**
+ * Gets all alert definitions for the specified services that do not have a
+ * component. These definitions are assumed to be run on the master hosts.
+ *
+ * @param clusterId
+ * the ID of the cluster.
+ * @param services
+ * the services to match on.
+ *
+ * @return all alert definitions for the services or empty list if none exist
+ * (never {@code null}).
+ */
+ public List<AlertDefinitionEntity> findByServiceMaster(long clusterId,
+ Set<String> services) {
+ if (null == services || services.size() == 0) {
+ return Collections.emptyList();
+ }
+
+ TypedQuery<AlertDefinitionEntity> query = entityManagerProvider.get().createNamedQuery(
+ "AlertDefinitionEntity.findByServiceMaster",
+ AlertDefinitionEntity.class);
+
+ query.setParameter("clusterId", clusterId);
+ query.setParameter("services", services);
+ query.setParameter("scope", Scope.SERVICE);
+
+ return daoUtils.selectList(query);
+ }
+
+ /**
+ * Gets all alert definitions that are not bound to a particular service. An
+ * example of this type of definition is a host capacity alert.
+ *
+ * @param clusterId
+ * the ID of the cluster.
+ * @param serviceName
+ * the name of the service (not {@code null}).
+ * @param componentName
+ * the name of the service component (not {@code null}).
+ * @return all alert definitions that are not bound to a service or an empty
+ * list (never {@code null}).
+ */
+ public List<AlertDefinitionEntity> findByServiceComponent(long clusterId,
+ String serviceName, String componentName) {
+ if (null == serviceName || null == componentName) {
+ return Collections.emptyList();
+ }
+
+ TypedQuery<AlertDefinitionEntity> query = entityManagerProvider.get().createNamedQuery(
+ "AlertDefinitionEntity.findByServiceAndComponent",
+ AlertDefinitionEntity.class);
+
+ query.setParameter("clusterId", clusterId);
+ query.setParameter("serviceName", serviceName);
+ query.setParameter("componentName", componentName);
+
+ return daoUtils.selectList(query);
+ }
+
+ /**
+ * Gets all alert definitions that are not bound to a particular service. An
+ * example of this type of definition is a host capacity alert.
+ *
+ * @param clusterId
+ * the ID of the cluster.
+ * @return all alert definitions that are not bound to a service or an empty
+ * list (never {@code null}).
+ */
+ public List<AlertDefinitionEntity> findAgentScoped(long clusterId) {
+ TypedQuery<AlertDefinitionEntity> query = entityManagerProvider.get().createNamedQuery(
+ "AlertDefinitionEntity.findByServiceAndComponent",
+ AlertDefinitionEntity.class);
+
+ query.setParameter("clusterId", clusterId);
+
+ query.setParameter("serviceName",
+ RootServiceResponseFactory.Services.AMBARI.name());
+
+ query.setParameter("componentName",
+ RootServiceResponseFactory.Components.AMBARI_AGENT.name());
+
+ return daoUtils.selectList(query);
+ }
+
+ /**
* Persists a new alert definition.
- *
+ *
* @param alertDefinition
* the definition to persist (not {@code null}).
*/
@@ -132,7 +242,7 @@ public class AlertDefinitionDAO {
/**
* Refresh the state of the alert definition from the database.
- *
+ *
* @param alertDefinition
* the definition to refresh (not {@code null}).
*/
@@ -144,7 +254,7 @@ public class AlertDefinitionDAO {
/**
* Merge the specified alert definition with the existing definition in the
* database.
- *
+ *
* @param alertDefinition
* the definition to merge (not {@code null}).
* @return the updated definition with merged content (never {@code null}).
@@ -157,7 +267,7 @@ public class AlertDefinitionDAO {
/**
* Removes the specified alert definition and all related history and
* associations from the database.
- *
+ *
* @param alertDefinition
* the definition to remove.
*/
@@ -170,7 +280,8 @@ public class AlertDefinitionDAO {
EntityManager entityManager = entityManagerProvider.get();
alertDefinition = findById(alertDefinition.getDefinitionId());
- if (null != alertDefinition)
+ if (null != alertDefinition) {
entityManager.remove(alertDefinition);
+ }
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/93e61c0b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
index 0062388..c93702a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/AlertDefinitionEntity.java
@@ -50,9 +50,12 @@ import org.apache.ambari.server.state.alert.Scope;
"cluster_id", "definition_name" }))
@TableGenerator(name = "alert_definition_id_generator", table = "ambari_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_value", pkColumnValue = "alert_definition_id_seq", initialValue = 0, allocationSize = 1)
@NamedQueries({
- @NamedQuery(name = "AlertDefinitionEntity.findAll", query = "SELECT alertDefinition FROM AlertDefinitionEntity alertDefinition"),
- @NamedQuery(name = "AlertDefinitionEntity.findAllInCluster", query = "SELECT alertDefinition FROM AlertDefinitionEntity alertDefinition WHERE alertDefinition.clusterId = :clusterId"),
- @NamedQuery(name = "AlertDefinitionEntity.findByName", query = "SELECT alertDefinition FROM AlertDefinitionEntity alertDefinition WHERE alertDefinition.definitionName = :definitionName AND alertDefinition.clusterId = :clusterId"), })
+ @NamedQuery(name = "AlertDefinitionEntity.findAll", query = "SELECT ad FROM AlertDefinitionEntity ad"),
+ @NamedQuery(name = "AlertDefinitionEntity.findAllInCluster", query = "SELECT ad FROM AlertDefinitionEntity ad WHERE ad.clusterId = :clusterId"),
+ @NamedQuery(name = "AlertDefinitionEntity.findByName", query = "SELECT ad FROM AlertDefinitionEntity ad WHERE ad.definitionName = :definitionName AND ad.clusterId = :clusterId"),
+ @NamedQuery(name = "AlertDefinitionEntity.findByService", query = "SELECT ad FROM AlertDefinitionEntity ad WHERE ad.serviceName = :serviceName AND ad.clusterId = :clusterId"),
+ @NamedQuery(name = "AlertDefinitionEntity.findByServiceAndComponent", query = "SELECT ad FROM AlertDefinitionEntity ad WHERE ad.serviceName = :serviceName AND ad.componentName = :componentName AND ad.clusterId = :clusterId"),
+ @NamedQuery(name = "AlertDefinitionEntity.findByServiceMaster", query = "SELECT ad FROM AlertDefinitionEntity ad WHERE ad.serviceName IN :services AND ad.scope = :scope AND ad.clusterId = :clusterId") })
public class AlertDefinitionEntity {
@Id
http://git-wip-us.apache.org/repos/asf/ambari/blob/93e61c0b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java
new file mode 100644
index 0000000..1f31c35
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/alert/AlertDefinitionHash.java
@@ -0,0 +1,272 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.alert;
+
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
+import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.commons.codec.binary.Hex;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+
+/**
+ * The {@link AlertDefinitionHash} class is used to generate an MD5 hash for a
+ * list of {@link AlertDefinitionEntity}s. It is used in order to represent the
+ * state of a group of definitions by using
+ * {@link AlertDefinitionEntity#getHash()}
+ */
+@Singleton
+public class AlertDefinitionHash {
+
+ /**
+ * Logger.
+ */
+ private final static Logger LOG = LoggerFactory.getLogger(AlertDefinitionHash.class);
+
+ /**
+ * The hash returned when there are no definitions to hash.
+ */
+ public static String NULL_MD5_HASH = "37a6259cc0c1dae299a7866489dff0bd";
+
+ /**
+ * DAO for retrieving {@link AlertDefinitionEntity} instances.
+ */
+ @Inject
+ private AlertDefinitionDAO m_definitionDao;
+
+ /**
+ * All clusters.
+ */
+ @Inject
+ private Clusters m_clusters;
+
+ /**
+ * The hashes for all hosts.
+ */
+ private Map<String, String> m_hashes = new ConcurrentHashMap<String, String>();
+
+ /**
+ * Gets a unique hash value representing all of the alert definitions that
+ * should be scheduled to run on a given host.
+ * <p/>
+ * This will not include alert definitions where the type is defined as
+ * {@link SourceType#AGGREGATE} since aggregate definitions are not scheduled
+ * to run on agent hosts.
+ * <p/>
+ * Hash values from this method are cached.
+ *
+ * @param clusterName
+ * the cluster name (not {@code null}).
+ * @param hostName
+ * the host name (not {@code null}).
+ * @return the unique hash or {@value #NULL_MD5_HASH} if none.
+ */
+ public String getHash(String clusterName, String hostName) {
+ String hash = m_hashes.get(hostName);
+ if (null != hash) {
+ return hash;
+ }
+
+ hash = hash(clusterName, hostName);
+ m_hashes.put(hostName, hash);
+
+ return hash;
+ }
+
+ /**
+ * Gets a mapping between cluster and alert definition hashes for all of the
+ * clusters that the given host belongs to.
+ *
+ * @param hostName
+ * the host name (not {@code null}).
+ * @return a mapping between cluster and alert definition hash or an empty map
+ * (never {@code null}).
+ * @see #getHash(String, String)
+ * @throws AmbariException
+ */
+ public Map<String, String> getHashes(String hostName)
+ throws AmbariException {
+ Set<Cluster> clusters = m_clusters.getClustersForHost(hostName);
+ if (null == clusters || clusters.size() == 0) {
+ return Collections.emptyMap();
+ }
+
+ Map<String, String> hashes = new HashMap<String, String>();
+ for (Cluster cluster : clusters) {
+ String clusterName = cluster.getClusterName();
+ String hash = getHash(clusterName, hostName);
+ hashes.put(clusterName, hash);
+ }
+
+ return hashes;
+ }
+
+ /**
+ * Gets the alert definitions for the specified host. This will include the
+ * following types of alert definitions:
+ * <ul>
+ * <li>Service/Component alerts</li>
+ * <li>Service alerts where the host is a MASTER</li>
+ * <li>Host alerts that are not bound to a service</li>
+ * </ul>
+ *
+ * @param clusterName
+ * the cluster name (not {@code null}).
+ * @param hostName
+ * the host name (not {@code null}).
+ * @return the alert definitions for the host, or an empty set (never
+ * {@code null}).
+ */
+ public Set<AlertDefinitionEntity> getAlertDefinitions(String clusterName,
+ String hostName) {
+ Set<AlertDefinitionEntity> definitions = new HashSet<AlertDefinitionEntity>();
+
+ try {
+ Cluster cluster = m_clusters.getCluster(clusterName);
+ if (null == cluster) {
+ LOG.warn("Unable to get alert definitions for the missing cluster {}",
+ clusterName);
+
+ return Collections.emptySet();
+ }
+
+ long clusterId = cluster.getClusterId();
+ List<ServiceComponentHost> serviceComponents = cluster.getServiceComponentHosts(hostName);
+ if (null == serviceComponents || serviceComponents.size() == 0) {
+ LOG.warn(
+ "Unable to get alert definitions for {} since there are no service components defined",
+ hostName);
+
+ return Collections.emptySet();
+ }
+
+ for (ServiceComponentHost serviceComponent : serviceComponents) {
+ String serviceName = serviceComponent.getServiceName();
+ String componentName = serviceComponent.getServiceComponentName();
+
+ // add all alerts for this service/component pair
+ definitions.addAll(m_definitionDao.findByServiceComponent(
+ clusterId, serviceName, componentName));
+ }
+
+ // for every service, get the master components and see if the host
+ // is a master
+ Set<String> services = new HashSet<String>();
+ for (Entry<String, Service> entry : cluster.getServices().entrySet()) {
+ Service service = entry.getValue();
+ Map<String, ServiceComponent> components = service.getServiceComponents();
+ for (Entry<String, ServiceComponent> component : components.entrySet()) {
+ if (component.getValue().isMasterComponent()) {
+ Map<String, ServiceComponentHost> hosts = component.getValue().getServiceComponentHosts();
+
+ if( hosts.containsKey( hostName ) ){
+ services.add(service.getName());
+ }
+ }
+ }
+ }
+
+ // add all service scoped alerts
+ if( services.size() > 0 ){
+ definitions.addAll(m_definitionDao.findByServiceMaster(clusterId,
+ services));
+ }
+
+ // add any alerts not bound to a service (host level alerts)
+ definitions.addAll(m_definitionDao.findAgentScoped(clusterId));
+ } catch (AmbariException ambariException) {
+ LOG.error("Unable to get alert definitions", ambariException);
+ return Collections.emptySet();
+ }
+
+ return definitions;
+ }
+
+ /**
+ * Calculates a unique hash value representing all of the alert definitions
+ * that should be scheduled to run on a given host. Alerts of type
+ * {@link SourceType#AGGREGATE} are not included in the hash since they are
+ * not run on the agents.
+ *
+ * @param clusterName
+ * the cluster name (not {@code null}).
+ * @param hostName
+ * the host name (not {@code null}).
+ * @return the unique hash or {@value #NULL_MD5_HASH} if none.
+ */
+ private String hash(String clusterName, String hostName) {
+ Set<AlertDefinitionEntity> definitions = getAlertDefinitions(clusterName,
+ hostName);
+
+ // no definitions found for this host, don't bother hashing
+ if( null == definitions || definitions.size() == 0 ) {
+ return NULL_MD5_HASH;
+ }
+
+ // strip out all AGGREGATE types
+ Iterator<AlertDefinitionEntity> iterator = definitions.iterator();
+ while (iterator.hasNext()) {
+ if (SourceType.AGGREGATE.equals(iterator.next().getSourceType())) {
+ iterator.remove();
+ }
+ }
+
+ // build the UUIDs
+ List<String> uuids = new ArrayList<String>(definitions.size());
+ for (AlertDefinitionEntity definition : definitions) {
+ uuids.add(definition.getHash());
+ }
+
+ // sort the UUIDs so that the digest is created with bytes in the same order
+ Collections.sort(uuids);
+
+ try {
+ MessageDigest digest = MessageDigest.getInstance("MD5");
+ for (String uuid : uuids) {
+ digest.update(uuid.getBytes());
+ }
+
+ byte[] hashBytes = digest.digest();
+ return Hex.encodeHexString(hashBytes);
+ } catch (NoSuchAlgorithmException nsae) {
+ LOG.warn("Unable to calculate MD5 alert definition hash", nsae);
+ return NULL_MD5_HASH;
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/93e61c0b/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index 6686f39..fec8aa2 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -421,7 +421,8 @@
"AlertDefinition/interval",
"AlertDefinition/enabled",
"AlertDefinition/scope",
- "AlertDefinition/source"
+ "AlertDefinition/source",
+ "AlertDefinition/uuid"
],
"Controller":[
"Controllers/name",
http://git-wip-us.apache.org/repos/asf/ambari/blob/93e61c0b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
index bf51ecb..cf6ff42 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertDefinitionResourceProviderTest.java
@@ -34,6 +34,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.UUID;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.spi.Predicate;
@@ -57,6 +58,8 @@ public class AlertDefinitionResourceProviderTest {
AlertDefinitionDAO dao = null;
+ private static String DEFINITION_UUID = UUID.randomUUID().toString();
+
@Before
public void before() {
dao = createStrictMock(AlertDefinitionDAO.class);
@@ -82,6 +85,7 @@ public class AlertDefinitionResourceProviderTest {
AlertDefinitionResourceProvider.ALERT_DEF_CLUSTER_NAME,
AlertDefinitionResourceProvider.ALERT_DEF_ID,
AlertDefinitionResourceProvider.ALERT_DEF_NAME,
+ AlertDefinitionResourceProvider.ALERT_DEF_UUID,
AlertDefinitionResourceProvider.ALERT_DEF_LABEL);
AmbariManagementController amc = createMock(AmbariManagementController.class);
@@ -110,6 +114,9 @@ public class AlertDefinitionResourceProviderTest {
Assert.assertEquals("Mock Label",
r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_LABEL));
+ Assert.assertEquals(DEFINITION_UUID,
+ r.getPropertyValue(AlertDefinitionResourceProvider.ALERT_DEF_UUID));
+
verify(amc, clusters, cluster, dao);
}
@@ -331,7 +338,7 @@ public class AlertDefinitionResourceProviderTest {
entity.setDefinitionName("my_def");
entity.setLabel("Mock Label");
entity.setEnabled(true);
- entity.setHash("tmphash");
+ entity.setHash(DEFINITION_UUID);
entity.setScheduleInterval(Integer.valueOf(2));
entity.setServiceName(null);
entity.setSourceType("metric");
http://git-wip-us.apache.org/repos/asf/ambari/blob/93e61c0b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAOTest.java
index f2ddcd7..d621a9b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/AlertDefinitionDAOTest.java
@@ -28,6 +28,7 @@ import java.util.List;
import java.util.TimeZone;
import java.util.UUID;
+import org.apache.ambari.server.controller.RootServiceResponseFactory;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.OrmTestHelper;
@@ -41,7 +42,6 @@ import org.apache.ambari.server.state.MaintenanceState;
import org.apache.ambari.server.state.NotificationState;
import org.apache.ambari.server.state.alert.Scope;
import org.junit.After;
-import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@@ -65,7 +65,7 @@ public class AlertDefinitionDAOTest {
OrmTestHelper helper;
/**
- *
+ *
*/
@Before
public void setup() {
@@ -78,7 +78,9 @@ public class AlertDefinitionDAOTest {
helper = injector.getInstance(OrmTestHelper.class);
clusterId = helper.createCluster();
- for (int i = 0; i < 8; i++) {
+ // create 8 HDFS alerts
+ int i = 0;
+ for (; i < 8; i++) {
AlertDefinitionEntity definition = new AlertDefinitionEntity();
definition.setDefinitionName("Alert Definition " + i);
definition.setServiceName("HDFS");
@@ -91,6 +93,58 @@ public class AlertDefinitionDAOTest {
definition.setSourceType("SCRIPT");
dao.create(definition);
}
+
+ // create 2 HDFS with components
+ for (; i < 10; i++) {
+ AlertDefinitionEntity definition = new AlertDefinitionEntity();
+ definition.setDefinitionName("Alert Definition " + i);
+ definition.setServiceName("HDFS");
+
+ if (i == 9) {
+ definition.setComponentName("NAMENODE");
+ } else {
+ definition.setComponentName("DATANODE");
+ }
+
+ definition.setClusterId(clusterId);
+ definition.setHash(UUID.randomUUID().toString());
+ definition.setScheduleInterval(60);
+ definition.setScope(Scope.SERVICE);
+ definition.setSource("Source " + i);
+ definition.setSourceType("SCRIPT");
+ dao.create(definition);
+ }
+
+ // create 2 host scoped
+ for (; i < 12; i++) {
+ AlertDefinitionEntity definition = new AlertDefinitionEntity();
+ definition.setDefinitionName("Alert Definition " + i);
+ definition.setServiceName("OOZIE");
+ definition.setComponentName("OOZIE_SERVER");
+ definition.setClusterId(clusterId);
+ definition.setHash(UUID.randomUUID().toString());
+ definition.setScheduleInterval(60);
+ definition.setScope(Scope.HOST);
+ definition.setSource("Source " + i);
+ definition.setSourceType("SCRIPT");
+ dao.create(definition);
+ }
+
+ // create 3 agent alerts
+ for (; i < 15; i++) {
+ AlertDefinitionEntity definition = new AlertDefinitionEntity();
+ definition.setDefinitionName("Alert Definition " + i);
+ definition.setServiceName(RootServiceResponseFactory.Services.AMBARI.name());
+ definition.setComponentName(RootServiceResponseFactory.Components.AMBARI_AGENT.name());
+ definition.setClusterId(clusterId);
+ definition.setHash(UUID.randomUUID().toString());
+ definition.setScheduleInterval(60);
+ definition.setScope(Scope.HOST);
+ definition.setSource("Source " + i);
+ definition.setSourceType("SCRIPT");
+ dao.create(definition);
+ }
+
}
@After
@@ -100,40 +154,77 @@ public class AlertDefinitionDAOTest {
}
/**
- *
+ *
*/
@Test
public void testFindByName() {
List<AlertDefinitionEntity> definitions = dao.findAll();
- Assert.assertNotNull(definitions);
+ assertNotNull(definitions);
AlertDefinitionEntity definition = definitions.get(2);
AlertDefinitionEntity retrieved = dao.findByName(
definition.getClusterId(), definition.getDefinitionName());
- Assert.assertEquals(definition, retrieved);
+ assertEquals(definition, retrieved);
}
/**
- *
+ *
*/
@Test
public void testFindAll() {
List<AlertDefinitionEntity> definitions = dao.findAll();
- Assert.assertNotNull(definitions);
- Assert.assertEquals(8, definitions.size());
+ assertNotNull(definitions);
+ assertEquals(15, definitions.size());
}
/**
- *
+ *
*/
@Test
- public void findById() {
+ public void testFindById() {
List<AlertDefinitionEntity> definitions = dao.findAll();
- Assert.assertNotNull(definitions);
+ assertNotNull(definitions);
AlertDefinitionEntity definition = definitions.get(2);
AlertDefinitionEntity retrieved = dao.findById(definition.getDefinitionId());
+ assertEquals(definition, retrieved);
+ }
+
+ /**
+ *
+ */
+ @Test
+ public void testFindByService() {
+ List<AlertDefinitionEntity> definitions = dao.findByService(clusterId,
+ "HDFS");
+
+ assertNotNull(definitions);
+ assertEquals(10, definitions.size());
+
+ definitions = dao.findByService(clusterId, "YARN");
+ assertNotNull(definitions);
+ assertEquals(0, definitions.size());
+ }
- Assert.assertEquals(definition, retrieved);
+ /**
+ *
+ */
+ @Test
+ public void testFindByServiceComponent() {
+ List<AlertDefinitionEntity> definitions = dao.findByServiceComponent(
+ clusterId, "OOZIE", "OOZIE_SERVER");
+
+ assertNotNull(definitions);
+ assertEquals(2, definitions.size());
+ }
+
+ /**
+ *
+ */
+ @Test
+ public void testFindAgentScoped() {
+ List<AlertDefinitionEntity> definitions = dao.findAgentScoped(clusterId);
+ assertNotNull(definitions);
+ assertEquals(3, definitions.size());
}
@Test
http://git-wip-us.apache.org/repos/asf/ambari/blob/93e61c0b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java
new file mode 100644
index 0000000..937417a
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertDefinitionHashTest.java
@@ -0,0 +1,224 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.alerts;
+
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.expect;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+
+import junit.framework.TestCase;
+
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
+import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.alert.AlertDefinitionHash;
+import org.apache.ambari.server.state.alert.Scope;
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.util.Modules;
+
+/**
+ * Tests for {@link AlertDefinitionHash}.
+ */
+public class AlertDefinitionHashTest extends TestCase {
+
+ private AlertDefinitionHash m_hash;
+ private Clusters m_mockClusters;
+ private Cluster m_mockCluster;
+ private AlertDefinitionDAO m_mockDao;
+ private Injector m_injector;
+
+ private static final String CLUSTERNAME = "cluster1";
+ private static final String HOSTNAME = "c6401.ambari.apache.org";
+
+ /**
+ *
+ */
+ @Override
+ @Before
+ protected void setUp() throws Exception {
+ super.setUp();
+
+ m_injector = Guice.createInjector(Modules.override(
+ new InMemoryDefaultTestModule()).with(new MockModule()));
+
+ m_mockClusters = m_injector.getInstance(Clusters.class);
+ m_mockCluster = m_injector.getInstance(Cluster.class);
+ m_mockDao = m_injector.getInstance(AlertDefinitionDAO.class);
+
+ // add HDFS/NN
+ List<ServiceComponentHost> serviceComponentHosts = new ArrayList<ServiceComponentHost>();
+ ServiceComponentHost sch = EasyMock.createNiceMock(ServiceComponentHost.class);
+ expect(sch.getServiceName()).andReturn("HDFS").anyTimes();
+ expect(sch.getServiceComponentName()).andReturn("NAMENODE").anyTimes();
+ expect(sch.getHostName()).andReturn(HOSTNAME).anyTimes();
+ EasyMock.replay(sch);
+ serviceComponentHosts.add(sch);
+
+ // add HDFS/DN
+ sch = EasyMock.createNiceMock(ServiceComponentHost.class);
+ expect(sch.getServiceName()).andReturn("HDFS").anyTimes();
+ expect(sch.getServiceComponentName()).andReturn("DATANODE").anyTimes();
+ expect(sch.getHostName()).andReturn(HOSTNAME).anyTimes();
+ EasyMock.replay(sch);
+ serviceComponentHosts.add(sch);
+
+ Map<String, ServiceComponentHost> mapComponentHosts = new HashMap<String, ServiceComponentHost>();
+ ServiceComponentHost host = EasyMock.createNiceMock(ServiceComponentHost.class);
+ expect(host.getHostName()).andReturn(HOSTNAME).anyTimes();
+ mapComponentHosts.put(HOSTNAME, host);
+
+ Map<String, ServiceComponent> serviceComponents = new HashMap<String, ServiceComponent>();
+ ServiceComponent namenode = EasyMock.createNiceMock(ServiceComponent.class);
+ expect(namenode.getServiceComponentHosts()).andReturn(mapComponentHosts).anyTimes();
+ expect(namenode.isMasterComponent()).andReturn(true).anyTimes();
+ serviceComponents.put("NAMENODE", namenode);
+
+ // create HDFS for the cluster
+ Map<String, Service> services = new HashMap<String, Service>();
+ String hdfsName = "HDFS";
+ Service hdfs = EasyMock.createNiceMock(Service.class);
+ expect(hdfs.getName()).andReturn("HDFS").anyTimes();
+ expect(hdfs.getServiceComponents()).andReturn(serviceComponents).anyTimes();
+ services.put(hdfsName, hdfs);
+
+ // replay
+ EasyMock.replay(hdfs, host, namenode);
+
+ // Clusters mock
+ expect(m_mockClusters.getCluster((String) anyObject())).andReturn(
+ m_mockCluster).atLeastOnce();
+
+ // cluster mock
+ expect(m_mockCluster.getClusterId()).andReturn(Long.valueOf(1)).anyTimes();
+ expect(m_mockCluster.getClusterName()).andReturn(CLUSTERNAME).anyTimes();
+ expect(m_mockCluster.getServices()).andReturn(services).anyTimes();
+ expect(
+ m_mockCluster.getServiceComponentHosts(EasyMock.anyObject(String.class))).andReturn(
+ serviceComponentHosts).anyTimes();
+
+ AlertDefinitionEntity hdfsService = new AlertDefinitionEntity();
+ hdfsService.setDefinitionId(1L);
+ hdfsService.setClusterId(1L);
+ hdfsService.setHash(UUID.randomUUID().toString());
+ hdfsService.setServiceName("HDFS");
+ hdfsService.setComponentName("NAMENODE");
+ hdfsService.setScope(Scope.SERVICE);
+
+ AlertDefinitionEntity hdfsHost = new AlertDefinitionEntity();
+ hdfsHost.setDefinitionId(2L);
+ hdfsHost.setClusterId(1L);
+ hdfsHost.setHash(UUID.randomUUID().toString());
+ hdfsHost.setServiceName("HDFS");
+ hdfsHost.setComponentName("DATANODE");
+ hdfsHost.setScope(Scope.HOST);
+
+ AlertDefinitionEntity agentScoped = new AlertDefinitionEntity();
+ agentScoped.setDefinitionId(3L);
+ agentScoped.setClusterId(1L);
+ agentScoped.setHash(UUID.randomUUID().toString());
+ agentScoped.setServiceName("AMBARI");
+ agentScoped.setComponentName("AMBARI_AGENT");
+ agentScoped.setScope(Scope.HOST);
+
+ EasyMock.expect(
+ m_mockDao.findByServiceMaster(EasyMock.anyInt(),
+ (Set<String>) EasyMock.anyObject())).andReturn(
+ Collections.singletonList(hdfsService)).anyTimes();
+
+ EasyMock.expect(
+ m_mockDao.findByServiceComponent(EasyMock.anyInt(),
+ EasyMock.anyObject(String.class), EasyMock.anyObject(String.class))).andReturn(
+ Collections.singletonList(hdfsHost)).anyTimes();
+
+ EasyMock.expect(m_mockDao.findAgentScoped(EasyMock.anyInt())).andReturn(
+ Collections.singletonList(agentScoped)).anyTimes();
+
+ EasyMock.replay(m_mockClusters, m_mockCluster, m_mockDao);
+ m_hash = m_injector.getInstance(AlertDefinitionHash.class);
+ }
+
+ /**
+ *
+ */
+ @Override
+ @After
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ }
+
+ /**
+ * Test method for {@link org.apache.ambari.server.state.alert.AlertDefinitionHash#getHash(java.lang.String, java.lang.String)}.
+ */
+ @Test
+ public void testGetHash() {
+ String hash = m_hash.getHash(CLUSTERNAME, HOSTNAME);
+ assertNotNull(hash);
+ assertNotSame(AlertDefinitionHash.NULL_MD5_HASH, hash);
+ assertEquals(hash, m_hash.getHash(CLUSTERNAME, HOSTNAME));
+ }
+
+ /**
+ * Test method for {@link org.apache.ambari.server.state.alert.AlertDefinitionHash#getAlertDefinitions(java.lang.String, java.lang.String)}.
+ */
+ @Test
+ public void testGetAlertDefinitions() {
+ Set<AlertDefinitionEntity> definitions = m_hash.getAlertDefinitions(
+ CLUSTERNAME, HOSTNAME);
+
+ assertEquals(3, definitions.size());
+ }
+
+ /**
+ *
+ */
+ private class MockModule implements Module {
+ /**
+ *
+ */
+ @Override
+ public void configure(Binder binder) {
+ binder.bind(Clusters.class).toInstance(
+ EasyMock.createNiceMock(Clusters.class));
+ binder.bind(Cluster.class).toInstance(
+ EasyMock.createNiceMock(Cluster.class));
+ binder.bind(AlertDefinitionDAO.class).toInstance(
+ EasyMock.createNiceMock(AlertDefinitionDAO.class));
+ }
+ }
+}
[20/50] [abbrv] git commit: AMBARI-6910 IE doesn't handle download
configs errors 2. (Max Shepel via ababiichuk)
Posted by jo...@apache.org.
AMBARI-6910 IE doesn't handle download configs errors 2. (Max Shepel via ababiichuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1bd86fb1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1bd86fb1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1bd86fb1
Branch: refs/heads/branch-alerts-dev
Commit: 1bd86fb1255087f411d8df3f683e0cba40aa1ab6
Parents: 7d8927c
Author: aBabiichuk <ab...@cybervisiontech.com>
Authored: Tue Aug 19 17:55:49 2014 +0300
Committer: aBabiichuk <ab...@cybervisiontech.com>
Committed: Tue Aug 19 17:56:06 2014 +0300
----------------------------------------------------------------------
ambari-web/app/utils/components.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd86fb1/ambari-web/app/utils/components.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/components.js b/ambari-web/app/utils/components.js
index e72cc04..a7fadec 100644
--- a/ambari-web/app/utils/components.js
+++ b/ambari-web/app/utils/components.js
@@ -109,7 +109,7 @@ module.exports = {
} else {
errorMessage += Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.body.noErrorMessage').format(data.displayName);
}
- errorMessage += Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.body.question');
+ errorMessage += isNoConfigs ? '' : Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.body.question');
} else {
errorMessage += Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.body.noErrorMessage').format(data.displayName) +
Em.I18n.t('services.service.actions.downloadClientConfigs.fail.popup.body.question');
[23/50] [abbrv] git commit: AMBARI-6905. POST on /api/v1/persist
fails for CLUSTER.OPERATE. (mahadev)
Posted by jo...@apache.org.
AMBARI-6905. POST on /api/v1/persist fails for CLUSTER.OPERATE. (mahadev)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0c46e95e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0c46e95e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0c46e95e
Branch: refs/heads/branch-alerts-dev
Commit: 0c46e95e646da932d644719377d2584fc2b026c6
Parents: c9fbc84
Author: Mahadev Konar <ma...@apache.org>
Authored: Mon Aug 18 21:01:39 2014 -0700
Committer: Mahadev Konar <ma...@apache.org>
Committed: Tue Aug 19 10:56:11 2014 -0700
----------------------------------------------------------------------
.../AmbariAuthorizationFilter.java | 15 ++-
.../AmbariAuthorizationFilterTest.java | 124 +++++++++++++++++++
2 files changed, 136 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/0c46e95e/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
index 4ba8c7f..bc67cdb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
@@ -54,9 +54,10 @@ public class AmbariAuthorizationFilter implements Filter {
HttpServletRequest httpRequest = (HttpServletRequest) request;
HttpServletResponse httpResponse = (HttpServletResponse) response;
- SecurityContext context = SecurityContextHolder.getContext();
+ SecurityContext context = getSecurityContext();
- if (context.getAuthentication() == null || !context.getAuthentication().isAuthenticated()) {
+ Authentication authentication = context.getAuthentication();
+ if (authentication == null || !authentication.isAuthenticated()) {
String token = httpRequest.getHeader(INTERNAL_TOKEN_HEADER);
if (token != null) {
context.setAuthentication(new InternalAuthenticationToken(token));
@@ -64,7 +65,6 @@ public class AmbariAuthorizationFilter implements Filter {
} else {
boolean authorized = false;
- Authentication authentication = context.getAuthentication();
for (GrantedAuthority grantedAuthority : authentication.getAuthorities()) {
if (grantedAuthority instanceof AmbariGrantedAuthority) {
@@ -93,6 +93,11 @@ public class AmbariAuthorizationFilter implements Filter {
authorized = true;
break;
}
+ } else if (requestURI.matches("/api/v[0-9]+/persist.*")) {
+ if (permissionId.equals(PermissionEntity.CLUSTER_OPERATE_PERMISSION)) {
+ authorized = true;
+ break;
+ }
}
}
}
@@ -134,4 +139,8 @@ public class AmbariAuthorizationFilter implements Filter {
}
return value == null || value.length() == 0 ? defaultValue : value;
}
+
+ SecurityContext getSecurityContext() {
+ return SecurityContextHolder.getContext();
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/0c46e95e/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilterTest.java
new file mode 100644
index 0000000..a950eb6
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilterTest.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.security.authorization;
+
+import org.apache.ambari.server.orm.entities.PermissionEntity;
+import org.apache.ambari.server.orm.entities.PrivilegeEntity;
+import org.easymock.EasyMock;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.context.SecurityContext;
+
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.util.Collection;
+import java.util.Collections;
+
+import static org.easymock.EasyMock.*;
+
+public class AmbariAuthorizationFilterTest {
+
+ @Test
+ public void testDoFilter_postPersist_hasOperatePermission() throws Exception {
+ FilterChain chain = createNiceMock(FilterChain.class);
+ HttpServletRequest request = createNiceMock(HttpServletRequest.class);
+ HttpServletResponse response = createNiceMock(HttpServletResponse.class);
+ AmbariAuthorizationFilter filter = createMockBuilder(AmbariAuthorizationFilter.class)
+ .addMockedMethod("getSecurityContext").withConstructor().createMock();
+ SecurityContext securityContext = createNiceMock(SecurityContext.class);
+ Authentication authentication = createNiceMock(Authentication.class);
+ AmbariGrantedAuthority authority = createNiceMock(AmbariGrantedAuthority.class);
+ PrivilegeEntity privilegeEntity = createNiceMock(PrivilegeEntity.class);
+ PermissionEntity permission = createNiceMock(PermissionEntity.class);
+ FilterConfig filterConfig = createNiceMock(FilterConfig.class);
+
+
+ expect(filterConfig.getInitParameter("realm")).andReturn("AuthFilter");
+ expect(authentication.isAuthenticated()).andReturn(true);
+ expect(request.getRequestURI()).andReturn("/api/v1/persist/some_val");
+ expect(authority.getPrivilegeEntity()).andReturn(privilegeEntity);
+ expect(privilegeEntity.getPermission()).andReturn(permission);
+ EasyMock.<Collection<? extends GrantedAuthority>>expect(authentication.getAuthorities())
+ .andReturn(Collections.singletonList(authority));
+ expect(filter.getSecurityContext()).andReturn(securityContext);
+ expect(securityContext.getAuthentication()).andReturn(authentication);
+
+ expect(permission.getId()).andReturn(PermissionEntity.CLUSTER_OPERATE_PERMISSION);
+
+ // expect continue filtering
+ chain.doFilter(request, response);
+
+ replay(request, response, chain, filter, securityContext, authentication, authority,
+ privilegeEntity, permission, filterConfig);
+
+ filter.init(filterConfig);
+ filter.doFilter(request, response, chain);
+
+ verify(request, response, chain, filter, securityContext, authentication, authority,
+ privilegeEntity, permission, filterConfig);
+ }
+
+ @Test
+ public void testDoFilter_postPersist_hasNoOperatePermission() throws Exception {
+ FilterChain chain = createNiceMock(FilterChain.class);
+ HttpServletRequest request = createNiceMock(HttpServletRequest.class);
+ HttpServletResponse response = createNiceMock(HttpServletResponse.class);
+ AmbariAuthorizationFilter filter = createMockBuilder(AmbariAuthorizationFilter.class)
+ .addMockedMethod("getSecurityContext").withConstructor().createMock();
+ SecurityContext securityContext = createNiceMock(SecurityContext.class);
+ Authentication authentication = createNiceMock(Authentication.class);
+ AmbariGrantedAuthority authority = createNiceMock(AmbariGrantedAuthority.class);
+ PrivilegeEntity privilegeEntity = createNiceMock(PrivilegeEntity.class);
+ PermissionEntity permission = createNiceMock(PermissionEntity.class);
+ FilterConfig filterConfig = createNiceMock(FilterConfig.class);
+
+
+ expect(filterConfig.getInitParameter("realm")).andReturn("AuthFilter");
+ expect(authentication.isAuthenticated()).andReturn(true);
+ expect(request.getRequestURI()).andReturn("/api/v1/persist/some_val");
+ expect(authority.getPrivilegeEntity()).andReturn(privilegeEntity);
+ expect(privilegeEntity.getPermission()).andReturn(permission);
+ EasyMock.<Collection<? extends GrantedAuthority>>expect(authentication.getAuthorities())
+ .andReturn(Collections.singletonList(authority));
+ expect(filter.getSecurityContext()).andReturn(securityContext);
+ expect(securityContext.getAuthentication()).andReturn(authentication);
+
+
+ expect(request.getMethod()).andReturn("POST");
+ expect(permission.getId()).andReturn(PermissionEntity.VIEW_USE_PERMISSION);
+
+ // expect permission denial
+ response.setHeader("WWW-Authenticate", "Basic realm=\"AuthFilter\"");
+ response.sendError(HttpServletResponse.SC_FORBIDDEN, "You do not have permissions to access this resource.");
+ response.flushBuffer();
+
+ replay(request, response, chain, filter, securityContext, authentication, authority,
+ privilegeEntity, permission, filterConfig);
+
+ filter.init(filterConfig);
+ filter.doFilter(request, response, chain);
+
+ verify(request, response, chain, filter, securityContext, authentication, authority,
+ privilegeEntity, permission, filterConfig);
+ }
+}
\ No newline at end of file
[41/50] [abbrv] git commit: AMBARI-6935. Stacks API should expose
service level dependencies (aonishuk)
Posted by jo...@apache.org.
AMBARI-6935. Stacks API should expose service level dependencies (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/84c4b434
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/84c4b434
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/84c4b434
Branch: refs/heads/branch-alerts-dev
Commit: 84c4b434e73fcd8667d2b9c5133be7d2ff4c4454
Parents: ed09f6a
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Wed Aug 20 15:54:55 2014 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Wed Aug 20 15:54:55 2014 +0300
----------------------------------------------------------------------
.../server/api/util/StackExtensionHelper.java | 7 ++++++
.../server/controller/StackServiceResponse.java | 10 ++++++++
.../internal/StackServiceResourceProvider.java | 6 +++++
.../apache/ambari/server/state/ServiceInfo.java | 11 ++++++++-
.../src/main/resources/properties.json | 1 +
.../HDP/1.3.2/services/HBASE/metainfo.xml | 5 ++++
.../stacks/HDP/1.3.2/services/HIVE/metainfo.xml | 9 +++++++
.../HDP/1.3.2/services/MAPREDUCE/metainfo.xml | 4 ++++
.../HDP/1.3.2/services/OOZIE/metainfo.xml | 4 ++++
.../stacks/HDP/1.3.2/services/PIG/metainfo.xml | 4 ++++
.../HDP/1.3.2/services/SQOOP/metainfo.xml | 5 ++++
.../HDP/1.3.2/services/WEBHCAT/metainfo.xml | 6 +++++
.../HDP/2.0.6/services/FLUME/metainfo.xml | 4 ++++
.../HDP/2.0.6/services/HBASE/metainfo.xml | 5 ++++
.../stacks/HDP/2.0.6/services/HDFS/metainfo.xml | 4 ++++
.../stacks/HDP/2.0.6/services/HIVE/metainfo.xml | 11 +++++++++
.../HDP/2.0.6/services/OOZIE/metainfo.xml | 4 ++++
.../stacks/HDP/2.0.6/services/PIG/metainfo.xml | 4 ++++
.../HDP/2.0.6/services/SQOOP/metainfo.xml | 5 ++++
.../HDP/2.0.6/services/WEBHCAT/metainfo.xml | 6 +++++
.../stacks/HDP/2.0.6/services/YARN/metainfo.xml | 9 +++++++
.../stacks/HDP/2.1/services/FALCON/metainfo.xml | 4 ++++
.../stacks/HDP/2.1/services/STORM/metainfo.xml | 4 ++++
.../stacks/HDP/2.1/services/TEZ/metainfo.xml | 4 ++++
.../api/util/StackExtensionHelperTest.java | 25 ++++++++++++++++++++
.../HDP/2.0.7/services/HBASE/metainfo.xml | 4 ++++
26 files changed, 164 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
index 0670d9c..c62e48d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
@@ -156,6 +156,13 @@ public class StackExtensionHelper {
parentService.getExcludedConfigTypes() :
Collections.<String>emptySet()
);
+ mergedServiceInfo.setRequiredServices(
+ childService.getRequiredServices() != null ?
+ childService.getRequiredServices() :
+ parentService.getRequiredServices() != null ?
+ parentService.getRequiredServices() :
+ Collections.<String>emptyList()
+ );
mergedServiceInfo.setRestartRequiredAfterChange(
(childService.isRestartRequiredAfterChange() != null)
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
index 25b3b8d..6e7e8e0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
@@ -38,6 +38,7 @@ public class StackServiceResponse {
private List<String> customCommands;
private Map<String, Map<String, Map<String, String>>> configTypes;
+ private List<String> requiredServices;
/**
* Constructor.
@@ -51,6 +52,7 @@ public class StackServiceResponse {
comments = service.getComment();
serviceVersion = service.getVersion();
configTypes = service.getConfigTypes();
+ requiredServices = service.getRequiredServices();
serviceCheckSupported = null != service.getCommandScript();
// the custom command names defined at the service (not component) level
@@ -116,6 +118,14 @@ public class StackServiceResponse {
public Map<String, Map<String, Map<String, String>>> getConfigTypes() {
return configTypes;
}
+
+ public List<String> getRequiredServices() {
+ return requiredServices;
+ }
+
+ public void setRequiredServices(List<String> requiredServices) {
+ this.requiredServices = requiredServices;
+ }
/**
* Gets whether the service represented by this response supports running
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceResourceProvider.java
index 0523edc..f6a6141 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackServiceResourceProvider.java
@@ -61,6 +61,9 @@ public class StackServiceResourceProvider extends ReadOnlyResourceProvider {
private static final String CONFIG_TYPES = PropertyHelper.getPropertyId(
"StackServices", "config_types");
+
+ private static final String REQUIRED_SERVICES_ID = PropertyHelper.getPropertyId(
+ "StackServices", "required_services");
private static final String SERVICE_CHECK_SUPPORTED_PROPERTY_ID = PropertyHelper.getPropertyId(
"StackServices", "service_check_supported");
@@ -127,6 +130,9 @@ public class StackServiceResourceProvider extends ReadOnlyResourceProvider {
setResourceProperty(resource, CONFIG_TYPES,
response.getConfigTypes(), requestedIds);
+
+ setResourceProperty(resource, REQUIRED_SERVICES_ID,
+ response.getRequiredServices(), requestedIds);
setResourceProperty(resource, SERVICE_CHECK_SUPPORTED_PROPERTY_ID,
response.isServiceCheckSupported(), requestedIds);
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
index ac1c9b5..fe2c5f3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
@@ -133,7 +133,10 @@ public class ServiceInfo {
@XmlElementWrapper(name="customCommands")
@XmlElements(@XmlElement(name="customCommand"))
private List<CustomCommandDefinition> customCommands;
-
+
+ @XmlElementWrapper(name="requiredServices")
+ @XmlElement(name="service")
+ private List<String> requiredServices;
/**
* Meaning: stores subpath from stack root to exact directory, that contains
@@ -175,7 +178,13 @@ public class ServiceInfo {
public void setComment(String comment) {
this.comment = comment;
}
+ public List<String> getRequiredServices() {
+ return requiredServices;
+ }
+ public void setRequiredServices(List<String> requiredServices) {
+ this.requiredServices = requiredServices;
+ }
public List<PropertyInfo> getProperties() {
if (properties == null) properties = new ArrayList<PropertyInfo>();
return properties;
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index aa2bf71..6686f39 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -229,6 +229,7 @@
"StackServices/config_types",
"StackServices/service_check_supported",
"StackServices/custom_commands",
+ "StackServices/required_services",
"_"
],
"StackConfiguration":[
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/metainfo.xml
index d1e8795..fa53125 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/metainfo.xml
@@ -127,6 +127,11 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>HDFS</service>
+ <service>ZOOKEEPER</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>hbase-policy</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
index c1263d5..96e8aba 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/metainfo.xml
@@ -147,6 +147,11 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>MAPREDUCE</service>
+ <service>ZOOKEEPER</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>hive-site</config-type>
@@ -185,6 +190,10 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>HIVE</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>hive-site</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/metainfo.xml
index a900f2e..7ab788a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/metainfo.xml
@@ -121,6 +121,10 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>HDFS</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>capacity-scheduler</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml
index 4e83703..fb8397e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/metainfo.xml
@@ -117,6 +117,10 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>MAPREDUCE</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>oozie-site</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml
index e7f0cad..ee745e8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/PIG/metainfo.xml
@@ -62,6 +62,10 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>MAPREDUCE</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>pig-env</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
index 1f835cb..1c25945 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/metainfo.xml
@@ -77,6 +77,11 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>HDFS</service>
+ </requiredServices>
+
<configuration-dependencies>
<config-type>sqoop-env</config-type>
</configuration-dependencies>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
index 636f499..642f5d0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/metainfo.xml
@@ -86,6 +86,12 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>ZOOKEEPER</service>
+ <service>HIVE</service>
+ </requiredServices>
+
<configuration-dependencies>
<config-type>webhcat-site</config-type>
<config-type>webhcat-env</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/metainfo.xml
index 4011c28..5b73548 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/FLUME/metainfo.xml
@@ -51,6 +51,10 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>HDFS</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>flume-env</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
index c63b2b8..af6b100 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
@@ -117,6 +117,11 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>ZOOKEEPER</service>
+ <service>HDFS</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>hbase-policy</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
index 62b7cd8..53b9304 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
@@ -201,6 +201,10 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>ZOOKEEPER</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>core-site</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
index eecc292..985ca18 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/metainfo.xml
@@ -159,6 +159,12 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>ZOOKEEPER</service>
+ <service>YARN</service>
+ <service>TEZ</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>hive-site</config-type>
@@ -200,6 +206,11 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>HIVE</service>
+ </requiredServices>
+
<configuration-dependencies>
<config-type>hive-site</config-type>
<config-type>hive-env</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
index bdc83ea..093d5d3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
@@ -146,6 +146,10 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>YARN</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>oozie-site</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/metainfo.xml
index 48b42f1..fbd6173 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/metainfo.xml
@@ -75,6 +75,10 @@
<dictionaryName>pig-env</dictionaryName>
</configFile>
</configFiles>
+
+ <requiredServices>
+ <service>YARN</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>pig-env</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/metainfo.xml
index 4573211..c8c01f1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/metainfo.xml
@@ -77,6 +77,11 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>HDFS</service>
+ </requiredServices>
+
<configuration-dependencies>
<config-type>sqoop-env</config-type>
</configuration-dependencies>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
index 325fddf..5e93cf4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/metainfo.xml
@@ -93,6 +93,12 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>HIVE</service>
+ <service>ZOOKEEPER</service>
+ </requiredServices>
+
<configuration-dependencies>
<config-type>webhcat-site</config-type>
<config-type>webhcat-env</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
index c2e995d..e08221a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
@@ -126,6 +126,11 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>HDFS</service>
+ <service>TEZ</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>yarn-site</config-type>
@@ -210,6 +215,10 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>YARN</service>
+ </requiredServices>
<configuration-dir>configuration-mapred</configuration-dir>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/metainfo.xml
index 43c2a3e..c16893e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/metainfo.xml
@@ -84,6 +84,10 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>OOZIE</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>falcon-env</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/metainfo.xml
index 3f13983..1f15f8f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/STORM/metainfo.xml
@@ -106,6 +106,10 @@
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+
+ <requiredServices>
+ <service>ZOOKEEPER</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>storm-site</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/metainfo.xml
index e6ffd18..177d538 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/TEZ/metainfo.xml
@@ -56,6 +56,10 @@
</packages>
</osSpecific>
</osSpecifics>
+
+ <requiredServices>
+ <service>YARN</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>tez-site</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
index 517f3b7..548ab88 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
@@ -280,6 +280,31 @@ public class StackExtensionHelperTest {
}
}
}
+
+ @Test
+ public void testrequiredServicesPropertyInheritance() throws Exception{
+ File stackRoot = new File(stackRootStr);
+ StackInfo stackInfo = new StackInfo();
+ stackInfo.setName("HDP");
+ stackInfo.setVersion("2.0.7");
+ StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
+ helper.populateServicesForStack(stackInfo);
+ helper.fillInfo();
+ List<ServiceInfo> allServices = helper.getAllApplicableServices(stackInfo);
+ assertEquals(13, allServices.size());
+
+ List<String> expectedRequiredServices = new ArrayList<String>();
+ expectedRequiredServices.add("HDFS");
+ expectedRequiredServices.add("TEZ");
+
+ for (ServiceInfo serviceInfo : allServices) {
+ if (serviceInfo.getName().equals("HBASE")) {
+ assertTrue(serviceInfo.getRequiredServices().equals(expectedRequiredServices));
+ } else {
+ assertTrue((serviceInfo.getRequiredServices() == null || serviceInfo.getRequiredServices().isEmpty()));
+ }
+ }
+ }
@Test
public void getSchemaVersion() throws Exception {
http://git-wip-us.apache.org/repos/asf/ambari/blob/84c4b434/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/metainfo.xml
index c114c83..e4a76c5 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/metainfo.xml
@@ -112,6 +112,10 @@
<scriptType>PYTHON</scriptType>
<timeout>50</timeout>
</commandScript>
+ <requiredServices>
+ <service>HDFS</service>
+ <service>TEZ</service>
+ </requiredServices>
<configuration-dependencies>
<config-type>global</config-type>
<config-type>hbase-policy</config-type>
[26/50] [abbrv] git commit: AMBARI-6907. Config History: should
display new current version after reverting to an old version. (xiwang)
Posted by jo...@apache.org.
AMBARI-6907. Config History: should display new current version after reverting to an old version. (xiwang)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/933f7f8b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/933f7f8b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/933f7f8b
Branch: refs/heads/branch-alerts-dev
Commit: 933f7f8b26bf2897534ffa67ef257336a08d4c20
Parents: 40dc5b7
Author: Xi Wang <xi...@apache.org>
Authored: Mon Aug 18 17:38:10 2014 -0700
Committer: Xi Wang <xi...@apache.org>
Committed: Tue Aug 19 11:36:34 2014 -0700
----------------------------------------------------------------------
.../controllers/main/service/info/configs.js | 46 +++++++++++++++-----
ambari-web/app/messages.js | 2 +-
ambari-web/app/models/service_config_version.js | 4 +-
.../common/configs/config_history_flow.hbs | 5 +--
.../views/common/configs/config_history_flow.js | 13 ++----
.../views/main/dashboard/config_history_view.js | 4 +-
6 files changed, 43 insertions(+), 31 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/933f7f8b/ambari-web/app/controllers/main/service/info/configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/configs.js b/ambari-web/app/controllers/main/service/info/configs.js
index 180d352..a856d1e 100644
--- a/ambari-web/app/controllers/main/service/info/configs.js
+++ b/ambari-web/app/controllers/main/service/info/configs.js
@@ -252,9 +252,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend({
* get service config versions of current service
*/
loadServiceConfigVersions: function () {
- var self = this;
-
- App.ajax.send({
+ return App.ajax.send({
name: 'service.serviceConfigVersions.get',
data: {
serviceName: this.get('content.serviceName')
@@ -262,9 +260,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend({
sender: this,
success: 'loadServiceConfigVersionsSuccess',
error: 'loadServiceConfigVersionsError'
- }).complete(function () {
- self.loadSelectedVersion();
- });
+ })
},
/**
@@ -275,13 +271,39 @@ App.MainServiceInfoConfigsController = Em.Controller.extend({
* @param params
*/
loadServiceConfigVersionsSuccess: function (data, opt, params) {
- var currentVersion = Math.max.apply(this, data.items.mapProperty('serviceconfigversion'));
- var currentVersionObject = data.items.findProperty('serviceconfigversion', currentVersion);
+ var self = this;
+ this.loadCurrentVersions().complete(function () {
+ App.serviceConfigVersionsMapper.map(data);
+ self.loadSelectedVersion();
+ });
+ },
+
+ loadCurrentVersions: function () {
+ return App.ajax.send({
+ name: 'service.serviceConfigVersions.get.current',
+ sender: this,
+ data: {},
+ success: 'loadCurrentVersionsSuccess'
+ })
+ },
- this.set('currentVersion', currentVersion);
- App.cache['currentConfigVersions'] = {};
- App.cache['currentConfigVersions'][currentVersionObject.service_name + '_' + currentVersionObject.serviceconfigversion] = true;
- App.serviceConfigVersionsMapper.map(data);
+ /**
+ * load current service config version number
+ * set currentVersion
+ * @param data
+ * @param opt
+ * @param params
+ */
+ loadCurrentVersionsSuccess: function (data, opt, params) {
+ var currentConfigVersions = {};
+ var self = this;
+ for (var service in data.Clusters.desired_serviceconfigversions) {
+ currentConfigVersions[service + '_' + data.Clusters.desired_serviceconfigversions[service].serviceconfigversion] = true;
+ if (self.get('content.serviceName') == service) {
+ self.set('currentVersion', data.Clusters.desired_serviceconfigversions[service].serviceconfigversion);
+ }
+ }
+ App.cache['currentConfigVersions'] = currentConfigVersions;
},
/**
http://git-wip-us.apache.org/repos/asf/ambari/blob/933f7f8b/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 6e118ba..cd25878 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1987,7 +1987,7 @@ Em.I18n.translations = {
'dashboard.configHistory.title': 'Config History',
'dashboard.configHistory.table.version.title' : 'Service',
'dashboard.configHistory.table.configGroup.title' : 'Config Group',
- 'dashboard.configHistory.table.modified.title' : 'Modified',
+ 'dashboard.configHistory.table.created.title' : 'Created',
'dashboard.configHistory.table.empty' : 'No history to display',
'dashboard.configHistory.table.version.versionText' : 'V{0}',
'dashboard.configHistory.table.filteredHostsInfo': '{0} of {1} versions showing',
http://git-wip-us.apache.org/repos/asf/ambari/blob/933f7f8b/ambari-web/app/models/service_config_version.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/service_config_version.js b/ambari-web/app/models/service_config_version.js
index 6900492..000ad49 100644
--- a/ambari-web/app/models/service_config_version.js
+++ b/ambari-web/app/models/service_config_version.js
@@ -38,10 +38,10 @@ App.ServiceConfigVersion = DS.Model.extend({
return Em.I18n.t('dashboard.configHistory.table.version.versionText').format(this.get('version'));
}.property('version'),
modifiedDate: function () {
- return dateUtil.dateFormat(this.get('appliedTime'));
+ return dateUtil.dateFormat(this.get('createTime'));
}.property('createTime'),
shortModifiedDate: function () {
- return dateUtil.dateFormat(this.get('appliedTime'), 'MMM DD, YYYY');
+ return dateUtil.dateFormat(this.get('createTime'), 'MMM DD, YYYY');
}.property('createTime'),
/**
* determine whether ServiceConfigVersion is requested from server
http://git-wip-us.apache.org/repos/asf/ambari/blob/933f7f8b/ambari-web/app/templates/common/configs/config_history_flow.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/configs/config_history_flow.hbs b/ambari-web/app/templates/common/configs/config_history_flow.hbs
index 71485f0..a3a39b3 100644
--- a/ambari-web/app/templates/common/configs/config_history_flow.hbs
+++ b/ambari-web/app/templates/common/configs/config_history_flow.hbs
@@ -91,11 +91,8 @@
<button class="btn" {{action doCancel target="controller"}} {{bindAttr disabled="view.versionActionsDisabled"}}>{{t common.cancel}}</button>
<button class="btn btn-success" {{action save target="view"}} {{bindAttr disabled="view.isSaveDisabled"}}>{{t common.save}}</button>
</div>
- <button class="btn btn-success" {{action revert view.displayedServiceVersion target="view"}} {{bindAttr disabled="view.versionActionsDisabled" class="view.displayedServiceVersion.isCurrent:hidden"}}>{{t dashboard.configHistory.info-bar.revert.button}}</button>
+ <button class="btn btn-success" {{action revert target="view"}} {{bindAttr disabled="view.versionActionsDisabled" class="view.displayedServiceVersion.isCurrent:hidden"}}>{{t dashboard.configHistory.info-bar.revert.button}}</button>
</div>
</div>
- <div class="label-wrapper">
- {{view.shortNotes}} {{#if view.showMoreLink}}<a class="pointer" {{bindAttr title="view.displayedServiceVersion.notes"}}>{{t jobs.hive.more}}</a>{{/if}}
- </div>
</div>
</div>
http://git-wip-us.apache.org/repos/asf/ambari/blob/933f7f8b/ambari-web/app/views/common/configs/config_history_flow.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/config_history_flow.js b/ambari-web/app/views/common/configs/config_history_flow.js
index 1dcb0e0..892c778 100644
--- a/ambari-web/app/views/common/configs/config_history_flow.js
+++ b/ambari-web/app/views/common/configs/config_history_flow.js
@@ -231,16 +231,9 @@ App.ConfigHistoryFlowView = Em.View.extend({
},
sendRevertCallSuccess: function (data, opt, params) {
- var version = params.data.Clusters.desired_serviceconfigversions.serviceconfigversion;
-
- this.get('serviceVersions').forEach(function (serviceVersion) {
- serviceVersion.set('isCurrent', serviceVersion.get('version') === version);
- });
- this.set('controller.currentVersion', version);
-
- this.switchVersion({context: Em.Object.create({
- version: version
- })});
+ // Reverting to an old version generates a new version with the latest version number,
+ // so we need to call loadStep to refresh the displayed config versions.
+ this.get('controller').loadStep();
},
/**
http://git-wip-us.apache.org/repos/asf/ambari/blob/933f7f8b/ambari-web/app/views/main/dashboard/config_history_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/config_history_view.js b/ambari-web/app/views/main/dashboard/config_history_view.js
index d45ed9e..cf395ba 100644
--- a/ambari-web/app/views/main/dashboard/config_history_view.js
+++ b/ambari-web/app/views/main/dashboard/config_history_view.js
@@ -100,9 +100,9 @@ App.MainConfigHistoryView = App.TableView.extend({
}),
modifiedSort: sort.fieldView.extend({
column: 3,
- name: 'appliedTime',
+ name: 'createTime',
status: 'sorting_desc',
- displayName: Em.I18n.t('dashboard.configHistory.table.modified.title')
+ displayName: Em.I18n.t('dashboard.configHistory.table.created.title')
}),
authorSort: sort.fieldView.extend({
column: 4,
[12/50] [abbrv] git commit: AMBARI-6868 - Ambari should disable web
directory listing by default (Alejandro Fernandez via jonathanhurley)
Posted by jo...@apache.org.
AMBARI-6868 - Ambari should disable web directory listing by default (Alejandro Fernandez via jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b8c83263
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b8c83263
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b8c83263
Branch: refs/heads/branch-alerts-dev
Commit: b8c8326378f3fe7f58a453f291508a5fd82ba023
Parents: bba2806
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Aug 18 16:32:28 2014 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Aug 18 16:32:58 2014 -0400
----------------------------------------------------------------------
.../main/java/org/apache/ambari/server/controller/AmbariServer.java | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/b8c83263/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index f4439b8..7b93836 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -226,6 +226,7 @@ public class AmbariServer {
"/", ServletContextHandler.SESSIONS );
ServletHolder rootServlet = root.addServlet(DefaultServlet.class, "/");
+ rootServlet.setInitParameter("dirAllowed", "false");
rootServlet.setInitOrder(1);
/* Configure default servlet for agent server */
[15/50] [abbrv] git commit: AMBARI-6906. Validation error and warning
should be shown as hovers
Posted by jo...@apache.org.
AMBARI-6906. Validation error and warning should be shown as hovers
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/388c52ee
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/388c52ee
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/388c52ee
Branch: refs/heads/branch-alerts-dev
Commit: 388c52ee8fc0d002a82eef3d8a6ddca360fd5fb3
Parents: bbd9179
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Mon Aug 18 17:09:00 2014 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Mon Aug 18 20:33:19 2014 -0700
----------------------------------------------------------------------
.../stacks/HDP/2.0.6/services/stack_advisor.py | 6 +--
.../app/controllers/wizard/step5_controller.js | 52 +++++++++++++++-----
.../app/controllers/wizard/step6_controller.js | 16 ++++++
ambari-web/app/messages.js | 3 ++
ambari-web/app/templates/wizard/step5.hbs | 12 ++++-
ambari-web/app/templates/wizard/step6.hbs | 29 ++++++++---
ambari-web/app/views/wizard/step5_view.js | 1 +
7 files changed, 93 insertions(+), 26 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/388c52ee/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 77c57e2..69fb23a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -192,8 +192,8 @@ class HDP206StackAdvisor(StackAdvisor):
secondaryNameNodeHosts = secondaryNameNodeHosts[0]
commonHosts = list(set(nameNodeHosts).intersection(secondaryNameNodeHosts))
for host in commonHosts:
- items.append( { "type": 'host-component', "level": 'ERROR', "message": 'NameNode and Secondary NameNode cannot be hosted on same machine', "component-name": 'NAMENODE', "host": str(host) } )
- items.append( { "type": 'host-component', "level": 'ERROR', "message": 'NameNode and Secondary NameNode cannot be hosted on same machine', "component-name": 'SECONDARY_NAMENODE', "host": str(host) } )
+ items.append( { "type": 'host-component', "level": 'WARN', "message": 'NameNode and Secondary NameNode cannot be hosted on same machine', "component-name": 'NAMENODE', "host": str(host) } )
+ items.append( { "type": 'host-component', "level": 'WARN', "message": 'NameNode and Secondary NameNode cannot be hosted on same machine', "component-name": 'SECONDARY_NAMENODE', "host": str(host) } )
# Validating cardinality
for component in componentsList:
@@ -489,7 +489,7 @@ def getXmxSize(value):
return result[0] + result[1].lower()
return result[0]
-def formatXmxSizeToBytes(value):
+def formatXmxSizeToBytes(value):
value = value.lower()
if len(value) == 0:
return 0
http://git-wip-us.apache.org/repos/asf/ambari/blob/388c52ee/ambari-web/app/controllers/wizard/step5_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step5_controller.js b/ambari-web/app/controllers/wizard/step5_controller.js
index b5c1b76..b82fa1c 100644
--- a/ambari-web/app/controllers/wizard/step5_controller.js
+++ b/ambari-web/app/controllers/wizard/step5_controller.js
@@ -141,6 +141,13 @@ App.WizardStep5Controller = Em.Controller.extend({
generalWarningMessages: [],
/**
+ * true if any warning exists
+ */
+ anyWarning: function() {
+ return this.get('servicesMasters').some(function(m) { return m.get('warnMessage'); }) || this.get('generalWarningMessages').some(function(m) { return m; });
+ }.property('servicesMasters.@each.warnMessage', 'generalWarningMessages'),
+
+ /**
* List of host with assigned masters
* Format:
* <code>
@@ -274,8 +281,8 @@ App.WizardStep5Controller = Em.Controller.extend({
updateValidationsSuccessCallback: function (data) {
var self = this;
- this.set('generalErrorMessages', []);
- this.set('generalWarningMessages', []);
+ generalErrorMessages = [];
+ generalWarningMessages = [];
this.get('servicesMasters').setEach('warnMessage', null);
this.get('servicesMasters').setEach('errorMessage', null);
var anyErrors = false;
@@ -296,13 +303,16 @@ App.WizardStep5Controller = Em.Controller.extend({
var details = " (" + item['component-name'] + " on " + item.host + ")";
if (item.level === 'ERROR') {
anyErrors = true;
- self.get('generalErrorMessages').push(item.message + details);
+ generalErrorMessages.push(item.message + details);
} else if (item.level === 'WARN') {
- self.get('generalWarningMessages').push(item.message + details);
+ generalWarningMessages.push(item.message + details);
}
}
});
+ this.set('generalErrorMessages', generalErrorMessages);
+ this.set('generalWarningMessages', generalWarningMessages);
+
this.set('submitDisabled', anyErrors);
},
@@ -1030,19 +1040,35 @@ App.WizardStep5Controller = Em.Controller.extend({
submit: function () {
var self = this;
- var goNextStepIfValid = function() {
- if (!self.get('submitDisabled')) {
- App.router.send('next');
- }
- };
+ var primary = function() {
+ var goNextStepIfValid = function() {
+ if (!self.get('submitDisabled')) {
+ App.router.send('next');
+ }
+ };
- if (App.supports.serverRecommendValidate ) {
- self.recommendAndValidate(function() {
+ if (App.supports.serverRecommendValidate ) {
+ self.recommendAndValidate(function() {
+ goNextStepIfValid();
+ });
+ } else {
+ self.updateIsSubmitDisabled();
goNextStepIfValid();
+ }
+ }
+
+ if (self.get('anyWarning')) {
+ App.ModalPopup.show({
+ primary: Em.I18n.t('common.continueAnyway'),
+ header: Em.I18n.t('installer.step5.warningsAttention.header'),
+ body: Em.I18n.t('installer.step5.warningsAttention'),
+ onPrimary: function () {
+ this.hide();
+ primary();
+ }
});
} else {
- self.updateIsSubmitDisabled();
- goNextStepIfValid();
+ primary();
}
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/388c52ee/ambari-web/app/controllers/wizard/step6_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step6_controller.js b/ambari-web/app/controllers/wizard/step6_controller.js
index 9fe8685..a74f020 100644
--- a/ambari-web/app/controllers/wizard/step6_controller.js
+++ b/ambari-web/app/controllers/wizard/step6_controller.js
@@ -110,6 +110,22 @@ App.WizardStep6Controller = Em.Controller.extend({
generalWarningMessages: [],
/**
+ * true if validation has any general error message
+ */
+ anyErrors: function() {
+ var messages = this.get('generalErrorMessages');
+ return this.get('errorMessage') || (messages && messages.length > 0);
+ }.property('generalErrorMessages', 'generalErrorMessages.@each', 'errorMessage'),
+
+ /**
+ * true if validation has any general warning message
+ */
+ anyWarnings: function() {
+ var messages = this.get('generalWarningMessages');
+ return messages && messages.length > 0;
+ }.property('generalWarningMessages', 'generalWarningMessages.@each'),
+
+ /**
* Verify condition that at least one checkbox of each component was checked
* @method clearError
*/
http://git-wip-us.apache.org/repos/asf/ambari/blob/388c52ee/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index d641607..5bd9a87 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -218,6 +218,7 @@ Em.I18n.translations = {
'common.view': 'View',
'common.compare': 'Compare',
'common.latest': 'Latest',
+ 'common.continueAnyway': 'Continue Anyway',
'passiveState.turnOn':'Turn On Maintenance Mode',
'passiveState.turnOff':'Turn Off Maintenance Mode',
@@ -616,6 +617,8 @@ Em.I18n.translations = {
'installer.step5.body.coHostedComponents':'<i class="icon-asterisks">✵</i> {0} will be hosted on the same host.',
'installer.step5.hostInfo':'%@ (%@, %@ cores)',
'installer.step5.hiveGroup':'HiveServer2, WebHCat Server, MySQL Server',
+ 'installer.step5.warningsAttention.header': 'Warnings',
+ 'installer.step5.warningsAttention': 'Your master component assignment has warnings and needs attention.',
'installer.step6.header':'Assign Slaves and Clients',
'installer.step6.body':'Assign slave and client components to hosts you want to run them on.<br/>Hosts that are assigned master components are shown with <i class=icon-asterisks>✵</i>.',
http://git-wip-us.apache.org/repos/asf/ambari/blob/388c52ee/ambari-web/app/templates/wizard/step5.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/wizard/step5.hbs b/ambari-web/app/templates/wizard/step5.hbs
index c30d89b..7715719 100644
--- a/ambari-web/app/templates/wizard/step5.hbs
+++ b/ambari-web/app/templates/wizard/step5.hbs
@@ -81,8 +81,16 @@
{{view App.RemoveControlView componentNameBinding="component_name" serviceComponentIdBinding="serviceComponentId"}}
{{/if}}
- <span class="help-block">{{warnMessage}}</span>
- <span class="help-block">{{errorMessage}}</span>
+ <span rel="popover" title="Warning" {{bindAttr data-content="warnMessage"}}>
+ {{#if warnMessage}}
+ <i class="icon-warning-sign icon-large"></i>
+ {{/if}}
+ </span>
+ <span rel="popover" title="Error" {{bindAttr data-content="errorMessage"}}>
+ {{#if errorMessage}}
+ <i class="icon-remove-sign icon-large"></i>
+ {{/if}}
+ </span>
</div>
{{/if}}
</div>
http://git-wip-us.apache.org/repos/asf/ambari/blob/388c52ee/ambari-web/app/templates/wizard/step6.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/wizard/step6.hbs b/ambari-web/app/templates/wizard/step6.hbs
index 77cca2a..d96a519 100644
--- a/ambari-web/app/templates/wizard/step6.hbs
+++ b/ambari-web/app/templates/wizard/step6.hbs
@@ -20,15 +20,28 @@
<h2>{{view.title}}</h2>
<div class="alert alert-info">{{{view.label}}}</div>
- {{#if errorMessage}}
- <div class="alert alert-error">{{errorMessage}}</div>
+ {{#if anyErrors}}
+ <div class="alert alert-error">
+ <ul>
+ {{#if errorMessage}}
+ <li>{{errorMessage}}</li>
+ {{/if}}
+ {{#each msg in controller.generalErrorMessages}}
+ <li>{{msg}}</li>
+ {{/each}}
+ </ul>
+ </div>
+ {{/if}}
+
+ {{#if anyWarnings}}
+ <div class="alert alert-warning">
+ <ul>
+ {{#each msg in controller.generalWarningMessages}}
+ <li>{{msg}}</li>
+ {{/each}}
+ </ul>
+ </div>
{{/if}}
- {{#each msg in controller.generalErrorMessages}}
- <div class="alert alert-error">{{msg}}</div>
- {{/each}}
- {{#each msg in controller.generalWarningMessages}}
- <div class="alert alert-warning">{{msg}}</div>
- {{/each}}
<div class="pre-scrollable">
<table class="table" id="component_assign_table">
http://git-wip-us.apache.org/repos/asf/ambari/blob/388c52ee/ambari-web/app/views/wizard/step5_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/wizard/step5_view.js b/ambari-web/app/views/wizard/step5_view.js
index 33a2aea..141a571 100644
--- a/ambari-web/app/views/wizard/step5_view.js
+++ b/ambari-web/app/views/wizard/step5_view.js
@@ -127,6 +127,7 @@ App.SelectHostView = Em.Select.extend(App.SelectHost, {
didInsertElement: function () {
this.initContent();
this.set("value", this.get("component.selectedHost"));
+ App.popover($("[rel=popover]"), {'placement': 'right', 'trigger': 'hover'});
},
/**
[32/50] [abbrv] git commit: AMBARI-6923. Views: add property to know
if the view instance is XML-defined or not.
Posted by jo...@apache.org.
AMBARI-6923. Views: add property to know if the view instance is XML-defined or not.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e1eaf3a3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e1eaf3a3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e1eaf3a3
Branch: refs/heads/branch-alerts-dev
Commit: e1eaf3a3176b7e0c6fcb5a75547a0f9f0f0df79d
Parents: 51bebd3
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Tue Aug 19 13:11:45 2014 -0700
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Tue Aug 19 13:11:45 2014 -0700
----------------------------------------------------------------------
.../server/controller/internal/ViewInstanceResourceProvider.java | 3 +++
1 file changed, 3 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/e1eaf3a3/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
index c1a3d79..036fa13 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ViewInstanceResourceProvider.java
@@ -63,6 +63,7 @@ public class ViewInstanceResourceProvider extends AbstractResourceProvider {
public static final String PROPERTIES_PROPERTY_ID = "ViewInstanceInfo/properties";
public static final String DATA_PROPERTY_ID = "ViewInstanceInfo/instance_data";
public static final String CONTEXT_PATH_PROPERTY_ID = "ViewInstanceInfo/context_path";
+ public static final String STATIC_PROPERTY_ID = "ViewInstanceInfo/static";
/**
* Property prefix values.
@@ -96,6 +97,7 @@ public class ViewInstanceResourceProvider extends AbstractResourceProvider {
propertyIds.add(PROPERTIES_PROPERTY_ID);
propertyIds.add(DATA_PROPERTY_ID);
propertyIds.add(CONTEXT_PATH_PROPERTY_ID);
+ propertyIds.add(STATIC_PROPERTY_ID);
}
// ----- Constructors ------------------------------------------------------
@@ -214,6 +216,7 @@ public class ViewInstanceResourceProvider extends AbstractResourceProvider {
setResourceProperty(resource, LABEL_PROPERTY_ID, viewInstanceEntity.getLabel(), requestedIds);
setResourceProperty(resource, DESCRIPTION_PROPERTY_ID, viewInstanceEntity.getDescription(), requestedIds);
setResourceProperty(resource, VISIBLE_PROPERTY_ID, viewInstanceEntity.isVisible(), requestedIds);
+ setResourceProperty(resource, STATIC_PROPERTY_ID, viewInstanceEntity.isXmlDriven(), requestedIds);
Map<String, String> properties = new HashMap<String, String>();
for (ViewInstancePropertyEntity viewInstancePropertyEntity : viewInstanceEntity.getProperties()) {
[11/50] [abbrv] git commit: AMBARI-6902. LDAP uses redundant
properties from Ambari configuration. (mahadev)
Posted by jo...@apache.org.
AMBARI-6902. LDAP uses redundant properties from Ambari configuration. (mahadev)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bba2806f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bba2806f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bba2806f
Branch: refs/heads/branch-alerts-dev
Commit: bba2806f76d98a139e83736fd08b72fae0ab27c1
Parents: e7d864c
Author: Mahadev Konar <ma...@apache.org>
Authored: Mon Aug 18 13:29:51 2014 -0700
Committer: Mahadev Konar <ma...@apache.org>
Committed: Mon Aug 18 13:29:55 2014 -0700
----------------------------------------------------------------------
.../authorization/AmbariLdapDataPopulator.java | 17 ++++-------------
1 file changed, 4 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/bba2806f/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulator.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulator.java
index 29b69a6..7932833 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulator.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapDataPopulator.java
@@ -91,7 +91,7 @@ public class AmbariLdapDataPopulator {
public Object mapFromAttributes(Attributes attributes)
throws NamingException {
- return attributes.get("uid").get();
+ return "";
}
});
return true;
@@ -269,10 +269,7 @@ public class AmbariLdapDataPopulator {
final LdapTemplate ldapTemplate = loadLdapTemplate();
final EqualsFilter equalsFilter = new EqualsFilter("objectClass",
ldapServerProperties.getGroupObjectClass());
- String baseDn = ldapServerProperties.getGroupBase();
- if (baseDn == null) {
- baseDn = ldapServerProperties.getBaseDN();
- }
+ String baseDn = ldapServerProperties.getBaseDN();
ldapTemplate.search(baseDn, equalsFilter.encode(), new AttributesMapper() {
public Object mapFromAttributes(Attributes attributes)
@@ -295,10 +292,7 @@ public class AmbariLdapDataPopulator {
final LdapTemplate ldapTemplate = loadLdapTemplate();
final EqualsFilter equalsFilter = new EqualsFilter("objectClass",
ldapServerProperties.getUserObjectClass());
- String baseDn = ldapServerProperties.getUserBase();
- if (baseDn == null) {
- baseDn = ldapServerProperties.getBaseDN();
- }
+ String baseDn = ldapServerProperties.getBaseDN();
ldapTemplate.search(baseDn, equalsFilter.encode(), new AttributesMapper() {
public Object mapFromAttributes(Attributes attributes)
@@ -323,10 +317,7 @@ public class AmbariLdapDataPopulator {
final AndFilter andFilter = new AndFilter();
andFilter.and(new EqualsFilter("objectClass", ldapServerProperties.getGroupObjectClass()));
andFilter.and(new EqualsFilter(ldapServerProperties.getGroupNamingAttr(), groupName));
- String baseDn = ldapServerProperties.getGroupBase();
- if (baseDn == null) {
- baseDn = ldapServerProperties.getBaseDN();
- }
+ String baseDn = ldapServerProperties.getBaseDN();
ldapTemplate.search(baseDn, andFilter.encode(), new ContextMapper() {
public Object mapFromContext(Object ctx) {