Posted to commits@ambari.apache.org by js...@apache.org on 2014/11/10 22:32:00 UTC
[01/11] ambari git commit: AMBARI-7175. Add explicit stack service inheritance
Repository: ambari
Updated Branches:
refs/heads/trunk b71407f06 -> 2fc7adece
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/package/dummy-script.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/package/dummy-script.py b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/package/dummy-script.py
new file mode 100644
index 0000000..35de4bb
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/package/dummy-script.py
@@ -0,0 +1,20 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/SQOOP2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/SQOOP2/metainfo.xml b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/SQOOP2/metainfo.xml
new file mode 100644
index 0000000..33cf448
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/SQOOP2/metainfo.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <services>
+ <service>
+ <name>SQOOP2</name>
+ <displayName>Sqoop</displayName>
+ <comment>Results in a cycle since v2.0 extends this version</comment>
+ <version>Extended Version</version>
+ <extends>OTHER/2.0/SQOOP</extends>
+ </service>
+ </services>
+
+</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/metainfo.xml b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/metainfo.xml
new file mode 100644
index 0000000..716972f
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/metainfo.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <versions>
+ <active>true</active>
+ </versions>
+ <extends>1.0</extends>
+</metainfo>
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/repos/hdp.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/repos/hdp.json b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/repos/hdp.json
new file mode 100644
index 0000000..fc51627
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/repos/hdp.json
@@ -0,0 +1,10 @@
+{
+ "HDP-2.1.1": {
+ "latest": {
+ "centos6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "redhat6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "oraclelinux6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "suse11": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/BUILDS/2.1.1.0-118/hdp.repo"
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/repos/repoinfo.xml b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/repos/repoinfo.xml
new file mode 100644
index 0000000..9d8a232
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/repos/repoinfo.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<reposinfo>
+ <latest>./hdp.json</latest>
+ <os family="centos6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="centos5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="suse11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="sles11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+</reposinfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/services/SQOOP2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/services/SQOOP2/metainfo.xml b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/services/SQOOP2/metainfo.xml
new file mode 100644
index 0000000..d2c0e13
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/2.0/services/SQOOP2/metainfo.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <services>
+ <service>
+ <name>SQOOP2</name>
+ <displayName>Sqoop</displayName>
+ <comment>Inherited from parent</comment>
+ <version>Extended from parent version</version>
+ </service>
+ </services>
+
+</metainfo>
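Taken together, these fixtures set up the first inheritance cycle: the OTHER/2.0 stack declares <extends>1.0</extends> at the stack level, while the SQOOP2 service defined in OTHER/1.0 explicitly extends a service in OTHER/2.0 ("Results in a cycle since v2.0 extends this version"), so following the service-inheritance chain eventually returns to where it started. A minimal sketch of how such a chain can be walked and the loop reported, assuming simplified stack/version/service keys and a standalone findCycle helper that are illustrative only and not part of the Ambari code touched by this commit:

import java.util.*;

public class ServiceExtendsChecker {
  // Map from a "stack/version/service" key to the key of the service it
  // extends, built (hypothetically) from metainfo.xml files like the ones above.
  public static List<String> findCycle(Map<String, String> extendsOf, String start) {
    List<String> chain = new ArrayList<>();
    Set<String> seen = new HashSet<>();
    String current = start;
    while (current != null) {
      if (!seen.add(current)) {           // key seen before: the chain loops
        chain.add(current);
        return chain;                     // chain ends where the cycle closes
      }
      chain.add(current);
      current = extendsOf.get(current);   // follow the explicit extends link
    }
    return Collections.emptyList();       // chain ended at a root, no cycle
  }

  public static void main(String[] args) {
    // Simplified two-node version of the OTHER/1.0 <-> OTHER/2.0 situation.
    Map<String, String> extendsOf = new HashMap<>();
    extendsOf.put("OTHER/1.0/SQOOP2", "OTHER/2.0/SQOOP2");
    extendsOf.put("OTHER/2.0/SQOOP2", "OTHER/1.0/SQOOP2");
    // Prints [OTHER/1.0/SQOOP2, OTHER/2.0/SQOOP2, OTHER/1.0/SQOOP2].
    System.out.println(findCycle(extendsOf, "OTHER/1.0/SQOOP2"));
  }
}

A resolver that detects the repeated key this way can fail fast with a clear error instead of recursing forever while merging parent and child service definitions.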
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/metainfo.xml b/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/metainfo.xml
new file mode 100644
index 0000000..31716d2
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/metainfo.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <versions>
+ <active>true</active>
+ </versions>
+</metainfo>
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/repos/hdp.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/repos/hdp.json b/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/repos/hdp.json
new file mode 100644
index 0000000..fc51627
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/repos/hdp.json
@@ -0,0 +1,10 @@
+{
+ "HDP-2.1.1": {
+ "latest": {
+ "centos6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "redhat6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "oraclelinux6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "suse11": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/BUILDS/2.1.1.0-118/hdp.repo"
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/repos/repoinfo.xml b/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/repos/repoinfo.xml
new file mode 100644
index 0000000..9d8a232
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/repos/repoinfo.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<reposinfo>
+ <latest>./hdp.json</latest>
+ <os family="centos6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="centos5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="suse11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="sles11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+</reposinfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/services/HDFS/metainfo.xml b/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/services/HDFS/metainfo.xml
new file mode 100644
index 0000000..6060699
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack1/1.0/services/HDFS/metainfo.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <services>
+ <service>
+ <name>HDFS</name>
+ <comment>Apache Hadoop Distributed File System</comment>
+ <version>2.1.0.2.0.6.0</version>
+ <extends>stack2/1.0/HDFS</extends>
+ </service>
+ </services>
+</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/metainfo.xml b/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/metainfo.xml
new file mode 100644
index 0000000..31716d2
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/metainfo.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <versions>
+ <active>true</active>
+ </versions>
+</metainfo>
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/repos/hdp.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/repos/hdp.json b/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/repos/hdp.json
new file mode 100644
index 0000000..fc51627
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/repos/hdp.json
@@ -0,0 +1,10 @@
+{
+ "HDP-2.1.1": {
+ "latest": {
+ "centos6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "redhat6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "oraclelinux6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "suse11": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/BUILDS/2.1.1.0-118/hdp.repo"
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/repos/repoinfo.xml b/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/repos/repoinfo.xml
new file mode 100644
index 0000000..9d8a232
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/repos/repoinfo.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<reposinfo>
+ <latest>./hdp.json</latest>
+ <os family="centos6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="centos5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="suse11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="sles11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+</reposinfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/services/HDFS/metainfo.xml b/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/services/HDFS/metainfo.xml
new file mode 100644
index 0000000..b036801
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack2/1.0/services/HDFS/metainfo.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <services>
+ <service>
+ <name>HDFS</name>
+ <comment>Apache Hadoop Distributed File System</comment>
+ <version>2.1.0.2.0.6.0</version>
+ <extends>stack3/1.0/HDFS</extends>
+ </service>
+ </services>
+</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/metainfo.xml b/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/metainfo.xml
new file mode 100644
index 0000000..31716d2
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/metainfo.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <versions>
+ <active>true</active>
+ </versions>
+</metainfo>
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/repos/hdp.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/repos/hdp.json b/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/repos/hdp.json
new file mode 100644
index 0000000..fc51627
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/repos/hdp.json
@@ -0,0 +1,10 @@
+{
+ "HDP-2.1.1": {
+ "latest": {
+ "centos6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "redhat6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "oraclelinux6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "suse11": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/BUILDS/2.1.1.0-118/hdp.repo"
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/repos/repoinfo.xml b/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/repos/repoinfo.xml
new file mode 100644
index 0000000..9d8a232
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/repos/repoinfo.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<reposinfo>
+ <latest>./hdp.json</latest>
+ <os family="centos6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="centos5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="suse11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="sles11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+</reposinfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/services/HDFS/metainfo.xml b/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/services/HDFS/metainfo.xml
new file mode 100644
index 0000000..8c6fa13
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle2/stack3/1.0/services/HDFS/metainfo.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <services>
+ <service>
+ <name>HDFS</name>
+ <comment>Apache Hadoop Distributed File System</comment>
+ <version>2.1.0.2.0.6.0</version>
+ <extends>stack1/1.0/HDFS</extends>
+ </service>
+ </services>
+</metainfo>
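The stacks_with_cycle2 fixtures close a longer loop across three stacks: HDFS in stack1/1.0 extends stack2/1.0/HDFS, HDFS in stack2/1.0 extends stack3/1.0/HDFS, and HDFS in stack3/1.0 extends stack1/1.0/HDFS. Fed to the same illustrative findCycle helper sketched earlier (again an assumption for demonstration, not the code under test), the walk surfaces the three-member loop from any starting point:

    // Hypothetical usage of the findCycle sketch above with the stack1/2/3 fixtures.
    Map<String, String> extendsOf = new HashMap<>();
    extendsOf.put("stack1/1.0/HDFS", "stack2/1.0/HDFS");
    extendsOf.put("stack2/1.0/HDFS", "stack3/1.0/HDFS");
    extendsOf.put("stack3/1.0/HDFS", "stack1/1.0/HDFS");
    // Prints [stack1/1.0/HDFS, stack2/1.0/HDFS, stack3/1.0/HDFS, stack1/1.0/HDFS].
    System.out.println(ServiceExtendsChecker.findCycle(extendsOf, "stack1/1.0/HDFS"));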
[07/11] ambari git commit: AMBARI-7175. Add explicit stack service inheritance
Posted by js...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
deleted file mode 100644
index f05aab5..0000000
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
+++ /dev/null
@@ -1,792 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.api.util;
-
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-
-import org.apache.ambari.server.api.services.AmbariMetaInfo;
-import org.apache.ambari.server.metadata.ActionMetadata;
-import org.apache.ambari.server.state.*;
-import org.apache.ambari.server.state.stack.ConfigurationXml;
-import org.junit.Test;
-import org.xml.sax.SAXException;
-
-import javax.xml.bind.JAXBException;
-import javax.xml.namespace.QName;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.xpath.XPathExpressionException;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.*;
-
-import static org.easymock.EasyMock.*;
-import static org.junit.Assert.*;
-
-public class StackExtensionHelperTest {
-
- private final String stackRootStr = "./src/test/resources/stacks/".
- replaceAll("/", File.separator);
-
- private Injector injector = Guice.createInjector(new MockModule());
-
-
- public class MockModule extends AbstractModule {
- @Override
- protected void configure() {
- bind(ActionMetadata.class);
- }
- }
-
- /**
- * Checks than service metainfo is parsed correctly both for ver 1 services
- * and for ver 2 services
- */
- @Test
- public void testPopulateServicesForStack() throws Exception {
- File stackRoot = new File(stackRootStr);
- StackInfo stackInfo = new StackInfo();
- stackInfo.setName("HDP");
- stackInfo.setVersion("2.0.7");
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- helper.populateServicesForStack(stackInfo);
- List<ServiceInfo> services = stackInfo.getServices();
- assertEquals(8, services.size());
- for (ServiceInfo serviceInfo : services) {
- if (serviceInfo.getName().equals("HIVE")) {
- // Check old-style service
- assertEquals("HIVE", serviceInfo.getName());
- assertEquals("Hive", serviceInfo.getDisplayName());
- assertEquals("2.0", serviceInfo.getSchemaVersion());
- assertTrue(serviceInfo.getComment().startsWith("Data warehouse system"));
- assertEquals("0.11.0.2.0.5.0", serviceInfo.getVersion());
- // Check some component definitions
- List<ComponentInfo> components = serviceInfo.getComponents();
- assertEquals("HIVE_METASTORE", components.get(0).getName());
- assertEquals("Hive Metastore", components.get(0).getDisplayName());
- assertEquals("MASTER", components.get(0).getCategory());
- List<PropertyInfo> properties = serviceInfo.getProperties();
- // Check some property
- assertEquals(37, properties.size());
- boolean found = false;
- for (PropertyInfo property : properties) {
- if (property.getName().equals("javax.jdo.option.ConnectionDriverName")) {
- assertEquals("com.mysql.jdbc.Driver", property.getValue());
- assertEquals("hive-site.xml",
- property.getFilename());
- assertEquals(false, property.isDeleted());
- found = true;
- }
- }
- assertTrue("Property not found in a list of properties", found);
- // Check config dependencies
- List<String> configDependencies = serviceInfo.getConfigDependencies();
- assertEquals(2, configDependencies.size());
- assertEquals("hive-site", configDependencies.get(1));
- } else if (serviceInfo.getName().equals("HBASE")) {
- assertEquals("HBASE", serviceInfo.getName());
- assertEquals("HDP/2.0.7/services/HBASE/package",
- serviceInfo.getServicePackageFolder());
- assertEquals("2.0", serviceInfo.getSchemaVersion());
- assertTrue(serviceInfo.getComment().startsWith("Non-relational distr"));
- assertEquals("0.96.0.2.0.6.0", serviceInfo.getVersion());
- // Check some component definitions
- List<ComponentInfo> components = serviceInfo.getComponents();
- assertTrue(components.size() == 3);
- ComponentInfo firstComponent = components.get(0);
- assertEquals("HBASE_MASTER", firstComponent.getName());
- assertEquals("MASTER", firstComponent.getCategory());
- // Check command script for component
- assertEquals("scripts/hbase_master.py",
- firstComponent.getCommandScript().getScript());
- assertEquals(CommandScriptDefinition.Type.PYTHON,
- firstComponent.getCommandScript().getScriptType());
- assertEquals(777,
- firstComponent.getCommandScript().getTimeout());
- // Check custom commands for component
- List<CustomCommandDefinition> customCommands =
- firstComponent.getCustomCommands();
- assertEquals(2, customCommands.size());
- assertEquals("RESTART", customCommands.get(0).getName());
- assertTrue(firstComponent.isCustomCommand("RESTART"));
- assertEquals("scripts/hbase_master_restart.py",
- customCommands.get(0).getCommandScript().getScript());
- assertEquals(CommandScriptDefinition.Type.PYTHON,
- customCommands.get(0).getCommandScript().getScriptType());
- assertEquals(888,
- customCommands.get(0).getCommandScript().getTimeout());
- // Check all parsed os specifics
- Map<String,ServiceOsSpecific> specifics = serviceInfo.getOsSpecifics();
- assertTrue(specifics.size() == 2);
- ServiceOsSpecific anyOs = specifics.get(AmbariMetaInfo.ANY_OS);
- assertEquals(AmbariMetaInfo.ANY_OS, anyOs.getOsFamily());
- assertEquals("wget", anyOs.getPackages().get(0).getName());
-
- // Test default timeout value
- ComponentInfo secondComponent = components.get(1);
- assertEquals("HBASE_REGIONSERVER", secondComponent.getName());
- assertEquals(0,
- secondComponent.getCommandScript().getTimeout());
-
- ServiceOsSpecific c6Os = specifics.get("centos6");
- assertEquals("centos6", c6Os.getOsFamily());
- assertEquals("hbase", c6Os.getPackages().get(0).getName());
- assertEquals("http://something.com/centos6/2.x/updates/1",
- c6Os.getRepo().getBaseUrl());
- assertEquals("Custom-repo-1",
- c6Os.getRepo().getRepoId());
- assertEquals("Custom-repo",
- c6Os.getRepo().getRepoName());
- // Check custom commands for service
- assertTrue(serviceInfo.getCustomCommands().size() == 1);
- CustomCommandDefinition customCommand =
- serviceInfo.getCustomCommands().get(0);
- assertEquals("SERVICE_VALIDATION", customCommand.getName());
- assertEquals("scripts/hbase_validation.py",
- customCommand.getCommandScript().getScript());
- assertEquals(CommandScriptDefinition.Type.PYTHON,
- customCommand.getCommandScript().getScriptType());
- assertEquals(300, customCommand.getCommandScript().getTimeout());
- // Check command script for service
- CommandScriptDefinition serviceScriptDefinition = serviceInfo.getCommandScript();
- assertEquals("scripts/service_check.py", serviceScriptDefinition.getScript());
- assertEquals(CommandScriptDefinition.Type.PYTHON,
- serviceScriptDefinition.getScriptType());
- assertEquals(50, serviceScriptDefinition.getTimeout());
- // Check some property
- List<PropertyInfo> properties = serviceInfo.getProperties();
- List<PropertyInfo> emptyValueProperties = new ArrayList<PropertyInfo>();
- for (PropertyInfo propertyInfo : properties) {
- if (propertyInfo.getValue().isEmpty()) {
- emptyValueProperties.add(propertyInfo);
- }
- }
- assertEquals(28, emptyValueProperties.size());
- assertEquals(68, properties.size());
- boolean foundHBaseClusterDistributed = false;
- boolean foundHBaseRegionServerXmnMax = false;
- boolean foundHBaseRegionServerXmnRatio = false;
- for (PropertyInfo property : properties) {
- if (property.getName().equals("hbase.cluster.distributed")) {
- assertEquals("true",
- property.getValue());
- assertTrue(property.getDescription().startsWith("The mode the"));
- assertEquals("hbase-site.xml",
- property.getFilename());
- foundHBaseClusterDistributed = true;
- } else if (property.getName().equals("hbase_regionserver_xmn_max")) {
- assertEquals("512", property.getValue());
- assertEquals("global.xml",
- property.getFilename());
- foundHBaseRegionServerXmnMax = true;
- } else if (property.getName().equals("hbase_regionserver_xmn_ratio")) {
- assertEquals("global.xml",
- property.getFilename());
- assertEquals("0.2", property.getValue());
- foundHBaseRegionServerXmnRatio = true;
- }
- }
-
- assertTrue("Property hbase.cluster.distributed not found in a list of properties",
- foundHBaseClusterDistributed);
- assertTrue("Property hbase_regionserver_xmn_max not found in a list of properties",
- foundHBaseRegionServerXmnMax);
- assertTrue("Property hbase_regionserver_xmn_ratio not found in a list of properties",
- foundHBaseRegionServerXmnRatio);
-
- List<String> configDependencies = serviceInfo.getConfigDependencies();
- assertEquals(3, configDependencies.size());
- assertEquals("global", configDependencies.get(0));
- assertEquals("hbase-policy", configDependencies.get(1));
- assertEquals("hbase-site", configDependencies.get(2));
- } else if(serviceInfo.getName().equals("ZOOKEEPER")) {
- assertTrue(serviceInfo.isRestartRequiredAfterChange());
- } else {
- if (!serviceInfo.getName().equals("YARN") &&
- !serviceInfo.getName().equals("HDFS") &&
- !serviceInfo.getName().equals("MAPREDUCE2") &&
- !serviceInfo.getName().equals("NAGIOS") &&
- !serviceInfo.getName().equals("SQOOP")) {
- fail("Unknown service");
- }
- }
- }
- }
-
- @Test
- public void testConfigDependenciesInheritance() throws Exception{
- File stackRoot = new File(stackRootStr);
- StackInfo stackInfo = new StackInfo();
- stackInfo.setName("HDP");
- stackInfo.setVersion("2.0.6");
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- helper.populateServicesForStack(stackInfo);
- helper.fillInfo();
- List<ServiceInfo> allServices = helper.getAllApplicableServices(stackInfo);
- for (ServiceInfo serviceInfo : allServices) {
- if (serviceInfo.getName().equals("HDFS")) {
- assertEquals(5, serviceInfo.getConfigDependencies().size());
- assertEquals(4, serviceInfo.getConfigTypes().size());
- assertTrue(serviceInfo.getConfigDependencies().contains("core-site"));
- assertTrue(serviceInfo.getConfigDependencies().contains("global"));
- assertTrue(serviceInfo.getConfigDependencies().contains("hdfs-site"));
- assertTrue(serviceInfo.getConfigDependencies().contains("hdfs-log4j"));
- assertTrue(serviceInfo.getConfigDependencies().contains("hadoop-policy"));
- assertTrue(Boolean.valueOf(serviceInfo.getConfigTypes().get("core-site").get("supports").get("final")));
- assertFalse(Boolean.valueOf(serviceInfo.getConfigTypes().get("global").get("supports").get("final")));
- } else if (serviceInfo.getName().equals("WEBHCAT")) {
- assertEquals(1, serviceInfo.getConfigDependencies().size());
- assertEquals(1, serviceInfo.getConfigTypes().size());
- assertTrue(serviceInfo.getConfigDependencies().contains("webhcat-site"));
- assertTrue(Boolean.valueOf(serviceInfo.getConfigTypes().get("webhcat-site").get("supports").get("final")));
- }
- }
- }
-
- @Test
- public void testClientConfigFilesInheritance() throws Exception{
- File stackRoot = new File(stackRootStr);
- StackInfo stackInfo = new StackInfo();
- stackInfo.setName("HDP");
- stackInfo.setVersion("2.0.6");
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- helper.populateServicesForStack(stackInfo);
- helper.fillInfo();
- List<ServiceInfo> allServices = helper.getAllApplicableServices(stackInfo);
- for (ServiceInfo serviceInfo : allServices) {
- if (serviceInfo.getName().equals("ZOOKEEPER")) {
- List<ComponentInfo> components = serviceInfo.getComponents();
- assertTrue(components.size() == 2);
- ComponentInfo componentInfo = components.get(1);
- List<ClientConfigFileDefinition> clientConfigs = componentInfo.getClientConfigFiles();
- assertEquals(2,clientConfigs.size());
- assertEquals("zookeeper-env",clientConfigs.get(0).getDictionaryName());
- assertEquals("zookeeper-env.sh",clientConfigs.get(0).getFileName());
- assertEquals("env",clientConfigs.get(0).getType());
- assertEquals("zookeeper-log4j",clientConfigs.get(1).getDictionaryName());
- assertEquals("log4j.properties",clientConfigs.get(1).getFileName());
- assertEquals("env",clientConfigs.get(1).getType());
- }
- }
- }
-
- @Test
- public void testMonitoringServicePropertyInheritance() throws Exception{
- File stackRoot = new File(stackRootStr);
- StackInfo stackInfo = new StackInfo();
- stackInfo.setName("HDP");
- stackInfo.setVersion("2.0.7");
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- helper.populateServicesForStack(stackInfo);
- helper.fillInfo();
- List<ServiceInfo> allServices = helper.getAllApplicableServices(stackInfo);
- assertEquals(13, allServices.size());
- for (ServiceInfo serviceInfo : allServices) {
- if (serviceInfo.getName().equals("NAGIOS")) {
- assertTrue(serviceInfo.isMonitoringService());
- } else {
- assertNull(serviceInfo.isMonitoringService());
- }
- }
- }
-
- @Test
- public void getSchemaVersion() throws Exception {
- File stackRoot = new File(stackRootStr);
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- File legacyMetaInfoFile = new File("./src/test/resources/stacks/HDP/2.0.7/" +
- "services/HIVE/metainfo.xml".replaceAll("/", File.separator));
- String version = helper.getSchemaVersion(legacyMetaInfoFile);
- assertEquals("2.0", version);
-
- File v2MetaInfoFile = new File("./src/test/resources/stacks/HDP/2.0.7/" +
- "services/HBASE/metainfo.xml".replaceAll("/", File.separator));
- version = helper.getSchemaVersion(v2MetaInfoFile);
- assertEquals("2.0", version);
- }
-
- public StackExtensionHelper getStackExtensionHelper() {
- File stackRoot = new File(stackRootStr);
- return new StackExtensionHelper(injector, stackRoot);
- }
-
- public ServiceInfo getServiceFromStack(StackExtensionHelper helper, String stackName, String stackVersion, String serviceName) throws XPathExpressionException, ParserConfigurationException, SAXException, IOException, JAXBException {
- StackInfo stackInfo = new StackInfo();
- stackInfo.setName(stackName);
- stackInfo.setVersion(stackVersion);
-
- helper.populateServicesForStack(stackInfo);
-
- for(ServiceInfo service:stackInfo.getServices()) {
- if(service.getName().equals(serviceName)) {
- return service;
- }
- }
- return null;
- }
-
- private void addToPropertyMap(Map<String, Map<String, Map<String, String>>> configTypes,String configType,
- String keyword, String attributeName, String value) {
- if(!configTypes.containsKey(configType)) {
- configTypes.put(configType, new HashMap<String, Map<String, String>>());
- }
- Map<String, Map<String, String>> config = configTypes.get(configType);
- if(!config.containsKey(keyword)) {
- config.put(keyword, new HashMap<String, String>());
- }
- Map<String, String> supports = config.get(keyword);
- supports.put(attributeName, value);
- }
-
- /**
- * This test ensures the service status check is added into the action metadata when
- * the stack has no parent and is the only stack in the stack family
- * @throws Exception
- */
- @Test
- public void testGetServiceInfoFromSingleStack() throws Exception {
- File stackRoot = new File("./src/test/resources/single_stack".replace("/", File.separator));
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- helper.fillInfo();
- List<StackInfo> stackInfoList = helper.getAllAvailableStacks();
- assertEquals(1, stackInfoList.size());
-
- List<ServiceInfo> serviceInfoList = helper.getAllApplicableServices(stackInfoList.get(0));
- for(ServiceInfo serviceInfo: serviceInfoList) {
- if ("HDFS".equalsIgnoreCase(serviceInfo.getName())) {
- ActionMetadata actionMetadata = injector.getInstance(ActionMetadata.class);
- String hdfsStatusCheckCmd = actionMetadata.getServiceCheckAction("HDFS");
- assertEquals("HDFS_SERVICE_CHECK", hdfsStatusCheckCmd);
- break;
- }
- }
- }
-
- @Test
- public void testPopulateConfigTypes() throws XPathExpressionException, ParserConfigurationException, SAXException, IOException, JAXBException {
- StackExtensionHelper helper = getStackExtensionHelper();
- ServiceInfo serviceInfo = getServiceFromStack(helper, "HDP", "2.0.7", "HDFS");
-
-
- Map<String, Map<String, Map<String, String>>> expectedConfigTypes = new HashMap<String, Map<String, Map<String, String>>>();
- addToPropertyMap(expectedConfigTypes, "global", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "true");
- addToPropertyMap(expectedConfigTypes, "global", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "global", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hdfs-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hdfs-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hdfs-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hadoop-policy", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hadoop-policy", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hadoop-policy", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "core-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "core-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "core-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
-
- Map<String, Map<String, Map<String, String>>> configTypes = serviceInfo.getConfigTypes();
- assertEquals(4, configTypes.size());
- assertEquals(expectedConfigTypes, configTypes);
- }
-
- @Test
- public void testAddConfigTypeProperty_configTypesIsNull() {
- // init
- File stackRoot = new File(stackRootStr);
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- ServiceInfo serviceInfo = createMock(ServiceInfo.class);
-
- // expectations
- expect(serviceInfo.getConfigTypes()).andReturn(null);
- replay(serviceInfo);
-
- // eval
- helper.addConfigTypeProperty(serviceInfo.getConfigTypes(), "dep", "group", "key", "value");
-
- // verification
- verify(serviceInfo);
- }
-
- @Test
- public void testAddConfigTypeProperty_groupDoesNotExist() {
- // init
- File stackRoot = new File(stackRootStr);
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- ServiceInfo serviceInfo = new ServiceInfo();
- Map<String, Map<String, Map<String, String>>> configTypes = new HashMap<String, Map<String, Map<String, String>>>();
- Map<String, Map<String, String>> groupMap = new HashMap<String, Map<String, String>>();
- configTypes.put("dep", groupMap);
- serviceInfo.setConfigTypes(configTypes);
-
- // eval
- helper.addConfigTypeProperty(serviceInfo.getConfigTypes(), "dep", "group", "key", "value");
-
- // assert
- configTypes = serviceInfo.getConfigTypes();
- assertEquals(1, configTypes.size());
- assertTrue(configTypes.containsKey("dep"));
- Map<String, Map<String, String>> configType = configTypes.get("dep");
- assertTrue(configType.containsKey("group"));
- Map<String, String> group = configType.get("group");
- assertEquals(1, group.size());
- assertTrue(group.containsKey("key"));
- assertEquals("value", group.get("key"));
- }
-
- @Test
- public void testAddConfigTypeProperty_typeDoesNotExist() {
- // init
- File stackRoot = new File(stackRootStr);
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- ServiceInfo serviceInfo = new ServiceInfo();
- Map<String, Map<String, Map<String, String>>> configTypes = new HashMap<String, Map<String, Map<String, String>>>();
- Map<String, Map<String, String>> groupMap = new HashMap<String, Map<String, String>>();
- configTypes.put("dep", groupMap);
- serviceInfo.setConfigTypes(configTypes);
-
- // eval
- helper.addConfigTypeProperty(serviceInfo.getConfigTypes(), "no_such_dep", "group", "key", "value");
-
- // assert
- configTypes = serviceInfo.getConfigTypes();
- assertEquals(1, configTypes.size());
- assertFalse(configTypes.containsKey("no_such_dep"));
- assertTrue(configTypes.containsKey("dep"));
- Map<String, Map<String, String>> configType = configTypes.get("dep");
- assertEquals(0, configType.size());
- }
-
- @Test
- public void testAddConfigTypeProperty_groupExist() {
- // init
- File stackRoot = new File(stackRootStr);
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- ServiceInfo serviceInfo = new ServiceInfo();
- Map<String, Map<String, Map<String, String>>> configTypes = new HashMap<String, Map<String, Map<String, String>>>();
- Map<String, Map<String, String>> groupMap = new HashMap<String, Map<String, String>>();
- Map<String, String> propertiesMap = new HashMap<String, String>();
- groupMap.put("group", propertiesMap);
- configTypes.put("dep", groupMap);
- serviceInfo.setConfigTypes(configTypes);
-
- // eval
- helper.addConfigTypeProperty(serviceInfo.getConfigTypes(), "dep", "group", "key", "value");
-
- // assert
- configTypes = serviceInfo.getConfigTypes();
- assertEquals(1, configTypes.size());
- assertTrue(configTypes.containsKey("dep"));
- Map<String, Map<String, String>> configType = configTypes.get("dep");
- assertTrue(configType.containsKey("group"));
- Map<String, String> group = configType.get("group");
- assertTrue(group.containsKey("key"));
- assertEquals("value", group.get("key"));
- }
-
- @Test
- public void testPopulateServiceProperties_noSupportsFinalFlag() throws Exception {
- StackExtensionHelper helper = getStackExtensionHelper();
- ServiceInfo serviceInfo = getServiceFromStack(helper, "HDP", "2.0.7", "YARN");
-
- File configFile = new File(stackRootStr
- + "HDP/2.0.7/services/YARN/configuration/yarn-site.xml".replaceAll("/", File.separator));
-
- helper.populateServiceProperties(configFile, serviceInfo);
-
- Map<String, Map<String, Map<String, String>>> expectedConfigTypes = new HashMap<String, Map<String, Map<String, String>>>();
- addToPropertyMap(expectedConfigTypes, "yarn-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "yarn-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "true");
- addToPropertyMap(expectedConfigTypes, "yarn-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "true");
-
- assertEquals(expectedConfigTypes, serviceInfo.getConfigTypes());
- }
-
- @Test
- public void testPopulateServiceProperties_supportsFinalTrue() throws Exception {
- StackExtensionHelper helper = getStackExtensionHelper();
- ServiceInfo serviceInfo = getServiceFromStack(helper, "HDP", "2.0.7", "HDFS");
-
- File configFile = new File(stackRootStr
- + "HDP/2.0.7/services/HDFS/configuration/global.xml".replaceAll("/", File.separator));
- helper.populateServiceProperties(configFile, serviceInfo);
-
- Map<String, Map<String, Map<String, String>>> expectedConfigTypes = new HashMap<String, Map<String, Map<String, String>>>();
- addToPropertyMap(expectedConfigTypes, "global", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "true");
- addToPropertyMap(expectedConfigTypes, "global", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "global", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hdfs-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hdfs-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hdfs-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hadoop-policy", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hadoop-policy", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hadoop-policy", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "core-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "core-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "core-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
-
- assertEquals(expectedConfigTypes, serviceInfo.getConfigTypes());
- }
-
- @Test
- public void testPopulateServiceProperties_supportsFinalFalse() throws Exception {
- StackExtensionHelper helper = getStackExtensionHelper();
- ServiceInfo serviceInfo = getServiceFromStack(helper, "HDP", "2.0.7", "HDFS");
- File configFile = new File(stackRootStr
- + "HDP/2.0.7/services/YARN/configuration/yarn-site.xml".replaceAll("/", File.separator));
- helper.populateServiceProperties(configFile, serviceInfo);
-
- Map<String, Map<String, Map<String, String>>> expectedConfigTypes = new HashMap<String, Map<String, Map<String, String>>>();
- addToPropertyMap(expectedConfigTypes, "global", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "true");
- addToPropertyMap(expectedConfigTypes, "global", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "global", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hdfs-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hdfs-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hdfs-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hadoop-policy", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hadoop-policy", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "hadoop-policy", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "core-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "core-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "core-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "yarn-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.FINAL.getPropertyName(), "false");
- addToPropertyMap(expectedConfigTypes, "yarn-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.ADDING_FORBIDDEN.getPropertyName(), "true");
- addToPropertyMap(expectedConfigTypes, "yarn-site", StackExtensionHelper.Supports.KEYWORD,
- StackExtensionHelper.Supports.DO_NOT_EXTEND.getPropertyName(), "true");
-
- assertEquals(expectedConfigTypes, serviceInfo.getConfigTypes());
- }
-
- @Test
- public void testPopulateServiceProperties_supportsFinalWrongType() throws Exception {
- // init
- File stackRoot = new File(stackRootStr);
- StackExtensionHelper helper = createMockBuilder(StackExtensionHelper.class).addMockedMethod("addConfigTypeProperty")
- .withConstructor(injector, stackRoot).createMock();
- File config = new File("./src/test/resources/bad-stacks/HDP/0.1/services/YARN/configuration/yarn-site.xml"
- .replaceAll("/", File.separator));
- ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
- List<PropertyInfo> properties = createNiceMock(List.class);
-
- // expectations
- expect(serviceInfo.getConfigTypes()).andReturn(new HashMap<String, Map<String, Map<String, String>>>()).times(2);
- expect(serviceInfo.getProperties()).andReturn(properties).times(1);
- expect(properties.addAll((Collection) anyObject())).andReturn(true).times(1);
- replay(properties, serviceInfo);
-
- // eval
- helper.populateServiceProperties(config, serviceInfo);
-
- // verification
- verify(properties, serviceInfo);
- }
-
- @Test
- public void testPopulateServiceProperties_configTypesIsNull() throws Exception {
- // init
- File stackRoot = new File(stackRootStr);
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- File config = new File(stackRootStr
- + "HDP/2.1.1/services/PIG/configuration/pig-properties.xml".replaceAll("/", File.separator));
- ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
- List<PropertyInfo> properties = createMock(List.class);
-
- // expectations
- expect(serviceInfo.getConfigTypes()).andReturn(new HashMap<String, Map<String, Map<String, String>>>()).times(2);
- expect(serviceInfo.getProperties()).andReturn(properties).times(1);
- expect(properties.addAll((Collection) anyObject())).andReturn(true).times(1);
- replay(properties);
- replay(serviceInfo);
-
- // eval
- helper.populateServiceProperties(config, serviceInfo);
-
- // verification
- verify(properties, serviceInfo);
- }
-
- @Test
- public void testUnmarshallConfigurationXml() throws Exception {
- File configFile = new File("./src/test/resources/bad-stacks/HDP/0.1/services/YARN/configuration/capacity-scheduler.xml");
- ConfigurationXml config = StackExtensionHelper.unmarshal(ConfigurationXml.class, configFile);
- Map<QName, String> attributes = config.getAttributes();
- List<PropertyInfo> properties = config.getProperties();
-
- // attributes verification
- assertEquals(2, attributes.size());
- QName supportsFinal = new QName("", "supports_final");
- assertTrue(attributes.containsKey(supportsFinal));
- assertEquals("false", attributes.get(supportsFinal));
- QName supportsDeletable = new QName("", "supports_deletable");
- assertTrue(attributes.containsKey(supportsDeletable));
- assertEquals("false", attributes.get(supportsDeletable));
-
- // properties verification
- assertEquals(3, properties.size());
-
- PropertyInfo propertyInfo;
- propertyInfo = properties.get(0);
- assertEquals("yarn.scheduler.capacity.maximum-applications", propertyInfo.getName());
- assertEquals("Maximum number of applications that can be pending and running.", propertyInfo.getDescription());
- assertEquals("10000", propertyInfo.getValue());
- assertEquals(1, propertyInfo.getAttributesMap().size());
- assertEquals("true", propertyInfo.getAttributesMap().get("final"));
- assertEquals(null, propertyInfo.getFilename());
- assertEquals(false, propertyInfo.isDeleted());
- assertEquals(false, propertyInfo.isRequireInput());
- assertTrue(propertyInfo.getPropertyTypes().isEmpty());
-
- propertyInfo = properties.get(1);
- assertEquals("yarn.scheduler.capacity.maximum-am-resource-percent", propertyInfo.getName());
- assertEquals("Maximum percent of resources in the cluster.", propertyInfo.getDescription());
- assertEquals("0.2", propertyInfo.getValue());
- assertEquals(1, propertyInfo.getAttributesMap().size());
- assertEquals("false", propertyInfo.getAttributesMap().get("final"));
- assertEquals(null, propertyInfo.getFilename());
- assertEquals(true, propertyInfo.isDeleted());
- assertEquals(false, propertyInfo.isRequireInput());
- assertTrue(propertyInfo.getPropertyTypes().isEmpty());
-
- propertyInfo = properties.get(2);
- assertEquals("yarn.scheduler.capacity.root.queues", propertyInfo.getName());
- assertEquals("The queues at the this level (root is the root queue).", propertyInfo.getDescription());
- assertEquals("default", propertyInfo.getValue());
- assertEquals(0, propertyInfo.getAttributesMap().size());
- assertEquals(null, propertyInfo.getFilename());
- assertEquals(false, propertyInfo.isDeleted());
- assertEquals(true, propertyInfo.isRequireInput());
- assertTrue(propertyInfo.getPropertyTypes().isEmpty());
- }
-
- @Test
- public void testMergeServices_BothConfigTypesAreNull() throws Exception {
- File stackRoot = new File(stackRootStr);
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- ServiceInfo child = new ServiceInfo();
- ServiceInfo parent = new ServiceInfo();
-
- child.setConfigTypes(null);
- child.setConfigDependencies(null);
-
- parent.setConfigTypes(null);
- parent.setConfigDependencies(null);
-
- ServiceInfo merged = helper.mergeServices(parent, child);
-
- assertNotNull(merged.getConfigDependencies());
- assertEquals(0, merged.getConfigDependencies().size());
- assertNotNull(merged.getConfigTypes());
- assertEquals(0, merged.getConfigTypes().size());
- }
-
- @Test
- public void testServiceInheritance() throws Exception {
- File stackRoot = new File(stackRootStr);
- StackInfo stackInfo = new StackInfo();
- stackInfo.setName("HDP");
- stackInfo.setVersion("2.0.6");
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- helper.populateServicesForStack(stackInfo);
- helper.fillInfo();
- List<ServiceInfo> allServices = helper.getAllApplicableServices(stackInfo);
- for (ServiceInfo serviceInfo : allServices) {
- if (serviceInfo.getName().equals("HDFS")){
- assertEquals("HDFS", serviceInfo.getName());
- assertNotNull(serviceInfo.getMetricsFile());
- }
- }
-
- }
-
-
- @Test
- public void testMergeComponentInfo() throws Exception {
- File stackRoot = new File(stackRootStr);
- StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
- ComponentInfo child = new ComponentInfo();
- ComponentInfo parent = new ComponentInfo();
- DependencyInfo a = new DependencyInfo();
- a.setName("serviceName/A");
- DependencyInfo b = new DependencyInfo();
- b.setName("serviceName/B");
- List<DependencyInfo> parentDependencies = new ArrayList<DependencyInfo>();
- parentDependencies.add(a);
- parentDependencies.add(b);
- parent.setDependencies(parentDependencies);
-
- DependencyInfo c = new DependencyInfo();
- c.setName("serviceName/C");
- List<DependencyInfo> childDependencies = new ArrayList<DependencyInfo>();
- childDependencies.add(c);
- child.setDependencies(childDependencies);
-
- child.setCardinality("1");
- parent.setCardinality("1+");
-
- child.setCategory("CLIENT");
- parent.setCategory("MASTER");
-
- ComponentInfo result = helper.mergeComponents(parent, child);
-
- assertEquals(result.getCardinality(),"1");
- assertEquals(result.getCategory(), "CLIENT");
- assertEquals(result.getDependencies().size(), 3);
- }
-}
-
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
index 5c8b4ad..e068508 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
@@ -299,7 +299,7 @@ public class AmbariManagementControllerImplTest {
expect(serviceInfo.getClientComponent()).andReturn(compInfo);
expect(compInfo.getName()).andReturn("component");
expect(component.getServiceComponentHosts()).andReturn(Collections.<String, ServiceComponentHost>singletonMap("host", null));
- expect(ambariMetaInfo.getServiceInfo("stack", "1.0", "service")).andReturn(serviceInfo);
+ expect(ambariMetaInfo.getService("stack", "1.0", "service")).andReturn(serviceInfo);
replay(injector, cluster, service, component, serviceInfo, compInfo, ambariMetaInfo, stackId);
@@ -339,7 +339,7 @@ public class AmbariManagementControllerImplTest {
ComponentInfo compInfo = createNiceMock(ComponentInfo.class);
expect(serviceInfo.getClientComponent()).andReturn(compInfo);
expect(compInfo.getName()).andReturn("component");
- expect(ambariMetaInfo.getServiceInfo("stack", "1.0", "service")).andReturn(serviceInfo);
+ expect(ambariMetaInfo.getService("stack", "1.0", "service")).andReturn(serviceInfo);
replay(injector, cluster, service, component1, component2, serviceInfo, compInfo, ambariMetaInfo, stackId);
@@ -375,7 +375,7 @@ public class AmbariManagementControllerImplTest {
ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
expect(serviceInfo.getClientComponent()).andReturn(null);
- expect(ambariMetaInfo.getServiceInfo("stack", "1.0", "service")).andReturn(serviceInfo);
+ expect(ambariMetaInfo.getService("stack", "1.0", "service")).andReturn(serviceInfo);
replay(injector, cluster, service, component1, component2, serviceInfo, ambariMetaInfo, stackId);
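
The three hunks above swap the retired AmbariMetaInfo.getServiceInfo() expectation for the renamed getService() accessor; nothing else in these tests changes. A minimal EasyMock sketch of the new expectation style, assuming getService keeps the (stackName, version, serviceName) signature shown in the hunks and that the Ambari server test classpath is available (the wrapper class and main method are illustrative only, not part of the commit):

import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.state.ServiceInfo;

public class GetServiceExpectationSketch {
  public static void main(String[] args) throws Exception {
    AmbariMetaInfo ambariMetaInfo = createNiceMock(AmbariMetaInfo.class);
    ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);

    // Renamed lookup: getService(stack, version, service) replaces getServiceInfo(...)
    expect(ambariMetaInfo.getService("stack", "1.0", "service")).andReturn(serviceInfo);
    replay(ambariMetaInfo, serviceInfo);

    ambariMetaInfo.getService("stack", "1.0", "service");  // consume the single expectation
    verify(ambariMetaInfo, serviceInfo);
  }
}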
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index b2c023f..2a0ebc5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -156,7 +156,7 @@ public class AmbariManagementControllerTest {
private static final String NAGIOS_SERVICE_NAME = "NAGIOS";
private static final int STACK_VERSIONS_CNT = 12;
private static final int REPOS_CNT = 3;
- private static final int STACKS_CNT = 2;
+ private static final int STACKS_CNT = 3;
private static final int STACK_PROPERTIES_CNT = 103;
private static final int STACK_COMPONENTS_CNT = 4;
private static final int OS_CNT = 2;
@@ -1718,12 +1718,6 @@ public class AmbariManagementControllerTest {
for (ExecutionCommandWrapper ecw : stage.getExecutionCommands(host)) {
Assert.assertFalse(
ecw.getExecutionCommand().getHostLevelParams().get("repo_info").isEmpty());
-
- LOG.info("Dumping host action details"
- + ", stageId=" + stage.getStageId()
- + ", actionId=" + stage.getActionId()
- + ", commandDetails="
- + StageUtils.jaxbToString(ecw.getExecutionCommand()));
}
}
}
@@ -1786,20 +1780,6 @@ public class AmbariManagementControllerTest {
stages = actionDB.getAllStages(trackAction.getRequestId());
Assert.assertEquals(2, stages.size());
- for (Stage stage : stages) {
- LOG.info("Stage Details for Start Service"
- + ", stageId="+ stage.getStageId()
- + ", actionId=" + stage.getActionId());
-
- for (String host : stage.getHosts()) {
- LOG.info("Dumping host action details"
- + ", stageId=" + stage.getStageId()
- + ", actionId=" + stage.getActionId()
- + ", commandDetails="
- + StageUtils.jaxbToString(stage.getExecutionCommands(host).get(0)));
- }
- }
-
StringBuilder sb = new StringBuilder();
clusters.debugDump(sb);
LOG.info("Cluster Dump: " + sb.toString());
@@ -1837,9 +1817,6 @@ public class AmbariManagementControllerTest {
// TODO validate stages?
stages = actionDB.getAllStages(trackAction.getRequestId());
- for (Stage stage : stages) {
- LOG.info("Stage Details for Stop Service : " + stage.toString());
- }
Assert.assertEquals(1, stages.size());
}
@@ -7071,7 +7048,7 @@ public class AmbariManagementControllerTest {
@Test
public void testGetStackVersionActiveAttr() throws Exception {
- for (StackInfo stackInfo: ambariMetaInfo.getStackInfos(STACK_NAME)) {
+ for (StackInfo stackInfo: ambariMetaInfo.getStacks(STACK_NAME)) {
if (stackInfo.getVersion().equalsIgnoreCase(STACK_VERSION)) {
stackInfo.setActive(true);
}
@@ -8172,7 +8149,7 @@ public class AmbariManagementControllerTest {
@Test
public void testUpdateStacks() throws Exception {
- StackInfo stackInfo = ambariMetaInfo.getStackInfo(STACK_NAME, STACK_VERSION);
+ StackInfo stackInfo = ambariMetaInfo.getStack(STACK_NAME, STACK_VERSION);
for (RepositoryInfo repositoryInfo: stackInfo.getRepositories()) {
assertFalse(INCORRECT_BASE_URL.equals(repositoryInfo.getBaseUrl()));
@@ -8182,7 +8159,7 @@ public class AmbariManagementControllerTest {
controller.updateStacks();
- stackInfo = ambariMetaInfo.getStackInfo(STACK_NAME, STACK_VERSION);
+ stackInfo = ambariMetaInfo.getStack(STACK_NAME, STACK_VERSION);
for (RepositoryInfo repositoryInfo: stackInfo.getRepositories()) {
assertFalse(INCORRECT_BASE_URL.equals(repositoryInfo.getBaseUrl()));
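
Besides dropping the verbose stage-dump logging and bumping STACKS_CNT for the new test stack, this file adopts the renamed stack accessors: getStackInfos() becomes getStacks() and getStackInfo() becomes getStack(). A hedged sketch of the new call pattern, assuming the signatures visible in the hunks above and the usual org.apache.ambari.server.state packages for StackInfo and RepositoryInfo (the helper class itself is illustrative, not part of the commit):

import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.state.RepositoryInfo;
import org.apache.ambari.server.state.StackInfo;

public class StackAccessorSketch {

  // Mirrors the loop in testGetStackVersionActiveAttr: mark every version of a stack active.
  static void activateAllVersions(AmbariMetaInfo ambariMetaInfo, String stackName) throws Exception {
    for (StackInfo stackInfo : ambariMetaInfo.getStacks(stackName)) {   // was getStackInfos(...)
      stackInfo.setActive(true);
    }
  }

  // Mirrors testUpdateStacks: inspect repository base URLs for one stack version.
  static void printRepoBaseUrls(AmbariMetaInfo ambariMetaInfo, String name, String version) throws Exception {
    StackInfo stackInfo = ambariMetaInfo.getStack(name, version);       // was getStackInfo(...)
    for (RepositoryInfo repositoryInfo : stackInfo.getRepositories()) {
      System.out.println(repositoryInfo.getBaseUrl());
    }
  }
}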
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
index 5438654..9514b21 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
@@ -156,8 +156,7 @@ public class BlueprintResourceProviderTest {
andReturn("test-service").anyTimes();
expect(metaInfo.getComponentToService("test-stack-name", "test-stack-version", "component2")).
andReturn("test-service").anyTimes();
- expect(metaInfo.getRequiredProperties("test-stack-name", "test-stack-version", "test-service")).andReturn(
- Collections.<String, org.apache.ambari.server.state.PropertyInfo>emptyMap()).anyTimes();
+ expect(metaInfo.getService("test-stack-name", "test-stack-version", "test-service")).andReturn(service).anyTimes();
dao.create(capture(entityCapture));
replay(dao, metaInfo, request, managementController);
@@ -222,8 +221,7 @@ public class BlueprintResourceProviderTest {
andReturn("test-service").anyTimes();
expect(metaInfo.getComponentToService("test-stack-name", "test-stack-version", "component2")).
andReturn("test-service").anyTimes();
- expect(metaInfo.getRequiredProperties("test-stack-name", "test-stack-version", "test-service")).andReturn(
- Collections.<String, org.apache.ambari.server.state.PropertyInfo>emptyMap()).anyTimes();
+ expect(metaInfo.getService("test-stack-name", "test-stack-version", "test-service")).andReturn(service).anyTimes();
dao.create(capture(entityCapture));
replay(dao, metaInfo, request, managementController);
@@ -290,8 +288,7 @@ public class BlueprintResourceProviderTest {
andReturn("test-service").anyTimes();
expect(metaInfo.getComponentToService("test-stack-name", "test-stack-version", "component2")).
andReturn("test-service").anyTimes();
- expect(metaInfo.getRequiredProperties("test-stack-name", "test-stack-version", "test-service")).andReturn(
- Collections.<String, org.apache.ambari.server.state.PropertyInfo>emptyMap()).anyTimes();
+ expect(metaInfo.getService("test-stack-name", "test-stack-version", "test-service")).andReturn(service).anyTimes();
dao.create(capture(entityCapture));
replay(dao, metaInfo, request, managementController);
@@ -381,7 +378,7 @@ public class BlueprintResourceProviderTest {
BlueprintResourceProvider.BLUEPRINT_NAME_PROPERTY_ID, BLUEPRINT_NAME);
AbstractResourceProviderTest.TestObserver observer = new AbstractResourceProviderTest.TestObserver();
- ((ObservableResourceProvider)provider).addObserver(observer);
+ provider.addObserver(observer);
provider.deleteResources(predicate);
@@ -639,7 +636,6 @@ public class BlueprintResourceProviderTest {
expect(stackServiceResponse.getServiceName()).andReturn("test-service").anyTimes();
expect(stackServiceResponse.getStackName()).andReturn("test-stack-name").anyTimes();
expect(stackServiceResponse.getStackVersion()).andReturn("test-stack-version").anyTimes();
- expect(stackServiceResponse.getExcludedConfigTypes()).andReturn(Collections.<String>emptySet());
expect(managementController.getStackComponents(capture(serviceComponentRequestCapture))).andReturn(setServiceComponents).anyTimes();
expect(stackServiceComponentResponse.getCardinality()).andReturn("2").anyTimes();
@@ -673,8 +669,7 @@ public class BlueprintResourceProviderTest {
andReturn("test-service").anyTimes();
expect(metaInfo.getComponentToService("test-stack-name", "test-stack-version", "component2")).
andReturn("test-service").anyTimes();
- expect(metaInfo.getRequiredProperties("test-stack-name", "test-stack-version", "test-service")).andReturn(
- Collections.<String, org.apache.ambari.server.state.PropertyInfo>emptyMap()).anyTimes();
+ expect(metaInfo.getService("test-stack-name", "test-stack-version", "test-service")).andReturn(service).anyTimes();
dao.create(capture(entityCapture));
replay(dao, metaInfo, request, managementController, stackServiceResponse,
@@ -751,7 +746,6 @@ public class BlueprintResourceProviderTest {
expect(stackServiceResponse.getServiceName()).andReturn("test-service").anyTimes();
expect(stackServiceResponse.getStackName()).andReturn("test-stack-name").anyTimes();
expect(stackServiceResponse.getStackVersion()).andReturn("test-stack-version").anyTimes();
- expect(stackServiceResponse.getExcludedConfigTypes()).andReturn(Collections.<String>emptySet());
expect(managementController.getStackComponents(capture(serviceComponentRequestCapture))).andReturn(setServiceComponents).anyTimes();
expect(stackServiceComponentResponse.getCardinality()).andReturn("2").anyTimes();
@@ -785,8 +779,7 @@ public class BlueprintResourceProviderTest {
andReturn("test-service").anyTimes();
expect(metaInfo.getComponentToService("test-stack-name", "test-stack-version", "component2")).
andReturn("test-service").anyTimes();
- expect(metaInfo.getRequiredProperties("test-stack-name", "test-stack-version", "test-service")).andReturn(
- Collections.<String, org.apache.ambari.server.state.PropertyInfo>emptyMap()).anyTimes();
+ expect(metaInfo.getService("test-stack-name", "test-stack-version", "test-service")).andReturn(service).anyTimes();
dao.create(capture(entityCapture));
replay(dao, metaInfo, request, managementController, stackServiceResponse,
@@ -870,8 +863,7 @@ public class BlueprintResourceProviderTest {
Collections.<StackServiceResponse>singleton(stackServiceResponse));
expect(stackServiceResponse.getServiceName()).andReturn("test-service").anyTimes();
expect(stackServiceResponse.getStackName()).andReturn("test-stack-name").anyTimes();
- expect(stackServiceResponse.getStackVersion()).andReturn("test-stack-version").anyTimes();
- expect(stackServiceResponse.getExcludedConfigTypes()).andReturn(Collections.<String>emptySet());
+ expect(stackServiceResponse.getStackVersion()).andReturn("test-stack-version").anyTimes();
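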
expect(managementController.getStackComponents(capture(serviceComponentRequestCapture))).andReturn(setServiceComponents).anyTimes();
expect(stackServiceComponentResponse.getCardinality()).andReturn("2").anyTimes();
@@ -905,8 +897,7 @@ public class BlueprintResourceProviderTest {
andReturn("test-service").anyTimes();
expect(metaInfo.getComponentToService("test-stack-name", "test-stack-version", "component2")).
andReturn("test-service").anyTimes();
- expect(metaInfo.getRequiredProperties("test-stack-name", "test-stack-version", "test-service")).andReturn(
- Collections.<String, org.apache.ambari.server.state.PropertyInfo>emptyMap()).anyTimes();
+ expect(metaInfo.getService("test-stack-name", "test-stack-version", "test-service")).andReturn(service).anyTimes();
dao.create(capture(entityCapture));
replay(dao, metaInfo, request, managementController, stackServiceResponse,
@@ -980,7 +971,6 @@ public class BlueprintResourceProviderTest {
expect(stackServiceResponse.getServiceName()).andReturn("test-service").anyTimes();
expect(stackServiceResponse.getStackName()).andReturn("test-stack-name").anyTimes();
expect(stackServiceResponse.getStackVersion()).andReturn("test-stack-version").anyTimes();
- expect(stackServiceResponse.getExcludedConfigTypes()).andReturn(Collections.<String>emptySet());
expect(managementController.getStackComponents(capture(serviceComponentRequestCapture))).andReturn(setServiceComponents).anyTimes();
expect(stackServiceComponentResponse.getCardinality()).andReturn("2").anyTimes();
@@ -1014,8 +1004,7 @@ public class BlueprintResourceProviderTest {
andReturn("test-service").anyTimes();
expect(metaInfo.getComponentToService("test-stack-name", "test-stack-version", "component2")).
andReturn("test-service").anyTimes();
- expect(metaInfo.getRequiredProperties("test-stack-name", "test-stack-version", "test-service")).andReturn(
- Collections.<String, org.apache.ambari.server.state.PropertyInfo>emptyMap()).anyTimes();
+ expect(metaInfo.getService("test-stack-name", "test-stack-version", "test-service")).andReturn(service).anyTimes();
replay(dao, metaInfo, request, managementController, stackServiceResponse,
stackServiceComponentResponse, stackServiceComponentResponse2);
@@ -1062,6 +1051,7 @@ public class BlueprintResourceProviderTest {
serviceComponents.add(component1);
serviceComponents.add(component2);
+
Set<Map<String, Object>> setProperties = getTestProperties();
((HashSet<Map<String, String>>) ((HashSet<Map<String, Object>>) setProperties.iterator().next().get(
BlueprintResourceProvider.HOST_GROUP_PROPERTY_ID)).iterator().next().get("components")).
@@ -1080,8 +1070,7 @@ public class BlueprintResourceProviderTest {
andReturn(serviceComponents).anyTimes();
expect(metaInfo.getComponentToService("test-stack-name", "test-stack-version", "component1")).
andReturn("test-service").anyTimes();
- expect(metaInfo.getRequiredProperties("test-stack-name", "test-stack-version", "test-service")).andReturn(
- Collections.<String, org.apache.ambari.server.state.PropertyInfo>emptyMap()).anyTimes();
+ expect(metaInfo.getService("test-stack-name", "test-stack-version", "test-service")).andReturn(service).anyTimes();
dao.create(capture(entityCapture));
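
The recurring edit in this file replaces the getRequiredProperties(stack, version, service) stub with a single getService(stack, version, service) expectation returning a ServiceInfo, so required properties are evidently derived from the service definition rather than from a precomputed map. A minimal sketch of such a derivation, assuming ServiceInfo.getProperties() and PropertyInfo.isRequireInput() behave as they do elsewhere in this commit; the filtering helper is hypothetical and not the provider's actual implementation:

import java.util.ArrayList;
import java.util.List;

import org.apache.ambari.server.state.PropertyInfo;
import org.apache.ambari.server.state.ServiceInfo;

public class RequiredPropertiesSketch {
  // Hypothetical helper: collect the properties a blueprint must supply for one service.
  static List<PropertyInfo> requiredProperties(ServiceInfo service) {
    List<PropertyInfo> required = new ArrayList<PropertyInfo>();
    for (PropertyInfo property : service.getProperties()) {
      if (property.isRequireInput()) {
        required.add(property);
      }
    }
    return required;
  }

  public static void main(String[] args) {
    ServiceInfo service = new ServiceInfo();
    service.setName("test-service");

    PropertyInfo password = new PropertyInfo();
    password.setName("super.secret.password");
    password.setFilename("core-site.xml");
    password.setRequireInput(true);
    service.getProperties().add(password);

    System.out.println(requiredProperties(service).size());  // prints 1
  }
}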
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java
index d73eea4..0d3b00f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProviderTest.java
@@ -256,9 +256,9 @@ public class ClientConfigResourceProviderTest {
expect(stackId.getStackVersion()).andReturn(stackVersion).anyTimes();
expect(ambariMetaInfo.getComponent(stackName, stackVersion, serviceName, componentName)).andReturn(componentInfo);
- expect(ambariMetaInfo.getServiceInfo(stackName, stackVersion, serviceName)).andReturn(serviceInfo);
+ expect(ambariMetaInfo.getService(stackName, stackVersion, serviceName)).andReturn(serviceInfo);
expect(serviceInfo.getServicePackageFolder()).andReturn(packageFolder);
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.getCommandScript()).andReturn(commandScriptDefinition);
expect(componentInfo.getClientConfigFiles()).andReturn(clientConfigFileDefinitionList);
@@ -271,7 +271,7 @@ public class ClientConfigResourceProviderTest {
HashMap<String, String> rcaParams = new HashMap<String, String>();
rcaParams.put("key","value");
expect(managementController.getRcaParameters()).andReturn(rcaParams).anyTimes();
- expect(ambariMetaInfo.getServiceInfo(stackName, stackVersion, serviceName)).andReturn(serviceInfo);
+ expect(ambariMetaInfo.getService(stackName, stackVersion, serviceName)).andReturn(serviceInfo);
expect(serviceInfo.getOsSpecifics()).andReturn(new HashMap<String, ServiceOsSpecific>()).anyTimes();
Set<String> userSet = new HashSet<String>();
userSet.add("hdfs");
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
index 98bd017..f7149a8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
@@ -96,7 +96,6 @@ public class ClusterResourceProviderTest {
BaseBlueprintProcessor.stackInfo = null;
}
-
@Test
public void testCreateResources() throws Exception{
Resource.Type type = Resource.Type.Cluster;
@@ -2961,7 +2960,6 @@ public class ClusterResourceProviderTest {
expect(mockStackServiceResponseOne.getServiceName()).andReturn("OOZIE").atLeastOnce();
- expect(mockStackServiceResponseOne.getExcludedConfigTypes()).andReturn(Collections.<String>emptySet()).atLeastOnce();
expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
expect(mockManagementController.getStackConfigurations(isA(Set.class))).andReturn(Collections.<StackConfigurationResponse>emptySet());
@@ -3045,7 +3043,6 @@ public class ClusterResourceProviderTest {
expect(mockStackServiceResponseOne.getServiceName()).andReturn("FALCON").atLeastOnce();
- expect(mockStackServiceResponseOne.getExcludedConfigTypes()).andReturn(Collections.<String>emptySet()).atLeastOnce();
expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
expect(mockManagementController.getStackConfigurations(isA(Set.class))).andReturn(Collections.<StackConfigurationResponse>emptySet());
@@ -3127,7 +3124,6 @@ public class ClusterResourceProviderTest {
expect(mockStackServiceResponseOne.getServiceName()).andReturn("OOZIE").atLeastOnce();
- expect(mockStackServiceResponseOne.getExcludedConfigTypes()).andReturn(Collections.<String>emptySet()).atLeastOnce();
expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
expect(mockManagementController.getStackConfigurations(isA(Set.class))).andReturn(Collections.<StackConfigurationResponse>emptySet());
@@ -3203,9 +3199,7 @@ public class ClusterResourceProviderTest {
expect(mockStackComponentResponse.getCardinality()).andReturn("1");
expect(mockStackComponentResponse.getAutoDeploy()).andReturn(new AutoDeployInfo());
-
expect(mockStackServiceResponseOne.getServiceName()).andReturn("FALCON").atLeastOnce();
- expect(mockStackServiceResponseOne.getExcludedConfigTypes()).andReturn(Collections.<String>emptySet()).atLeastOnce();
expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
@@ -3283,7 +3277,6 @@ public class ClusterResourceProviderTest {
expect(mockStackComponentResponse.getAutoDeploy()).andReturn(new AutoDeployInfo());
expect(mockStackServiceResponseOne.getServiceName()).andReturn("HIVE").atLeastOnce();
- expect(mockStackServiceResponseOne.getExcludedConfigTypes()).andReturn(Collections.<String>emptySet()).atLeastOnce();
expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
expect(mockManagementController.getStackConfigurations(isA(Set.class))).andReturn(Collections.<StackConfigurationResponse>emptySet());
@@ -3360,7 +3353,6 @@ public class ClusterResourceProviderTest {
expect(mockStackComponentResponse.getAutoDeploy()).andReturn(new AutoDeployInfo());
expect(mockStackServiceResponseOne.getServiceName()).andReturn("HBASE").atLeastOnce();
- expect(mockStackServiceResponseOne.getExcludedConfigTypes()).andReturn(Collections.<String>emptySet()).atLeastOnce();
expect(mockManagementController.getStackServices(isA(Set.class))).andReturn(Collections.singleton(mockStackServiceResponseOne));
expect(mockManagementController.getStackComponents(isA(Set.class))).andReturn(Collections.singleton(mockStackComponentResponse));
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
index 1455c52..a66add3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ComponentResourceProviderTest.java
@@ -49,6 +49,7 @@ import java.util.Set;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.ObjectNotFoundException;
import org.apache.ambari.server.ServiceComponentNotFoundException;
+import org.apache.ambari.server.StackAccessException;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
@@ -187,8 +188,8 @@ public class ComponentResourceProviderTest {
expect(serviceComponent3.convertToResponse()).andReturn(
new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component102", null, "", 1, 1, 0));
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(),
- (String) anyObject(), (String) anyObject(), (String) anyObject()))
+ expect(ambariMetaInfo.getComponent((String) anyObject(),
+ (String) anyObject(), (String) anyObject(), (String) anyObject()))
.andReturn(componentInfo1).times(2).andReturn(componentInfo2);
expect(componentInfo1.getCategory()).andReturn("MASTER").anyTimes();
@@ -254,6 +255,10 @@ public class ComponentResourceProviderTest {
Cluster cluster = createNiceMock(Cluster.class);
AmbariMetaInfo ambariMetaInfo = createNiceMock(AmbariMetaInfo.class);
Service service = createNiceMock(Service.class);
+ ComponentInfo component1Info = createNiceMock(ComponentInfo.class);
+ ComponentInfo component2Info = createNiceMock(ComponentInfo.class);
+ ComponentInfo component3Info = createNiceMock(ComponentInfo.class);
+
ServiceComponent serviceComponent1 = createNiceMock(ServiceComponent.class);
ServiceComponent serviceComponent2 = createNiceMock(ServiceComponent.class);
ServiceComponent serviceComponent3 = createNiceMock(ServiceComponent.class);
@@ -271,20 +276,33 @@ public class ComponentResourceProviderTest {
expect(managementController.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
expect(managementController.getEffectiveMaintenanceState(
capture(new Capture<ServiceComponentHost>()))).andReturn(MaintenanceState.OFF).anyTimes();
-
+
+ expect(stackId.getStackName()).andReturn("stackName").anyTimes();
+ expect(stackId.getStackVersion()).andReturn("1").anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(cluster.getDesiredStackVersion()).andReturn(stackId);
expect(cluster.getService("Service100")).andReturn(service).anyTimes();
+ expect(service.getName()).andReturn("Service100").anyTimes();
expect(service.getServiceComponent("Component101")).andReturn(serviceComponent1).anyTimes();
expect(service.getServiceComponent("Component102")).andReturn(serviceComponent1).anyTimes();
expect(service.getServiceComponent("Component103")).andReturn(serviceComponent2).anyTimes();
+ expect(serviceComponent1.getName()).andReturn("Component101").anyTimes();
+ expect(serviceComponent2.getName()).andReturn("Component102").anyTimes();
+ expect(serviceComponent3.getName()).andReturn("Component103").anyTimes();
expect(cluster.getServices()).andReturn(Collections.singletonMap("Service100", service)).anyTimes();
expect(service.getServiceComponents()).andReturn(serviceComponentMap).anyTimes();
+ expect(ambariMetaInfo.getComponent("stackName", "1", "Service100", "Component101")).andReturn(component1Info).atLeastOnce();
+ expect(ambariMetaInfo.getComponent("stackName", "1", "Service100", "Component102")).andReturn(component2Info).atLeastOnce();
+ expect(ambariMetaInfo.getComponent("stackName", "1", "Service100", "Component103")).andReturn(component3Info).atLeastOnce();
+ expect(component1Info.getCategory()).andReturn(null);
+ expect(component2Info.getCategory()).andReturn(null);
+ expect(component3Info.getCategory()).andReturn(null);
+
expect(serviceComponent1.convertToResponse()).andReturn(
new ServiceComponentResponse(100L, "Cluster100", "Service100", "Component101", null, "", 1, 0, 1));
expect(serviceComponent2.convertToResponse()).andReturn(
@@ -316,16 +334,14 @@ public class ComponentResourceProviderTest {
expect(managementController.createAndPersistStages(capture(clusterCapture), capture(requestPropertiesCapture), capture(requestParametersCapture), capture(changedServicesCapture), capture(changedCompsCapture), capture(changedScHostsCapture), capture(ignoredScHostsCapture), anyBoolean(), anyBoolean()
)).andReturn(requestStatusResponse);
-
-
Map<String, String> mapRequestProps = new HashMap<String, String>();
mapRequestProps.put("context", "Called from a test");
// replay
- replay(managementController, clusters, cluster, ambariMetaInfo, service,
- serviceComponent1, serviceComponent2, serviceComponent3, serviceComponentHost,
- requestStatusResponse, stackId, maintenanceStateHelper);
+ replay(managementController, clusters, cluster, ambariMetaInfo, service, component1Info,
+ component2Info, component3Info, serviceComponent1, serviceComponent2, serviceComponent3,
+ serviceComponentHost, requestStatusResponse, stackId, maintenanceStateHelper);
ResourceProvider provider = new ComponentResourceProvider(
PropertyHelper.getPropertyIds(type),
@@ -340,15 +356,15 @@ public class ComponentResourceProviderTest {
// create the request
Request request = PropertyHelper.getUpdateRequest(properties, mapRequestProps);
- // update the cluster named Cluster102
+ // update the cluster named Cluster100
Predicate predicate = new PredicateBuilder().property(ComponentResourceProvider.COMPONENT_CLUSTER_NAME_PROPERTY_ID).
equals("Cluster100").toPredicate();
provider.updateResources(request, predicate);
// verify
- verify(managementController, clusters, cluster, ambariMetaInfo, service,
- serviceComponent1, serviceComponent2, serviceComponent3, serviceComponentHost,
- requestStatusResponse, stackId, maintenanceStateHelper);
+ verify(managementController, clusters, cluster, ambariMetaInfo, service, component1Info,
+ component2Info, component3Info, serviceComponent1, serviceComponent2, serviceComponent3,
+ serviceComponentHost, requestStatusResponse, stackId, maintenanceStateHelper);
}
public void testSuccessDeleteResources() throws Exception {
@@ -615,6 +631,7 @@ public class ComponentResourceProviderTest {
Cluster cluster = createNiceMock(Cluster.class);
Service service = createNiceMock(Service.class);
+ ComponentInfo componentInfo = createNiceMock(ComponentInfo.class);
ServiceComponent component = createNiceMock(ServiceComponent.class);
ServiceComponentResponse response = createNiceMock(ServiceComponentResponse.class);
@@ -630,25 +647,31 @@ public class ComponentResourceProviderTest {
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(managementController.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
+ expect(stackId.getStackName()).andReturn("stackName").anyTimes();
+ expect(stackId.getStackVersion()).andReturn("1").anyTimes();
+
// getComponents
expect(clusters.getCluster("cluster1")).andReturn(cluster);
expect(cluster.getService("service1")).andReturn(service);
expect(cluster.getDesiredStackVersion()).andReturn(stackId).anyTimes();
+ expect(service.getName()).andReturn("service1").anyTimes();
expect(service.getServiceComponent("component1")).andReturn(component);
+ expect(ambariMetaInfo.getComponent("stackName", "1", "service1", "component1")).andReturn(componentInfo);
+ expect(componentInfo.getCategory()).andReturn(null);
+
expect(component.convertToResponse()).andReturn(response);
// replay mocks
- replay(clusters, cluster, service, component, response, ambariMetaInfo, stackId, managementController);
+ replay(clusters, cluster, service, componentInfo, component, response, ambariMetaInfo, stackId, managementController);
//test
-// AmbariManagementController controller = new AmbariManagementControllerImpl(null, clusters, injector);
Set<ServiceComponentResponse> setResponses = getComponentResourceProvider(managementController).getComponents(setRequests);
// assert and verify
assertEquals(1, setResponses.size());
assertTrue(setResponses.contains(response));
- verify(clusters, cluster, service, component, response, ambariMetaInfo, stackId, managementController);
+ verify(clusters, cluster, service, componentInfo, component, response, ambariMetaInfo, stackId, managementController);
}
/**
@@ -665,6 +688,8 @@ public class ComponentResourceProviderTest {
Cluster cluster = createNiceMock(Cluster.class);
Service service = createNiceMock(Service.class);
+ ComponentInfo component3Info = createNiceMock(ComponentInfo.class);
+ ComponentInfo component4Info = createNiceMock(ComponentInfo.class);
ServiceComponent component1 = createNiceMock(ServiceComponent.class);
ServiceComponent component2 = createNiceMock(ServiceComponent.class);
ServiceComponentResponse response1 = createNiceMock(ServiceComponentResponse.class);
@@ -693,12 +718,22 @@ public class ComponentResourceProviderTest {
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(managementController.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
+ expect(stackId.getStackName()).andReturn("stackName").anyTimes();
+ expect(stackId.getStackVersion()).andReturn("1").anyTimes();
+
// getComponents
- expect(clusters.getCluster("cluster1")).andReturn(cluster).times(5);
+ expect(clusters.getCluster("cluster1")).andReturn(cluster).anyTimes();
expect(cluster.getDesiredStackVersion()).andReturn(stackId).anyTimes();
- expect(cluster.getService("service1")).andReturn(service).times(4);
+ expect(cluster.getService("service1")).andReturn(service).anyTimes();
expect(cluster.getService("service2")).andThrow(new ObjectNotFoundException("service2"));
+ expect(ambariMetaInfo.getComponent("stackName", "1", "service1", "component3")).andReturn(component3Info);
+ expect(ambariMetaInfo.getComponent("stackName", "1", "service1", "component4")).andReturn(component4Info);
+
+ expect(component3Info.getCategory()).andReturn(null);
+ expect(component4Info.getCategory()).andReturn(null);
+
+ expect(service.getName()).andReturn("service1").anyTimes();
expect(service.getServiceComponent("component1")).andThrow(new ServiceComponentNotFoundException("cluster1", "service1", "component1"));
expect(service.getServiceComponent("component2")).andThrow(new ServiceComponentNotFoundException("cluster1", "service1", "component2"));
expect(service.getServiceComponent("component3")).andReturn(component1);
@@ -707,7 +742,8 @@ public class ComponentResourceProviderTest {
expect(component1.convertToResponse()).andReturn(response1);
expect(component2.convertToResponse()).andReturn(response2);
// replay mocks
- replay(clusters, cluster, service, component1, component2, response1, response2, ambariMetaInfo, stackId, managementController);
+ replay(clusters, cluster, service, component3Info, component4Info, component1, component2, response1,
+ response2, ambariMetaInfo, stackId, managementController);
//test
Set<ServiceComponentResponse> setResponses = getComponentResourceProvider(managementController).getComponents(setRequests);
@@ -717,7 +753,8 @@ public class ComponentResourceProviderTest {
assertTrue(setResponses.contains(response1));
assertTrue(setResponses.contains(response2));
- verify(clusters, cluster, service, component1, component2, response1, response2, ambariMetaInfo, stackId, managementController);
+ verify(clusters, cluster, service, component3Info, component4Info, component1, component2, response1,
+ response2, ambariMetaInfo, stackId, managementController);
}
public static ComponentResourceProvider getComponentResourceProvider(AmbariManagementController managementController)
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/HostResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/HostResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/HostResourceProviderTest.java
index 2879094..cab75ee 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/HostResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/HostResourceProviderTest.java
@@ -282,7 +282,7 @@ public class HostResourceProviderTest {
expect(healthStatus.getHealthReport()).andReturn("HEALTHY").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.getCategory()).andReturn("MASTER").anyTimes();
@@ -391,7 +391,7 @@ public class HostResourceProviderTest {
expect(healthStatus.getHealthReport()).andReturn("HEALTHY").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.getCategory()).andReturn("MASTER").anyTimes();
@@ -499,7 +499,7 @@ public class HostResourceProviderTest {
expect(healthStatus.getHealthReport()).andReturn("HEALTHY").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.getCategory()).andReturn("MASTER").anyTimes();
@@ -684,7 +684,7 @@ public class HostResourceProviderTest {
expect(hostResponse1.getStatus()).andReturn(HealthStatus.ALERT.name()).anyTimes();
expect(healthStatus.getHealthStatus()).andReturn(HostHealthStatus.HealthStatus.HEALTHY).anyTimes();
expect(healthStatus.getHealthReport()).andReturn("HEALTHY").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.getCategory()).andReturn("SLAVE").anyTimes();
expect(resourceProviderFactory.getHostResourceProvider(anyObject(Set.class), anyObject(Map.class),
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
index 05f2e01..5003c16 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
@@ -648,7 +648,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isMaster()).andReturn(true);
@@ -699,7 +699,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isMaster()).andReturn(true);
@@ -748,7 +748,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isMaster()).andReturn(true);
@@ -797,7 +797,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isMaster()).andReturn(true);
@@ -843,7 +843,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isMaster()).andReturn(false);
@@ -890,7 +890,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isMaster()).andReturn(true);
@@ -938,7 +938,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isMaster()).andReturn(true);
@@ -986,7 +986,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isMaster()).andReturn(true);
@@ -1032,7 +1032,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isMaster()).andReturn(true);
@@ -1075,7 +1075,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isMaster()).andReturn(false);
@@ -1125,7 +1125,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
// replay
@@ -1166,7 +1166,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isClient()).andReturn(true);
@@ -1209,7 +1209,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isClient()).andReturn(true);
@@ -1316,7 +1316,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isClient()).andReturn(false).anyTimes();
@@ -1372,7 +1372,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isMaster()).andReturn(false).anyTimes();
@@ -1436,7 +1436,7 @@ public class ServiceResourceProviderTest {
expect(stackId.getStackVersion()).andReturn("V1").anyTimes();
- expect(ambariMetaInfo.getComponentCategory((String) anyObject(), (String) anyObject(),
+ expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
expect(componentInfo.isClient()).andReturn(false);
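
Here, as in the component, host and client-config provider tests, the mocked metadata lookup moves from getComponentCategory(...) to getComponent(...), which hands back a ComponentInfo whose category/master/client flags the providers inspect. A short sketch of the new stubbing pattern, assuming EasyMock and the four-String signature shown above; the stack, service and component names in the final call are placeholder values:

import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;

import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.state.ComponentInfo;

public class GetComponentExpectationSketch {
  public static void main(String[] args) throws Exception {
    AmbariMetaInfo ambariMetaInfo = createNiceMock(AmbariMetaInfo.class);
    ComponentInfo componentInfo = createNiceMock(ComponentInfo.class);

    // Renamed lookup: getComponent(stack, version, service, component) replaces getComponentCategory(...)
    expect(ambariMetaInfo.getComponent((String) anyObject(), (String) anyObject(),
        (String) anyObject(), (String) anyObject())).andReturn(componentInfo).anyTimes();
    expect(componentInfo.getCategory()).andReturn("MASTER").anyTimes();
    expect(componentInfo.isMaster()).andReturn(true).anyTimes();

    replay(ambariMetaInfo, componentInfo);

    ComponentInfo resolved = ambariMetaInfo.getComponent("HDP", "2.0.6", "HDFS", "NAMENODE");
    System.out.println(resolved.getCategory() + " master=" + resolved.isMaster());
  }
}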
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/orm/entities/BlueprintEntityTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/entities/BlueprintEntityTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/entities/BlueprintEntityTest.java
index d8c0e29..b965554 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/entities/BlueprintEntityTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/entities/BlueprintEntityTest.java
@@ -22,12 +22,15 @@ import com.google.gson.Gson;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.ServiceInfo;
import org.junit.Test;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -83,8 +86,11 @@ public class BlueprintEntityTest {
@Test
public void testValidateConfigurations_clusterConfig() throws Exception {
AmbariMetaInfo metaInfo = createMock(AmbariMetaInfo.class);
+ ServiceInfo service = new ServiceInfo();
+ service.setName("service1");
+
+ List<PropertyInfo> serviceProperties = new ArrayList<PropertyInfo>();
- Map<String, PropertyInfo> requiredProps = new HashMap<String, PropertyInfo>();
PropertyInfo prop = new PropertyInfo();
prop.setFilename("core-site.xml");
prop.setName("super.secret.password");
@@ -93,7 +99,9 @@ public class BlueprintEntityTest {
propertyTypes.add(PropertyInfo.PropertyType.PASSWORD);
prop.setPropertyTypes(propertyTypes);
prop.setValue(null);
- requiredProps.put("super.secret.password", prop);
+ serviceProperties.add(prop);
+ service.getProperties().addAll(serviceProperties);
BlueprintEntity entity = new BlueprintEntity();
entity.setStackName("stackName");
@@ -130,7 +138,7 @@ public class BlueprintEntityTest {
entity.setHostGroups(hostGroupEntities);
expect(metaInfo.getComponentToService("stackName", "version", "component1")).andReturn("service1");
- expect(metaInfo.getRequiredProperties("stackName", "version", "service1")).andReturn(requiredProps);
+ expect(metaInfo.getService("stackName", "version", "service1")).andReturn(service);
replay(metaInfo);
@@ -145,17 +153,20 @@ public class BlueprintEntityTest {
@Test
public void testValidateConfigurations_hostGroupConfig() throws Exception {
AmbariMetaInfo metaInfo = createMock(AmbariMetaInfo.class);
-
- Map<String, PropertyInfo> requiredProps = new HashMap<String, PropertyInfo>();
- PropertyInfo prop = new PropertyInfo();
- prop.setFilename("core-site.xml");
- prop.setName("super.secret.password");
- prop.setRequireInput(true);
+ ServiceInfo service = new ServiceInfo();
+ service.setName("service1");
+
+ List<PropertyInfo> serviceProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo prop1 = new PropertyInfo();
+ prop1.setFilename("core-site.xml");
+ prop1.setName("super.secret.password");
+ prop1.setRequireInput(true);
Set<PropertyInfo.PropertyType> propertyTypes = new HashSet<PropertyInfo.PropertyType>();
propertyTypes.add(PropertyInfo.PropertyType.PASSWORD);
- prop.setPropertyTypes(propertyTypes);
- prop.setValue(null);
- requiredProps.put("super.secret.password", prop);
+ prop1.setPropertyTypes(propertyTypes);
+ prop1.setValue(null);
+ serviceProperties.add(prop1);
+ service.getProperties().addAll(serviceProperties);
BlueprintEntity entity = new BlueprintEntity();
entity.setStackName("stackName");
@@ -193,7 +204,7 @@ public class BlueprintEntityTest {
entity.setHostGroups(hostGroupEntities);
expect(metaInfo.getComponentToService("stackName", "version", "component1")).andReturn("service1");
- expect(metaInfo.getRequiredProperties("stackName", "version", "service1")).andReturn(requiredProps);
+ expect(metaInfo.getService("stackName", "version", "service1")).andReturn(service);
replay(metaInfo);
@@ -208,16 +219,20 @@ public class BlueprintEntityTest {
@Test
public void testValidateConfigurations_negative() throws Exception {
AmbariMetaInfo metaInfo = createMock(AmbariMetaInfo.class);
+ ServiceInfo service = new ServiceInfo();
+ service.setName("service1");
- Map<String, PropertyInfo> requiredProps = new HashMap<String, PropertyInfo>();
- PropertyInfo prop = new PropertyInfo();
- prop.setFilename("core-site.xml");
- prop.setName("super.secret.password");
- prop.setRequireInput(true);
+ List<PropertyInfo> serviceProperties = new ArrayList<PropertyInfo>();
+
+ PropertyInfo prop1 = new PropertyInfo();
+ prop1.setFilename("core-site.xml");
+ prop1.setName("super.secret.password");
+ prop1.setRequireInput(true);
Set<PropertyInfo.PropertyType> propertyTypes = new HashSet<PropertyInfo.PropertyType>();
propertyTypes.add(PropertyInfo.PropertyType.PASSWORD);
- prop.setPropertyTypes(propertyTypes);
- prop.setValue(null);
+ prop1.setPropertyTypes(propertyTypes);
+ prop1.setValue(null);
+ serviceProperties.add(prop1);
PropertyInfo prop2 = new PropertyInfo();
prop2.setFilename("global.xml");
@@ -227,9 +242,9 @@ public class BlueprintEntityTest {
propertyTypes2.add(PropertyInfo.PropertyType.PASSWORD);
prop2.setPropertyTypes(propertyTypes2);
prop2.setValue(" ");
+ serviceProperties.add(prop2);
- requiredProps.put("super.secret.password", prop);
- requiredProps.put("another.super.secret.password", prop2);
+ service.getProperties().addAll(serviceProperties);
BlueprintEntity entity = new BlueprintEntity();
entity.setStackName("stackName");
@@ -266,7 +281,7 @@ public class BlueprintEntityTest {
entity.setHostGroups(hostGroupEntities);
expect(metaInfo.getComponentToService("stackName", "version", "component1")).andReturn("service1");
- expect(metaInfo.getRequiredProperties("stackName", "version", "service1")).andReturn(requiredProps);
+ expect(metaInfo.getService("stackName", "version", "service1")).andReturn(service);
replay(metaInfo);
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/stack/ComponentModuleTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/ComponentModuleTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/ComponentModuleTest.java
new file mode 100644
index 0000000..8181cbc
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/ComponentModuleTest.java
@@ -0,0 +1,409 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.state.AutoDeployInfo;
+import org.apache.ambari.server.state.ClientConfigFileDefinition;
+import org.apache.ambari.server.state.CommandScriptDefinition;
+import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.CustomCommandDefinition;
+import org.apache.ambari.server.state.DependencyInfo;
+import org.junit.Test;
+
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import static org.easymock.EasyMock.createNiceMock;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * ComponentModule unit test case.
+ */
+public class ComponentModuleTest {
+
+ @Test
+ public void testResolve_CommandScript() {
+ CommandScriptDefinition commandScript = new CommandScriptDefinition();
+ ComponentInfo info = new ComponentInfo();
+ ComponentInfo parentInfo = new ComponentInfo();
+
+ // parent has value set, child value is null
+ parentInfo.setCommandScript(commandScript);
+ assertSame(commandScript, resolveComponent(info, parentInfo).getModuleInfo().getCommandScript());
+
+ // child has value set, parent value is null
+ info.setCommandScript(commandScript);
+ parentInfo.setCommandScript(null);
+ assertSame(commandScript, resolveComponent(info, parentInfo).getModuleInfo().getCommandScript());
+
+ // value set in both parent and child; child overwrites
+ CommandScriptDefinition commandScript2 = createNiceMock(CommandScriptDefinition.class);
+ info.setCommandScript(commandScript);
+ parentInfo.setCommandScript(commandScript2);
+ assertSame(commandScript, resolveComponent(info, parentInfo).getModuleInfo().getCommandScript());
+ }
+
+ @Test
+ public void testResolve_DisplayName() {
+ String displayName = "foo";
+
+ ComponentInfo info = new ComponentInfo();
+ ComponentInfo parentInfo = new ComponentInfo();
+
+ // parent has value set, child value is null
+ parentInfo.setDisplayName(displayName);
+ assertEquals(displayName, resolveComponent(info, parentInfo).getModuleInfo().getDisplayName());
+
+ // child has value set, parent value is null
+ info.setDisplayName(displayName);
+ parentInfo.setDisplayName(null);
+ assertEquals(displayName, resolveComponent(info, parentInfo).getModuleInfo().getDisplayName());
+
+ // value set in both parent and child; child overwrites
+ String displayName2 = "foo2";
+ info.setDisplayName(displayName2);
+ parentInfo.setDisplayName(displayName);
+ assertEquals(displayName2, resolveComponent(info, parentInfo).getModuleInfo().getDisplayName());
+ }
+
+ @Test
+ public void testResolve_ClientConfigFiles() {
+ List<ClientConfigFileDefinition> clientConfigs = new ArrayList<ClientConfigFileDefinition>();
+ ClientConfigFileDefinition clientConfig1 = new ClientConfigFileDefinition();
+ clientConfig1.setType("type1");
+ clientConfig1.setDictionaryName("dictName1");
+ clientConfig1.setFileName("filename1");
+ ClientConfigFileDefinition clientConfig2 = new ClientConfigFileDefinition();
+ clientConfig2.setType("type2");
+ clientConfig2.setDictionaryName("dictName2");
+ clientConfig2.setFileName("filename2");
+ clientConfigs.add(clientConfig1);
+ clientConfigs.add(clientConfig2);
+
+ ComponentInfo info = new ComponentInfo();
+ ComponentInfo parentInfo = new ComponentInfo();
+
+ // parent has value set, child value is null
+ parentInfo.setClientConfigFiles(clientConfigs);
+ assertEquals(clientConfigs, resolveComponent(info, parentInfo).getModuleInfo().getClientConfigFiles());
+
+ // child has value set, parent value is null
+ info.setClientConfigFiles(clientConfigs);
+ parentInfo.setClientConfigFiles(null);
+ assertEquals(clientConfigs, resolveComponent(info, parentInfo).getModuleInfo().getClientConfigFiles());
+
+ // value set in both parent and child; child overwrites with no merge
+ List<ClientConfigFileDefinition> clientConfigs2 = new ArrayList<ClientConfigFileDefinition>();
+ ClientConfigFileDefinition clientConfig3 = new ClientConfigFileDefinition();
+ clientConfig3.setType("type1");
+ clientConfig3.setDictionaryName("dictName1");
+ clientConfig3.setFileName("DIFFERENT filename");
+ clientConfigs2.add(clientConfig3);
+
+ info.setClientConfigFiles(clientConfigs2);
+ parentInfo.setClientConfigFiles(clientConfigs);
+ assertEquals(clientConfigs2, resolveComponent(info, parentInfo).getModuleInfo().getClientConfigFiles());
+ }
+
+ @Test
+ public void testResolve_Category() {
+ String category = "foo";
+
+ ComponentInfo info = new ComponentInfo();
+ ComponentInfo parentInfo = new ComponentInfo();
+
+ // parent has value set, child value is null
+ parentInfo.setCategory(category);
+ assertEquals(category, resolveComponent(info, parentInfo).getModuleInfo().getCategory());
+
+ // child has value set, parent value is null
+ info.setCategory(category);
+ parentInfo.setCategory(null);
+ assertEquals(category, resolveComponent(info, parentInfo).getModuleInfo().getCategory());
+
+ // value set in both parent and child; child overwrites
+ String category2 = "foo2";
+ info.setCategory(category2);
+ parentInfo.setCategory(category);
+ assertEquals(category2, resolveComponent(info, parentInfo).getModuleInfo().getCategory());
+ }
+
+ @Test
+ public void testResolve_Cardinality() {
+ String cardinality = "foo";
+
+ ComponentInfo info = new ComponentInfo();
+ ComponentInfo parentInfo = new ComponentInfo();
+
+ // parent has value set, child value is null
+ parentInfo.setCardinality(cardinality);
+ assertEquals(cardinality, resolveComponent(info, parentInfo).getModuleInfo().getCardinality());
+
+ // child has value set, parent value is null
+ info.setCardinality(cardinality);
+ parentInfo.setCardinality(null);
+ assertEquals(cardinality, resolveComponent(info, parentInfo).getModuleInfo().getCardinality());
+
+ // value set in both parent and child; child overwrites
+ String cardinality2 = "foo2";
+ info.setCardinality(cardinality2);
+ parentInfo.setCardinality(cardinality);
+ assertEquals(cardinality2, resolveComponent(info, parentInfo).getModuleInfo().getCardinality());
+ }
+
+ @Test
+ public void testResolve_AutoDeploy() {
+ AutoDeployInfo autoDeployInfo = new AutoDeployInfo();
+ autoDeployInfo.setEnabled(true);
+ autoDeployInfo.setCoLocate("foo/bar");
+ ComponentInfo info = new ComponentInfo();
+ ComponentInfo parentInfo = new ComponentInfo();
+
+ // parent has value set, child value is null
+ parentInfo.setAutoDeploy(autoDeployInfo);
+ assertEquals(autoDeployInfo, resolveComponent(info, parentInfo).getModuleInfo().getAutoDeploy());
+
+ // child has value set, parent value is null
+ info.setAutoDeploy(autoDeployInfo);
+ parentInfo.setAutoDeploy(null);
+ assertEquals(autoDeployInfo, resolveComponent(info, parentInfo).getModuleInfo().getAutoDeploy());
+
+ // value set in both parent and child; child overwrites
+ AutoDeployInfo autoDeployInfo2 = new AutoDeployInfo();
+ info.setAutoDeploy(autoDeployInfo);
+ parentInfo.setAutoDeploy(autoDeployInfo2);
+ assertEquals(autoDeployInfo, resolveComponent(info, parentInfo).getModuleInfo().getAutoDeploy());
+ }
+
+
+ @Test
+ public void testResolve_Dependencies() {
+ List<DependencyInfo> dependencies = new ArrayList<DependencyInfo>();
+ DependencyInfo dependency1 = new DependencyInfo();
+ dependency1.setName("service/one");
+ DependencyInfo dependency2 = new DependencyInfo();
+ dependency2.setName("service/two");
+ dependencies.add(dependency1);
+ dependencies.add(dependency2);
+
+ ComponentInfo info = new ComponentInfo();
+ ComponentInfo parentInfo = new ComponentInfo();
+
+ // parent has value set, child value is null
+ parentInfo.setDependencies(dependencies);
+ assertEquals(dependencies, resolveComponent(info, parentInfo).getModuleInfo().getDependencies());
+
+ // child has value set, parent value is null
+ info.setDependencies(dependencies);
+ parentInfo.setDependencies(null);
+ assertEquals(dependencies, resolveComponent(info, parentInfo).getModuleInfo().getDependencies());
+
+ // value set in both parent and child; merge parent and child
+ //todo: currently there is no way to remove an inherited dependency
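+ // e.g. parent {"service/one", "service/two"} merged with child {"service/two", "service/four"}
+ // yields three dependencies: "service/one", the child's "service/two", and "service/four"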
+ List<DependencyInfo> dependencies2 = new ArrayList<DependencyInfo>();
+ DependencyInfo dependency3 = new DependencyInfo();
+ dependency3.setName("service/two");
+ DependencyInfo dependency4 = new DependencyInfo();
+ dependency4.setName("service/four");
+ dependencies2.add(dependency3);
+ dependencies2.add(dependency4);
+
+ info.setDependencies(dependencies2);
+ parentInfo.setDependencies(dependencies);
+
+ List<DependencyInfo> resolvedDependencies = resolveComponent(info, parentInfo).getModuleInfo().getDependencies();
+ assertEquals(3, resolvedDependencies.size());
+ assertTrue(resolvedDependencies.contains(dependency1));
+ assertTrue(resolvedDependencies.contains(dependency3));
+ assertTrue(resolvedDependencies.contains(dependency4));
+ }
+
+ @Test
+ public void testResolve_CustomCommands() throws Exception {
+ List<CustomCommandDefinition> commands = new ArrayList<CustomCommandDefinition>();
+ CustomCommandDefinition command1 = new CustomCommandDefinition();
+ setPrivateField(command1, "name", "one");
+ CustomCommandDefinition command2 = new CustomCommandDefinition();
+ setPrivateField(command2, "name", "two");
+ commands.add(command1);
+ commands.add(command2);
+
+ ComponentInfo info = new ComponentInfo();
+ ComponentInfo parentInfo = new ComponentInfo();
+
+ // parent has value set, child value is null
+ parentInfo.setCustomCommands(commands);
+ assertEquals(commands, resolveComponent(info, parentInfo).getModuleInfo().getCustomCommands());
+
+ // child has value set, parent value is null
+ info.setCustomCommands(commands);
+ parentInfo.setCustomCommands(null);
+ assertEquals(commands, resolveComponent(info, parentInfo).getModuleInfo().getCustomCommands());
+
+ // value set in both parent and child; merge parent and child
+ //todo: currently there is no way to remove an inherited command
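+ // e.g. parent commands {"one", "two"} merged with child {"two", "four"} yield three commands:
+ // "one", the child's "two", and "four"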
+ List<CustomCommandDefinition> commands2 = new ArrayList<CustomCommandDefinition>();
+ CustomCommandDefinition command3 = new CustomCommandDefinition();
+ // override command 2
+ setPrivateField(command3, "name", "two");
+ CustomCommandDefinition command4 = new CustomCommandDefinition();
+ setPrivateField(command4, "name", "four");
+ commands2.add(command3);
+ commands2.add(command4);
+
+ info.setCustomCommands(commands2);
+ parentInfo.setCustomCommands(commands);
+
+ List<CustomCommandDefinition> resolvedCommands = resolveComponent(info, parentInfo).getModuleInfo().getCustomCommands();
+ assertEquals(3, resolvedCommands.size());
+ assertTrue(resolvedCommands.contains(command1));
+ assertTrue(resolvedCommands.contains(command3));
+ assertTrue(resolvedCommands.contains(command4));
+ }
+
+ @Test
+ // Merging of config dependencies differs from other non-module merges in that the collections aren't
+ // merged if any config dependency is specified in the child. So the merged result is the child
+ // dependencies, or the parent dependencies if the child specifies none.
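+ // e.g. child {"two", "four"} with parent {"one", "two"} resolves to the child's {"two", "four"}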
+ public void testResolve_ConfigDependencies() {
+ List<String> dependencies = new ArrayList<String>();
+ String dependency1 = "one";
+ String dependency2 = "two";
+ dependencies.add(dependency1);
+ dependencies.add(dependency2);
+
+ ComponentInfo info = new ComponentInfo();
+ ComponentInfo parentInfo = new ComponentInfo();
+
+ // parent has value set, child value is null
+ parentInfo.setConfigDependencies(dependencies);
+ assertEquals(dependencies, resolveComponent(info, parentInfo).getModuleInfo().getConfigDependencies());
+
+ // child has value set, parent value is null
+ info.setConfigDependencies(dependencies);
+ parentInfo.setConfigDependencies(null);
+ assertEquals(dependencies, resolveComponent(info, parentInfo).getModuleInfo().getConfigDependencies());
+
+ // value set in both parent and child; child overwrites, no merge
+ List<String> dependencies2 = new ArrayList<String>();
+ String dependency3 = "two";
+ String dependency4 = "four";
+ dependencies2.add(dependency3);
+ dependencies2.add(dependency4);
+
+ info.setConfigDependencies(dependencies2);
+ parentInfo.setConfigDependencies(dependencies);
+
+ List<String> resolvedDependencies = resolveComponent(info, parentInfo).getModuleInfo().getConfigDependencies();
+ assertEquals(2, resolvedDependencies.size());
+ assertTrue(resolvedDependencies.contains(dependency3));
+ assertTrue(resolvedDependencies.contains(dependency4));
+ }
+
+ @Test
+ // merging of "client to update configs", whatever that means, is different than most other non-module merges
+ // in that the collections aren't merged if any "client to update configs" is specified in the child.
+ // So, the merged result is either the child collection or if null, the parent collection.
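+ // e.g. child {"two", "four"} with parent {"one", "two"} resolves to the child's {"two", "four"}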
+ public void testResolve_ClientToUpdateConfigs() {
+ List<String> clientsToUpdate = new ArrayList<String>();
+ String client1 = "one";
+ String client2 = "two";
+ clientsToUpdate.add(client1);
+ clientsToUpdate.add(client2);
+
+ ComponentInfo info = new ComponentInfo();
+ ComponentInfo parentInfo = new ComponentInfo();
+
+ // parent has value set, child value is null
+ parentInfo.setClientsToUpdateConfigs(clientsToUpdate);
+ assertEquals(clientsToUpdate, resolveComponent(info, parentInfo).getModuleInfo().getClientsToUpdateConfigs());
+
+ // child has value set, parent value is null
+ info.setClientsToUpdateConfigs(clientsToUpdate);
+ parentInfo.setClientsToUpdateConfigs(null);
+ assertEquals(clientsToUpdate, resolveComponent(info, parentInfo).getModuleInfo().getClientsToUpdateConfigs());
+
+ // value set in both parent and child; child overwrites, no merge
+ List<String> clientsToUpdate2 = new ArrayList<String>();
+ String client3 = "two";
+ String client4 = "four";
+ clientsToUpdate2.add(client3);
+ clientsToUpdate2.add(client4);
+
+ info.setClientsToUpdateConfigs(clientsToUpdate2);
+ parentInfo.setClientsToUpdateConfigs(clientsToUpdate);
+
+ List<String> resolvedClientsToUpdate = resolveComponent(info, parentInfo).getModuleInfo().getClientsToUpdateConfigs();
+ assertEquals(2, resolvedClientsToUpdate.size());
+ assertTrue(resolvedClientsToUpdate.contains(client3));
+ assertTrue(resolvedClientsToUpdate.contains(client4));
+ }
+
+ @Test
+ public void testGetId() {
+ ComponentInfo info = new ComponentInfo();
+ info.setName("foo");
+
+ ComponentModule component = new ComponentModule(info);
+ assertEquals("foo", component.getId());
+ }
+
+ @Test
+ public void testIsDeleted() {
+ // default value
+ ComponentInfo info = new ComponentInfo();
+ info.setName("foo");
+
+ ComponentModule component = new ComponentModule(info);
+ assertFalse(component.isDeleted());
+
+ // explicit value
+ info = new ComponentInfo();
+ info.setName("foo");
+ info.setDeleted(true);
+
+ component = new ComponentModule(info);
+ assertTrue(component.isDeleted());
+ }
+
+ private ComponentModule resolveComponent(ComponentInfo info, ComponentInfo parentInfo) {
+ info.setName("FOO");
+ parentInfo.setName("FOO");
+
+ ComponentModule component = new ComponentModule(info);
+ ComponentModule parentComponent = new ComponentModule(parentInfo);
+
+ component.resolve(parentComponent, Collections.<String, StackModule>emptyMap());
+
+ return component;
+ }
+
+ private void setPrivateField(Object o, String field, Object value) throws Exception{
+ Class<?> c = o.getClass();
+ Field f = c.getDeclaredField(field);
+ f.setAccessible(true);
+ f.set(o, value);
+ }
+}
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java
new file mode 100644
index 0000000..225213f
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java
@@ -0,0 +1,983 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.state.CommandScriptDefinition;
+import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.CustomCommandDefinition;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.ServiceOsSpecific;
+import org.junit.Test;
+
+import java.io.File;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * ServiceModule unit tests.
+ */
+public class ServiceModuleTest {
+
+ @Test
+ public void testResolve_Comment() throws Exception {
+ String comment = "test comment";
+
+ // comment specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+ info.setComment(comment);
+
+ ServiceModule service = resolveService(info, parentInfo);
+ assertEquals(comment, service.getModuleInfo().getComment());
+
+ // comment specified in parent only
+ info.setComment(null);
+ parentInfo.setComment(comment);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(comment, service.getModuleInfo().getComment());
+
+ // set in both
+ info.setComment(comment);
+ parentInfo.setComment("other comment");
+
+ service = resolveService(info, parentInfo);
+ assertEquals(comment, service.getModuleInfo().getComment());
+ }
+
+ @Test
+ public void testResolve_DisplayName() throws Exception {
+ String displayName = "test_display_name";
+
+ // specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+ info.setDisplayName(displayName);
+
+ ServiceModule service = resolveService(info, parentInfo);
+ assertEquals(displayName, service.getModuleInfo().getDisplayName());
+
+ // specified in parent only
+ info.setDisplayName(null);
+ parentInfo.setDisplayName(displayName);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(displayName, service.getModuleInfo().getDisplayName());
+
+ // specified in both
+ info.setDisplayName(displayName);
+ parentInfo.setDisplayName("other display name");
+
+ service = resolveService(info, parentInfo);
+ assertEquals(displayName, service.getModuleInfo().getDisplayName());
+ }
+
+ @Test
+ public void testResolve_RequiredServices() throws Exception {
+ List<String> requiredServices = new ArrayList<String>();
+ requiredServices.add("foo");
+ requiredServices.add("bar");
+
+ // specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+ info.setRequiredServices(requiredServices);
+
+ ServiceModule service = resolveService(info, parentInfo);
+ assertEquals(requiredServices, service.getModuleInfo().getRequiredServices());
+
+ // specified in parent only
+ info.setRequiredServices(null);
+ parentInfo.setRequiredServices(requiredServices);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(requiredServices, service.getModuleInfo().getRequiredServices());
+
+ // specified in both
+ info.setRequiredServices(requiredServices);
+ parentInfo.setRequiredServices(Collections.singletonList("other"));
+
+ service = resolveService(info, parentInfo);
+ assertEquals(requiredServices, service.getModuleInfo().getRequiredServices());
+
+ // not set in either
+ info.setRequiredServices(null);
+ parentInfo.setRequiredServices(null);
+
+ service = resolveService(info, parentInfo);
+ assertTrue(service.getModuleInfo().getRequiredServices().isEmpty());
+ }
+
+ @Test
+ public void testResolve_RestartRequiredAfterChange() throws Exception {
+ Boolean isRestartRequired = true;
+
+ // specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+ info.setRestartRequiredAfterChange(isRestartRequired);
+
+ ServiceModule service = resolveService(info, parentInfo);
+ assertEquals(isRestartRequired, service.getModuleInfo().isRestartRequiredAfterChange());
+
+ // specified in parent only
+ info.setRestartRequiredAfterChange(null);
+ parentInfo.setRestartRequiredAfterChange(isRestartRequired);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(isRestartRequired, service.getModuleInfo().isRestartRequiredAfterChange());
+
+ // specified in both
+ info.setRestartRequiredAfterChange(isRestartRequired);
+ parentInfo.setRestartRequiredAfterChange(false);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(isRestartRequired, service.getModuleInfo().isRestartRequiredAfterChange());
+ }
+
+ @Test
+ public void testResolve_MonitoringService() throws Exception {
+ Boolean isMonitoringService = true;
+
+ // specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+ info.setMonitoringService(isMonitoringService);
+
+ ServiceModule service = resolveService(info, parentInfo);
+ assertEquals(isMonitoringService, service.getModuleInfo().isMonitoringService());
+
+ // specified in parent only
+ info.setMonitoringService(null);
+ parentInfo.setMonitoringService(isMonitoringService);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(isMonitoringService, service.getModuleInfo().isMonitoringService());
+
+ // specified in both
+ info.setMonitoringService(isMonitoringService);
+ parentInfo.setMonitoringService(false);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(isMonitoringService, service.getModuleInfo().isMonitoringService());
+ }
+
+ @Test
+ public void testResolve_OsSpecifics() throws Exception {
+ Map<String, ServiceOsSpecific> osSpecifics = new HashMap<String, ServiceOsSpecific>();
+ osSpecifics.put("foo", new ServiceOsSpecific());
+
+ // specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+ info.setOsSpecifics(osSpecifics);
+
+ ServiceModule service = resolveService(info, parentInfo);
+ assertEquals(osSpecifics, service.getModuleInfo().getOsSpecifics());
+
+ // specified in parent only
+ info.setOsSpecifics(null);
+ parentInfo.setOsSpecifics(osSpecifics);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(osSpecifics, service.getModuleInfo().getOsSpecifics());
+
+ // specified in both
+ Map<String, ServiceOsSpecific> osSpecifics2 = new HashMap<String, ServiceOsSpecific>();
+ osSpecifics2.put("bar", new ServiceOsSpecific());
+
+ info.setOsSpecifics(osSpecifics);
+ parentInfo.setOsSpecifics(osSpecifics2);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(osSpecifics, service.getModuleInfo().getOsSpecifics());
+ }
+
+ @Test
+ public void testResolve_CommandScript() throws Exception {
+ CommandScriptDefinition commandScript = new CommandScriptDefinition();
+
+ // specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+ info.setCommandScript(commandScript);
+
+ ServiceModule service = resolveService(info, parentInfo);
+ assertEquals(commandScript, service.getModuleInfo().getCommandScript());
+
+ // specified in parent only
+ info.setCommandScript(null);
+ parentInfo.setCommandScript(commandScript);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(commandScript, service.getModuleInfo().getCommandScript());
+
+ // specified in both
+ CommandScriptDefinition commandScript2 = new CommandScriptDefinition();
+
+ info.setCommandScript(commandScript);
+ parentInfo.setCommandScript(commandScript2);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(commandScript, service.getModuleInfo().getCommandScript());
+ }
+
+ @Test
+ public void testResolve_ServicePackageFolder() throws Exception {
+ String servicePackageFolder = "packageDir";
+
+ // specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+
+ ServiceModule child = createServiceModule(info);
+ ServiceModule parent = createServiceModule(parentInfo);
+
+ // set in the module constructor from a value obtained from service directory
+ assertEquals("packageDir", child.getModuleInfo().getServicePackageFolder());
+ parent.getModuleInfo().setServicePackageFolder(null);
+
+ resolveService(child, parent);
+ assertEquals(servicePackageFolder, child.getModuleInfo().getServicePackageFolder());
+
+ // specified in parent only
+ child = createServiceModule(info);
+ parent = createServiceModule(parentInfo);
+ parent.getModuleInfo().setServicePackageFolder(servicePackageFolder);
+ child.getModuleInfo().setServicePackageFolder(null);
+
+ resolveService(child, parent);
+ assertEquals(servicePackageFolder, child.getModuleInfo().getServicePackageFolder());
+
+ // specified in both
+ child = createServiceModule(info);
+ parent = createServiceModule(parentInfo);
+ parent.getModuleInfo().setServicePackageFolder("someOtherDir");
+ child.getModuleInfo().setServicePackageFolder(servicePackageFolder);
+
+ resolveService(child, parent);
+ assertEquals(servicePackageFolder, child.getModuleInfo().getServicePackageFolder());
+ }
+
+ @Test
+ public void testResolve_MetricsFile() throws Exception {
+ File metricsFile = new File("testMetricsFile");
+
+ // specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+
+ ServiceModule child = createServiceModule(info);
+ ServiceModule parent = createServiceModule(parentInfo);
+
+ // set in the module constructor from a value obtained from service directory which is mocked
+ assertEquals(metricsFile, child.getModuleInfo().getMetricsFile());
+ parent.getModuleInfo().setMetricsFile(null);
+
+ resolveService(child, parent);
+ assertEquals(metricsFile, child.getModuleInfo().getMetricsFile());
+
+ // specified in parent only
+ child = createServiceModule(info);
+ parent = createServiceModule(parentInfo);
+ parent.getModuleInfo().setMetricsFile(metricsFile);
+ child.getModuleInfo().setMetricsFile(null);
+
+ resolveService(child, parent);
+ assertEquals(metricsFile, child.getModuleInfo().getMetricsFile());
+
+ // specified in both
+ child = createServiceModule(info);
+ parent = createServiceModule(parentInfo);
+ parent.getModuleInfo().setMetricsFile(new File("someOtherDir"));
+ child.getModuleInfo().setMetricsFile(metricsFile);
+
+ resolveService(child, parent);
+ assertEquals(metricsFile, child.getModuleInfo().getMetricsFile());
+ }
+
+ @Test
+ public void testResolve_AlertsFile() throws Exception {
+ File alertsFile = new File("testAlertsFile");
+
+ // specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+
+ ServiceModule child = createServiceModule(info);
+ ServiceModule parent = createServiceModule(parentInfo);
+
+ // set in the module constructor from a value obtained from service directory which is mocked
+ assertEquals(alertsFile, child.getModuleInfo().getAlertsFile());
+ parent.getModuleInfo().setAlertsFile(null);
+
+ resolveService(child, parent);
+ assertEquals(alertsFile, child.getModuleInfo().getAlertsFile());
+
+ // specified in parent only
+ child = createServiceModule(info);
+ parent = createServiceModule(parentInfo);
+ parent.getModuleInfo().setAlertsFile(alertsFile);
+ child.getModuleInfo().setAlertsFile(null);
+
+ resolveService(child, parent);
+ assertEquals(alertsFile, child.getModuleInfo().getAlertsFile());
+
+ // specified in both
+ child = createServiceModule(info);
+ parent = createServiceModule(parentInfo);
+ parent.getModuleInfo().setAlertsFile(new File("someOtherDir"));
+ child.getModuleInfo().setAlertsFile(alertsFile);
+
+ resolveService(child, parent);
+ assertEquals(alertsFile, child.getModuleInfo().getAlertsFile());
+ }
+
+ @Test
+ public void testResolve_CustomCommands() throws Exception {
+ List<CustomCommandDefinition> customCommands = new ArrayList<CustomCommandDefinition>();
+ CustomCommandDefinition cmd1 = new CustomCommandDefinition();
+ setPrivateField(cmd1, "name", "cmd1");
+ setPrivateField(cmd1, "background", false);
+ CustomCommandDefinition cmd2 = new CustomCommandDefinition();
+ setPrivateField(cmd2, "name", "cmd2");
+ customCommands.add(cmd1);
+ customCommands.add(cmd2);
+
+ // specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+ info.setCustomCommands(customCommands);
+
+ ServiceModule service = resolveService(info, parentInfo);
+ assertEquals(customCommands, service.getModuleInfo().getCustomCommands());
+
+ // specified in parent only
+ info.setCustomCommands(null);
+ parentInfo.setCustomCommands(customCommands);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(customCommands, service.getModuleInfo().getCustomCommands());
+
+ // specified in both
+ List<CustomCommandDefinition> parentCustomCommands = new ArrayList<CustomCommandDefinition>();
+ CustomCommandDefinition cmd3 = new CustomCommandDefinition();
+ setPrivateField(cmd3, "name", "cmd1");
+ setPrivateField(cmd3, "background", true);
+ CustomCommandDefinition cmd4 = new CustomCommandDefinition();
+ setPrivateField(cmd4, "name", "cmd4");
+ parentCustomCommands.add(cmd3);
+ parentCustomCommands.add(cmd4);
+
+ info.setCustomCommands(customCommands);
+ parentInfo.setCustomCommands(parentCustomCommands);
+
+ service = resolveService(info, parentInfo);
+ Collection<CustomCommandDefinition> mergedCommands = service.getModuleInfo().getCustomCommands();
+ assertEquals(3, mergedCommands.size());
+ assertTrue(mergedCommands.contains(cmd2));
+ assertTrue(mergedCommands.contains(cmd3));
+ assertTrue(mergedCommands.contains(cmd4));
+
+ // not set in either
+ info.setCustomCommands(null);
+ parentInfo.setCustomCommands(null);
+
+ service = resolveService(info, parentInfo);
+ assertTrue(service.getModuleInfo().getCustomCommands().isEmpty());
+ }
+
+ @Test
+ public void testResolve_ConfigDependencies() throws Exception {
+ List<String> configDependencies = new ArrayList<String>();
+ configDependencies.add("foo");
+ configDependencies.add("bar");
+
+ // specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+ info.setConfigDependencies(configDependencies);
+
+ ServiceModule service = resolveService(info, parentInfo);
+ assertEquals(configDependencies, service.getModuleInfo().getConfigDependencies());
+
+ // specified in parent only
+ info.setConfigDependencies(null);
+ parentInfo.setConfigDependencies(configDependencies);
+
+ service = resolveService(info, parentInfo);
+ assertEquals(configDependencies, service.getModuleInfo().getConfigDependencies());
+
+ // specified in both
+ List<String> parentConfigDependencies = new ArrayList<String>();
+ parentConfigDependencies.add("bar");
+ parentConfigDependencies.add("other");
+
+ info.setConfigDependencies(configDependencies);
+ parentInfo.setConfigDependencies(parentConfigDependencies);
+
+ service = resolveService(info, parentInfo);
+ Collection<String> mergedConfigDependencies = service.getModuleInfo().getConfigDependencies();
+ assertEquals(3, mergedConfigDependencies.size());
+ assertTrue(mergedConfigDependencies.contains("foo"));
+ assertTrue(mergedConfigDependencies.contains("bar"));
+ assertTrue(mergedConfigDependencies.contains("other"));
+
+ // not set in either
+ info.setConfigDependencies(null);
+ parentInfo.setConfigDependencies(null);
+
+ service = resolveService(info, parentInfo);
+ assertTrue(service.getModuleInfo().getConfigDependencies().isEmpty());
+ }
+
+ @Test
+ public void testResolve_Components() throws Exception {
+ // resolve should merge the child component collections
+ // components 1, 2 and XX are set on the parent
+ // components 1, 4 and XX are set on the child
+ // component XX is marked for delete on the child and shouldn't be included
+ // component 1 should be merged
+ // both non-intersecting components 2 and 4 should be included
+ ComponentInfo info1 = new ComponentInfo();
+ info1.setName("1");
+ info1.setCardinality("ALL");
+ ComponentInfo info2 = new ComponentInfo();
+ info2.setName("2");
+ ComponentInfo XX = new ComponentInfo();
+ XX.setName("XX");
+
+ ComponentInfo info3 = new ComponentInfo();
+ // overlaps with info1
+ info3.setName("1");
+ info3.setCategory("category");
+ ComponentInfo info4 = new ComponentInfo();
+ info4.setName("4");
+ ComponentInfo info5 = new ComponentInfo();
+ // overlaps with componentToBeDeleted
+ info5.setName("XX");
+ info5.setDeleted(true);
+
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+
+ //todo: getComponents() should return a protective copy, but for now there is no set/add method
+ List<ComponentInfo> childComponents = info.getComponents();
+ childComponents.add(info3);
+ childComponents.add(info4);
+ childComponents.add(info5);
+
+ List<ComponentInfo> parentComponents = parentInfo.getComponents();
+ parentComponents.add(info1);
+ parentComponents.add(info2);
+
+ ServiceModule child = createServiceModule(info);
+ ServiceModule parent = createServiceModule(parentInfo);
+
+ resolveService(child, parent);
+
+ List<ComponentInfo> components = child.getModuleInfo().getComponents();
+ assertEquals(3, components.size());
+
+ Map<String, ComponentInfo> mergedComponents = new HashMap<String, ComponentInfo>();
+ for (ComponentInfo component : components) {
+ mergedComponents.put(component.getName(), component);
+ }
+ assertTrue(mergedComponents.containsKey("1"));
+ assertTrue(mergedComponents.containsKey("2"));
+ assertTrue(mergedComponents.containsKey("4"));
+
+ // ensure that overlapping components were merged.
+ //don't test all properties, this is done in ComponentModuleTest
+ assertEquals("ALL", mergedComponents.get("1").getCardinality());
+ assertEquals("category", mergedComponents.get("1").getCategory());
+ }
+
+ @Test
+ public void testResolve_Configuration__properties() throws Exception {
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+
+ // child configurations
+ //FOO
+ Collection<PropertyInfo> childFooProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo childProp1 = new PropertyInfo();
+ childProp1.setName("childName1");
+ childProp1.setValue("childVal1");
+ childFooProperties.add(childProp1);
+
+ //BAR : Doesn't inherit parent's BAR due to attribute Supports.DO_NOT_EXTEND
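+ // so the parent's BAR property "parentName2" should not appear in the merged properties asserted below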
+ Collection<PropertyInfo> childBarProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo childProp2 = new PropertyInfo();
+ childProp2.setName("childName2");
+ childProp2.setValue("childVal2");
+ childBarProperties.add(childProp2);
+
+ // add attributes for BAR
+ Map<String, String> attributes = new HashMap<String, String>();
+ attributes.put(ConfigurationInfo.Supports.DO_NOT_EXTEND.getXmlAttributeName(), "true");
+
+ // create child config modules
+ ConfigurationModule childConfigModule1 = createConfigurationModule("FOO", childFooProperties);
+ ConfigurationModule childConfigModule2 = createConfigurationModule("BAR", childBarProperties, attributes);
+ Collection<ConfigurationModule> childModules = new ArrayList<ConfigurationModule>();
+ childModules.add(childConfigModule1);
+ childModules.add(childConfigModule2);
+
+ // parent configurations
+ //FOO
+ Collection<PropertyInfo> parentFooProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo parentProp1 = new PropertyInfo();
+ parentProp1.setName("parentName1");
+ parentProp1.setValue("parentVal1");
+ parentFooProperties.add(parentProp1);
+ PropertyInfo parentProp12 = new PropertyInfo();
+ // overwritten by child
+ parentProp12.setName("childName1");
+ parentProp12.setValue("parentVal1");
+ parentFooProperties.add(parentProp12);
+
+ //BAR
+ Collection<PropertyInfo> parentBarProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo parentProp2 = new PropertyInfo();
+ parentProp2.setName("parentName2");
+ parentProp2.setValue("parentVal2");
+ parentBarProperties.add(parentProp2);
+
+ //OTHER
+ Collection<PropertyInfo> parentOtherProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo parentProp3 = new PropertyInfo();
+ parentProp3.setName("parentName3");
+ parentProp3.setValue("parentVal3");
+ parentOtherProperties.add(parentProp3);
+
+ // create parent config modules
+ ConfigurationModule parentConfigModule1 = createConfigurationModule("FOO", parentFooProperties);
+ ConfigurationModule parentConfigModule2 = createConfigurationModule("BAR", parentBarProperties);
+ ConfigurationModule parentConfigModule3 = createConfigurationModule("OTHER", parentOtherProperties);
+ Collection<ConfigurationModule> parentModules = new ArrayList<ConfigurationModule>();
+ parentModules.add(parentConfigModule1);
+ parentModules.add(parentConfigModule2);
+ parentModules.add(parentConfigModule3);
+
+ // create service modules
+ ServiceModule child = createServiceModule(info, childModules);
+ ServiceModule parent = createServiceModule(parentInfo, parentModules);
+
+ // resolve child with parent
+ resolveService(child, parent);
+
+ // assertions
+ List<PropertyInfo> mergedProperties = child.getModuleInfo().getProperties();
+ assertEquals(4, mergedProperties.size());
+
+ Map<String, PropertyInfo> mergedPropertyMap = new HashMap<String, PropertyInfo>();
+ for (PropertyInfo prop : mergedProperties) {
+ mergedPropertyMap.put(prop.getName(), prop);
+ }
+
+ // filename is null for all props because that is set in ConfigurationDirectory which is mocked
+ assertEquals("childVal1", mergedPropertyMap.get("childName1").getValue());
+ assertEquals("childVal2", mergedPropertyMap.get("childName2").getValue());
+ assertEquals("parentVal1", mergedPropertyMap.get("parentName1").getValue());
+ assertEquals("parentVal3", mergedPropertyMap.get("parentName3").getValue());
+
+ Map<String, Map<String, Map<String, String>>> childAttributes = child.getModuleInfo().getConfigTypeAttributes();
+ Map<String, Map<String, Map<String, String>>> parentAttributes = parent.getModuleInfo().getConfigTypeAttributes();
+
+ assertEquals(3, childAttributes.size());
+ assertAttributes(childAttributes.get("FOO"), Collections.<String, String>emptyMap());
+ assertAttributes(childAttributes.get("BAR"), attributes);
+ assertAttributes(childAttributes.get("OTHER"), Collections.<String, String>emptyMap());
+
+ assertEquals(3, parentAttributes.size());
+ assertAttributes(parentAttributes.get("FOO"), Collections.<String, String>emptyMap());
+ assertAttributes(parentAttributes.get("BAR"), Collections.<String, String>emptyMap());
+ assertAttributes(parentAttributes.get("OTHER"), Collections.<String, String>emptyMap());
+ }
+
+ @Test
+ public void testResolve_Configuration__attributes() throws Exception {
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+
+ // child configurations
+ //FOO
+ Collection<PropertyInfo> childFooProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo childProp1 = new PropertyInfo();
+ childProp1.setName("childName1");
+ childProp1.setValue("childVal1");
+ childFooProperties.add(childProp1);
+
+ // add attributes for child FOO
+ Map<String, String> childFooAttributes = new HashMap<String, String>();
+ // override parent's value
+ childFooAttributes.put(ConfigurationInfo.Supports.ADDING_FORBIDDEN.getXmlAttributeName(), "false");
+
+ // create child config modules
+ ConfigurationModule childConfigModule1 = createConfigurationModule("FOO", childFooProperties, childFooAttributes);
+ Collection<ConfigurationModule> childModules = new ArrayList<ConfigurationModule>();
+ childModules.add(childConfigModule1);
+
+ // parent configurations
+ //FOO
+ Collection<PropertyInfo> parentFooProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo parentProp1 = new PropertyInfo();
+ parentProp1.setName("parentName1");
+ parentProp1.setValue("parentVal1");
+ parentFooProperties.add(parentProp1);
+
+ // add attributes for parent FOO
+ Map<String, String> parentFooAttributes = new HashMap<String, String>();
+ // child will inherit
+ parentFooAttributes.put(ConfigurationInfo.Supports.FINAL.getXmlAttributeName(), "true");
+ // child will override
+ parentFooAttributes.put(ConfigurationInfo.Supports.ADDING_FORBIDDEN.getXmlAttributeName(), "true");
+
+ //BAR
+ Collection<PropertyInfo> parentBarProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo parentProp2 = new PropertyInfo();
+ parentProp2.setName("parentName2");
+ parentProp2.setValue("parentVal2");
+ parentBarProperties.add(parentProp2);
+
+
+ // create parent config modules
+ ConfigurationModule parentConfigModule1 = createConfigurationModule("FOO", parentFooProperties, parentFooAttributes);
+ ConfigurationModule parentConfigModule2 = createConfigurationModule("BAR", parentBarProperties);
+ Collection<ConfigurationModule> parentModules = new ArrayList<ConfigurationModule>();
+ parentModules.add(parentConfigModule1);
+ parentModules.add(parentConfigModule2);
+
+ // create service modules
+ ServiceModule child = createServiceModule(info, childModules);
+ ServiceModule parent = createServiceModule(parentInfo, parentModules);
+
+ // resolve child with parent
+ resolveService(child, parent);
+
+ // assertions
+ Map<String, Map<String, Map<String, String>>> childTypeAttributes = child.getModuleInfo().getConfigTypeAttributes();
+ Map<String, Map<String, Map<String, String>>> parentTypeAttributes = parent.getModuleInfo().getConfigTypeAttributes();
+ assertTrue(childTypeAttributes.containsKey("FOO"));
+ Map<String, Map<String, String>> mergedChildFooAttributes = childTypeAttributes.get("FOO");
+ assertTrue(mergedChildFooAttributes.containsKey(ConfigurationInfo.Supports.KEYWORD));
+ // inherited value
+ assertEquals("true", mergedChildFooAttributes.get(ConfigurationInfo.Supports.KEYWORD).
+ get(ConfigurationInfo.Supports.valueOf("FINAL").getPropertyName()));
+ // overridden value
+ assertEquals("false", mergedChildFooAttributes.get(ConfigurationInfo.Supports.KEYWORD).
+ get(ConfigurationInfo.Supports.valueOf("ADDING_FORBIDDEN").getPropertyName()));
+
+ assertEquals(2, childTypeAttributes.size());
+
+ assertEquals(2, parentTypeAttributes.size());
+ assertAttributes(parentTypeAttributes.get("FOO"), parentFooAttributes);
+ assertAttributes(parentTypeAttributes.get("BAR"), Collections.<String, String>emptyMap());
+ }
+
+ @Test
+ public void testResolve_Configuration__ExcludedTypes() throws Exception {
+ ServiceInfo info = new ServiceInfo();
+ info.setExcludedConfigTypes(Collections.singleton("BAR"));
+
+ //FOO
+ Collection<PropertyInfo> fooProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo prop1 = new PropertyInfo();
+ prop1.setName("name1");
+ prop1.setValue("val1");
+ fooProperties.add(prop1);
+ PropertyInfo prop2 = new PropertyInfo();
+ prop2.setName("name2");
+ prop2.setValue("val2");
+ fooProperties.add(prop2);
+
+ //BAR
+ Collection<PropertyInfo> barProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo prop3 = new PropertyInfo();
+ prop3.setName("name1");
+ prop3.setValue("val3");
+ barProperties.add(prop3);
+
+ //OTHER
+ Collection<PropertyInfo> otherProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo prop4 = new PropertyInfo();
+ prop4.setName("name1");
+ prop4.setValue("val4");
+ otherProperties.add(prop4);
+
+ ConfigurationModule configModule1 = createConfigurationModule("FOO", fooProperties);
+ ConfigurationModule configModule2 = createConfigurationModule("BAR", barProperties);
+ ConfigurationModule configModule3 = createConfigurationModule("OTHER", otherProperties);
+ Collection<ConfigurationModule> configModules = new ArrayList<ConfigurationModule>();
+ configModules.add(configModule1);
+ configModules.add(configModule2);
+ configModules.add(configModule3);
+
+ ServiceModule service = createServiceModule(info, configModules);
+
+ List<PropertyInfo> properties = service.getModuleInfo().getProperties();
+ assertEquals(3, properties.size());
+
+ Map<String, Map<String, Map<String, String>>> attributes = service.getModuleInfo().getConfigTypeAttributes();
+ assertEquals(2, attributes.size());
+ assertTrue(attributes.containsKey("FOO"));
+ assertTrue(attributes.containsKey("OTHER"));
+ }
+
+ @Test
+ public void testResolve_Configuration__ExcludedTypes__ParentType() throws Exception {
+ // child
+ ServiceInfo info = new ServiceInfo();
+ info.setExcludedConfigTypes(Collections.singleton("BAR"));
+
+ //FOO
+ Collection<PropertyInfo> fooProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo prop1 = new PropertyInfo();
+ prop1.setName("name1");
+ prop1.setValue("val1");
+ fooProperties.add(prop1);
+ PropertyInfo prop2 = new PropertyInfo();
+ prop2.setName("name2");
+ prop2.setValue("val2");
+ fooProperties.add(prop2);
+
+ ConfigurationModule childConfigModule = createConfigurationModule("FOO", fooProperties);
+ Collection<ConfigurationModule> childConfigModules = new ArrayList<ConfigurationModule>();
+ childConfigModules.add(childConfigModule);
+
+ // parent
+ ServiceInfo parentInfo = new ServiceInfo();
+
+ //BAR
+ Collection<PropertyInfo> barProperties = new ArrayList<PropertyInfo>();
+ PropertyInfo prop3 = new PropertyInfo();
+ prop3.setName("name1");
+ prop3.setValue("val3");
+ barProperties.add(prop3);
+
+ ConfigurationModule parentConfigModule = createConfigurationModule("BAR", barProperties);
+ Collection<ConfigurationModule> parentConfigModules = new ArrayList<ConfigurationModule>();
+ parentConfigModules.add(parentConfigModule);
+
+ // create service modules
+ ServiceModule service = createServiceModule(info, childConfigModules);
+ ServiceModule parentService = createServiceModule(parentInfo, parentConfigModules);
+ // resolve child with parent
+ resolveService(service, parentService);
+ // assertions
+ List<PropertyInfo> properties = service.getModuleInfo().getProperties();
+ assertEquals(2, properties.size());
+
+ Map<String, Map<String, Map<String, String>>> attributes = service.getModuleInfo().getConfigTypeAttributes();
+ assertEquals(1, attributes.size());
+ assertTrue(attributes.containsKey("FOO"));
+
+ Map<String, Map<String, Map<String, String>>> parentAttributes = parentService.getModuleInfo().getConfigTypeAttributes();
+ assertEquals(1, parentAttributes.size());
+ assertTrue(parentAttributes.containsKey("BAR"));
+ }
+
+ @Test
+ public void testServiceCheckRegistered() throws Exception {
+ ServiceInfo info = new ServiceInfo();
+ info.setName("service1");
+ info.setCommandScript(createNiceMock(CommandScriptDefinition.class));
+
+ StackContext context = createStackContext(info.getName(), true);
+ ServiceModule service = createServiceModule(info, Collections.<ConfigurationModule>emptySet(), context);
+ service.finalizeModule();
+
+ verify(context);
+ }
+
+ @Test
+ public void testServiceCheckNotRegisteredForDeletedService() throws Exception {
+ ServiceInfo info = new ServiceInfo();
+ info.setName("service1");
+ info.setCommandScript(createNiceMock(CommandScriptDefinition.class));
+ info.setDeleted(true);
+
+ StackContext context = createStackContext(info.getName(), false);
+ ServiceModule service = createServiceModule(info, Collections.<ConfigurationModule>emptySet(), context);
+ service.finalizeModule();
+
+ verify(context);
+ }
+
+ private ServiceModule createServiceModule(ServiceInfo serviceInfo) {
+ String configType = "type1";
+
+ if (serviceInfo.getName() == null) {
+ serviceInfo.setName("service1");
+ }
+
+ StackContext context = createStackContext(serviceInfo.getName(), true);
+ // no config props
+ ConfigurationInfo configInfo = createConfigurationInfo(Collections.<PropertyInfo>emptyList(),
+ Collections.<String, String>emptyMap());
+
+ ConfigurationModule module = createConfigurationModule(configType, configInfo);
+ ConfigurationDirectory configDirectory = createConfigurationDirectory(Collections.singletonList(module));
+ ServiceDirectory serviceDirectory = createServiceDirectory(serviceInfo.getConfigDir(), configDirectory);
+
+ return createServiceModule(context, serviceInfo, serviceDirectory);
+ }
+
+ private ServiceModule createServiceModule(ServiceInfo serviceInfo,
+ Collection<ConfigurationModule> configurations,
+ StackContext context) {
+
+ if (serviceInfo.getName() == null) {
+ serviceInfo.setName("service1");
+ }
+
+ ConfigurationDirectory configDirectory = createConfigurationDirectory(configurations);
+ ServiceDirectory serviceDirectory = createServiceDirectory(serviceInfo.getConfigDir(), configDirectory);
+
+ return createServiceModule(context, serviceInfo, serviceDirectory);
+ }
+
+ private ServiceModule createServiceModule(ServiceInfo serviceInfo, Collection<ConfigurationModule> configurations) {
+ if (serviceInfo.getName() == null) {
+ serviceInfo.setName("service1");
+ }
+
+ String serviceName = serviceInfo.getName();
+
+ return createServiceModule(serviceInfo, configurations, createStackContext(serviceName, true));
+ }
+
+ private ServiceModule createServiceModule(StackContext context, ServiceInfo serviceInfo,
+ ServiceDirectory serviceDirectory) {
+
+ return new ServiceModule(context, serviceInfo, serviceDirectory);
+ }
+
+ private ServiceDirectory createServiceDirectory(String dir, ConfigurationDirectory configDir) {
+
+ ServiceDirectory serviceDirectory = createNiceMock(ServiceDirectory.class);
+
+ expect(serviceDirectory.getConfigurationDirectory(dir)).andReturn(configDir).anyTimes();
+ expect(serviceDirectory.getMetricsFile()).andReturn(new File("testMetricsFile")).anyTimes();
+ expect(serviceDirectory.getAlertsFile()).andReturn(new File("testAlertsFile")).anyTimes();
+ expect(serviceDirectory.getPackageDir()).andReturn("packageDir").anyTimes();
+ replay(serviceDirectory);
+
+ return serviceDirectory;
+ }
+
+ private ConfigurationDirectory createConfigurationDirectory(Collection<ConfigurationModule> modules) {
+ ConfigurationDirectory configDir = createNiceMock(ConfigurationDirectory.class);
+
+ expect(configDir.getConfigurationModules()).andReturn(modules).anyTimes();
+ replay(configDir);
+
+ return configDir;
+ }
+
+ private ConfigurationModule createConfigurationModule(String configType, ConfigurationInfo info) {
+ return new ConfigurationModule(configType, info);
+ }
+
+ private ConfigurationModule createConfigurationModule(String configType, Collection<PropertyInfo> properties) {
+ ConfigurationInfo info = new ConfigurationInfo(properties, Collections.<String, String>emptyMap());
+ return new ConfigurationModule(configType, info);
+ }
+
+ private ConfigurationModule createConfigurationModule(String configType,
+ Collection<PropertyInfo> properties,
+ Map<String, String> attributes) {
+
+ ConfigurationInfo info = new ConfigurationInfo(properties, attributes);
+ return new ConfigurationModule(configType, info);
+ }
+
+ private ConfigurationInfo createConfigurationInfo(Collection<PropertyInfo> properties,
+ Map<String, String> attributes) {
+
+ return new ConfigurationInfo(properties, attributes);
+ }
+
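+ /**
+ * Creates a strict StackContext mock. When expectServiceRegistration is false, no expectation is
+ * recorded, so an unexpected registerServiceCheck() call fails verification.
+ */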
+ private StackContext createStackContext(String serviceName, boolean expectServiceRegistration) {
+ StackContext context = createStrictMock(StackContext.class);
+
+ if (expectServiceRegistration) {
+ context.registerServiceCheck(serviceName);
+ }
+ replay(context);
+
+ return context;
+ }
+
+ private ServiceModule resolveService(ServiceInfo info, ServiceInfo parentInfo) throws AmbariException {
+ ServiceModule service = createServiceModule(info);
+ ServiceModule parentService = createServiceModule(parentInfo);
+
+ resolveService(service, parentService);
+ return service;
+ }
+
+ private void resolveService(ServiceModule service, ServiceModule parent) throws AmbariException {
+ service.resolve(parent, Collections.<String, StackModule>emptyMap());
+ // at runtime this would be called by the stack module once its resolution has completed
+ service.finalizeModule();
+ parent.finalizeModule();
+ }
+
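+ /**
+ * Asserts that the merged attributes contain only the "supports" namespace and that every
+ * Supports value is present, using the specified value when one was given and the default otherwise.
+ */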
+ private void assertAttributes(Map<String, Map<String, String>> mergedAttributes, Map<String, String> specifiedAttributes) {
+ assertEquals(1, mergedAttributes.size()); // only supports
+ Map<String, String> supportsAttributes = mergedAttributes.get(ConfigurationInfo.Supports.KEYWORD);
+ assertEquals(ConfigurationInfo.Supports.values().length, supportsAttributes.size());
+ for (Map.Entry<String, String> attribute : supportsAttributes.entrySet()) {
+ String attributeName = attribute.getKey();
+ String attributeValue = attribute.getValue();
+
+ // need to call toUpperCase() because the attribute name is name().toLowerCase()
+ ConfigurationInfo.Supports s = ConfigurationInfo.Supports.valueOf(attributeName.toUpperCase());
+ String specifiedVal = specifiedAttributes.get(s.getXmlAttributeName());
+ if (specifiedVal != null) {
+ assertEquals(specifiedVal, attributeValue);
+ } else {
+ assertEquals(s.getDefaultValue(), attributeValue);
+ }
+ }
+ }
+
+ private void setPrivateField(Object o, String field, Object value) throws Exception {
+ Class<?> c = o.getClass();
+ Field f = c.getDeclaredField(field);
+ f.setAccessible(true);
+ f.set(o, value);
+ }
+}
[05/11] ambari git commit: AMBARI-7175. Add explicit stack service
inheritance
Posted by js...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
new file mode 100644
index 0000000..a6cbc6a
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerTest.java
@@ -0,0 +1,594 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.metadata.ActionMetadata;
+import org.apache.ambari.server.orm.dao.MetainfoDAO;
+import org.apache.ambari.server.state.ClientConfigFileDefinition;
+import org.apache.ambari.server.state.CommandScriptDefinition;
+import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.ServiceOsSpecific;
+import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.stack.OsFamily;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * StackManager unit tests.
+ */
+public class StackManagerTest {
+
+ private static StackManager stackManager;
+ private static MetainfoDAO dao;
+ private static ActionMetadata actionMetadata;
+ private static OsFamily osFamily;
+
+ @BeforeClass
+ public static void initStack() throws Exception {
+ stackManager = createTestStackManager();
+ }
+
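+ /**
+ * Builds a StackManager over the test stack definitions, using nice mocks for the metainfo DAO
+ * and action metadata and an OsFamily backed by the test shared resources directory.
+ */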
+ public static StackManager createTestStackManager() throws Exception {
+ return createTestStackManager("./src/test/resources/stacks/");
+ }
+
+ public static StackManager createTestStackManager(String stackRoot) throws Exception {
+ try {
+ // todo: dao, actionMetadata expectations
+ dao = createNiceMock(MetainfoDAO.class);
+ actionMetadata = createNiceMock(ActionMetadata.class);
+ Configuration config = createNiceMock(Configuration.class);
+ expect(config.getSharedResourcesDirPath()).andReturn("./src/test/resources").anyTimes();
+ replay(config);
+ osFamily = new OsFamily(config);
+
+ replay(dao, actionMetadata);
+ return new StackManager(new File(stackRoot), new StackContext(dao, actionMetadata, osFamily));
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw e;
+ }
+ }
+
+ @Test
+ public void testGetStacks_count() throws Exception {
+ Collection<StackInfo> stacks = stackManager.getStacks();
+ assertEquals(16, stacks.size());
+ }
+
+ @Test
+ public void testGetStack_name__count() {
+ Collection<StackInfo> stacks = stackManager.getStacks("HDP");
+ assertEquals(12, stacks.size());
+
+ stacks = stackManager.getStacks("OTHER");
+ assertEquals(2, stacks.size());
+ }
+
+ @Test
+ public void testGetStack_basic() {
+ StackInfo stack = stackManager.getStack("HDP", "0.1");
+ assertNotNull(stack);
+ assertEquals("HDP", stack.getName());
+ assertEquals("0.1", stack.getVersion());
+
+
+ Collection<ServiceInfo> services = stack.getServices();
+ assertEquals(3, services.size());
+
+ Map<String, ServiceInfo> serviceMap = new HashMap<String, ServiceInfo>();
+ for (ServiceInfo service : services) {
+ serviceMap.put(service.getName(), service);
+ }
+ ServiceInfo hdfsService = serviceMap.get("HDFS");
+ assertNotNull(hdfsService);
+ List<ComponentInfo> components = hdfsService.getComponents();
+ assertEquals(6, components.size());
+ List<PropertyInfo> properties = hdfsService.getProperties();
+ assertEquals(62, properties.size());
+
+ // test a couple of the properties for filename
+ boolean hdfsPropFound = false;
+ boolean hbasePropFound = false;
+ for (PropertyInfo p : properties) {
+ if (p.getName().equals("hbase.regionserver.msginterval")) {
+ assertEquals("hbase-site.xml", p.getFilename());
+ hbasePropFound = true;
+ } else if (p.getName().equals("dfs.name.dir")) {
+ assertEquals("hdfs-site.xml", p.getFilename());
+ hdfsPropFound = true;
+ }
+ }
+ assertTrue(hbasePropFound);
+ assertTrue(hdfsPropFound);
+
+ ServiceInfo mrService = serviceMap.get("MAPREDUCE");
+ assertNotNull(mrService);
+ components = mrService.getComponents();
+ assertEquals(3, components.size());
+
+ ServiceInfo pigService = serviceMap.get("PIG");
+ assertNotNull(pigService);
+ assertEquals("PIG", pigService.getName());
+ assertEquals("1.0", pigService.getVersion());
+ assertNull(pigService.getParent());
+ assertEquals("This is comment for PIG service", pigService.getComment());
+ components = pigService.getComponents();
+ assertEquals(1, components.size());
+ CommandScriptDefinition commandScript = pigService.getCommandScript();
+ assertEquals("scripts/service_check.py", commandScript.getScript());
+ assertEquals(CommandScriptDefinition.Type.PYTHON, commandScript.getScriptType());
+ assertEquals(300, commandScript.getTimeout());
+ List<String> configDependencies = pigService.getConfigDependencies();
+ assertEquals(1, configDependencies.size());
+ assertEquals("global", configDependencies.get(0));
+ assertEquals("global", pigService.getConfigDependenciesWithComponents().get(0));
+ ComponentInfo client = pigService.getClientComponent();
+ assertNotNull(client);
+ assertEquals("PIG", client.getName());
+ assertEquals("0+", client.getCardinality());
+ assertEquals("CLIENT", client.getCategory());
+ assertEquals("configuration", pigService.getConfigDir());
+ assertEquals("2.0", pigService.getSchemaVersion());
+ Map<String, ServiceOsSpecific> osInfoMap = pigService.getOsSpecifics();
+ assertEquals(1, osInfoMap.size());
+ ServiceOsSpecific osSpecific = osInfoMap.get("centos6");
+ assertNotNull(osSpecific);
+ assertEquals("centos6", osSpecific.getOsFamily());
+ assertNull(osSpecific.getRepo());
+ List<ServiceOsSpecific.Package> packages = osSpecific.getPackages();
+ assertEquals(1, packages.size());
+ ServiceOsSpecific.Package pkg = packages.get(0);
+ assertEquals("pig", pkg.getName());
+ }
+
+ @Test
+ public void testStackVersionInheritance_includeAllServices() {
+ StackInfo stack = stackManager.getStack("HDP", "2.1.1");
+ assertNotNull(stack);
+ assertEquals("HDP", stack.getName());
+ assertEquals("2.1.1", stack.getVersion());
+ Collection<ServiceInfo> services = stack.getServices();
+
+ //should include all stacks in hierarchy
+ assertEquals(14, services.size());
+ HashSet<String> expectedServices = new HashSet<String>();
+ expectedServices.add("GANGLIA");
+ expectedServices.add("HBASE");
+ expectedServices.add("HCATALOG");
+ expectedServices.add("HDFS");
+ expectedServices.add("HIVE");
+ expectedServices.add("MAPREDUCE2");
+ expectedServices.add("NAGIOS");
+ expectedServices.add("OOZIE");
+ expectedServices.add("PIG");
+ expectedServices.add("SQOOP");
+ expectedServices.add("YARN");
+ expectedServices.add("ZOOKEEPER");
+ expectedServices.add("STORM");
+ expectedServices.add("FLUME");
+
+ ServiceInfo pigService = null;
+ for (ServiceInfo service : services) {
+ if (service.getName().equals("PIG")) {
+ pigService = service;
+ }
+ assertTrue(expectedServices.remove(service.getName()));
+ }
+ assertTrue(expectedServices.isEmpty());
+
+ // extended values
+ assertNotNull(pigService);
+ assertEquals("0.12.1.2.1.1", pigService.getVersion());
+ assertEquals("Scripting platform for analyzing large datasets (Extended)", pigService.getComment());
+ //base value
+ ServiceInfo basePigService = stackManager.getStack("HDP", "2.0.5").getService("PIG");
+ assertEquals("0.11.1.2.0.5.0", basePigService.getVersion());
+ assertEquals(1, basePigService.getComponents().size());
+ // new component added in extended version
+ assertEquals(2, pigService.getComponents().size());
+ // no properties in base service
+ assertEquals(0, basePigService.getProperties().size());
+ assertEquals(1, pigService.getProperties().size());
+ assertEquals("content", pigService.getProperties().get(0).getName());
+ }
+
+ @Test
+ public void testGetStack_explicitServiceExtension() {
+ StackInfo stack = stackManager.getStack("OTHER", "1.0");
+ assertNotNull(stack);
+ assertEquals("OTHER", stack.getName());
+ assertEquals("1.0", stack.getVersion());
+ Collection<ServiceInfo> services = stack.getServices();
+
+ assertEquals(3, services.size());
+
+ // hdfs service
+ assertEquals(6, stack.getService("HDFS").getComponents().size());
+
+ // Extended Sqoop service via explicit service extension
+ ServiceInfo sqoopService = stack.getService("SQOOP2");
+ assertNotNull(sqoopService);
+
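+ // SQOOP2 overrides only the comment and version; the remaining values compared below are
+ // inherited from the base SQOOP service of HDP 2.1.1 via the explicit service extension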
+ assertEquals("Extended SQOOP", sqoopService.getComment());
+ assertEquals("Extended Version", sqoopService.getVersion());
+ assertNull(sqoopService.getServicePackageFolder());
+
+ Collection<ComponentInfo> components = sqoopService.getComponents();
+ assertEquals(1, components.size());
+ ComponentInfo component = components.iterator().next();
+ assertEquals("SQOOP", component.getName());
+
+ // Get the base sqoop service
+ StackInfo baseStack = stackManager.getStack("HDP", "2.1.1");
+ ServiceInfo baseSqoopService = baseStack.getService("SQOOP");
+
+ // values from base service
+ assertEquals(baseSqoopService.isDeleted(), sqoopService.isDeleted());
+ assertEquals(baseSqoopService.getAlertsFile(), sqoopService.getAlertsFile());
+ assertEquals(baseSqoopService.getClientComponent(), sqoopService.getClientComponent());
+ assertEquals(baseSqoopService.getCommandScript(), sqoopService.getCommandScript());
+ assertEquals(baseSqoopService.getConfigDependencies(), sqoopService.getConfigDependencies());
+ assertEquals(baseSqoopService.getConfigDir(), sqoopService.getConfigDir());
+ assertEquals(baseSqoopService.getConfigDependenciesWithComponents(), sqoopService.getConfigDependenciesWithComponents());
+ assertEquals(baseSqoopService.getConfigTypeAttributes(), sqoopService.getConfigTypeAttributes());
+ assertEquals(baseSqoopService.getCustomCommands(), sqoopService.getCustomCommands());
+ assertEquals(baseSqoopService.getExcludedConfigTypes(), sqoopService.getExcludedConfigTypes());
+ assertEquals(baseSqoopService.getProperties(), sqoopService.getProperties());
+ assertEquals(baseSqoopService.getMetrics(), sqoopService.getMetrics());
+ assertNull(baseSqoopService.getMetricsFile());
+ assertNull(sqoopService.getMetricsFile());
+ assertEquals(baseSqoopService.getOsSpecifics(), sqoopService.getOsSpecifics());
+ assertEquals(baseSqoopService.getRequiredServices(), sqoopService.getRequiredServices());
+ assertEquals(baseSqoopService.getSchemaVersion(), sqoopService.getSchemaVersion());
+
+ // extended Storm service via explicit service extension
+ ServiceInfo stormService = stack.getService("STORM");
+ assertNotNull(stormService);
+ assertEquals("STORM", stormService.getName());
+
+ // base storm service
+ ServiceInfo baseStormService = baseStack.getService("STORM");
+
+ // overridden value
+ assertEquals("Apache Hadoop Stream processing framework (Extended)", stormService.getComment());
+ assertEquals("New version", stormService.getVersion());
+ assertEquals("OTHER/1.0/services/STORM/package", stormService.getServicePackageFolder());
+ // compare components
+ List<ComponentInfo> stormServiceComponents = stormService.getComponents();
+ List<ComponentInfo> baseStormServiceComponents = baseStormService.getComponents();
+ assertEquals(new HashSet<ComponentInfo>(stormServiceComponents), new HashSet<ComponentInfo>(baseStormServiceComponents));
+ // values from base service
+ assertEquals(baseStormService.isDeleted(), stormService.isDeleted());
+ //todo: specify alerts file in stack
+ assertEquals(baseStormService.getAlertsFile(), stormService.getAlertsFile());
+
+ assertEquals(baseStormService.getClientComponent(), stormService.getClientComponent());
+ assertEquals(baseStormService.getCommandScript(), stormService.getCommandScript());
+ assertEquals(baseStormService.getConfigDependencies(), stormService.getConfigDependencies());
+ assertEquals(baseStormService.getConfigDir(), stormService.getConfigDir());
+ assertEquals(baseStormService.getConfigDependenciesWithComponents(), stormService.getConfigDependenciesWithComponents());
+ assertEquals(baseStormService.getConfigTypeAttributes(), stormService.getConfigTypeAttributes());
+ assertEquals(baseStormService.getCustomCommands(), stormService.getCustomCommands());
+ assertEquals(baseStormService.getExcludedConfigTypes(), stormService.getExcludedConfigTypes());
+ assertEquals(baseStormService.getProperties(), stormService.getProperties());
+ assertEquals(baseStormService.getMetrics(), stormService.getMetrics());
+ assertNotNull(baseStormService.getMetricsFile());
+ assertNotNull(stormService.getMetricsFile());
+ assertFalse(baseStormService.getMetricsFile().equals(stormService.getMetricsFile()));
+ assertEquals(baseStormService.getOsSpecifics(), stormService.getOsSpecifics());
+ assertEquals(baseStormService.getRequiredServices(), stormService.getRequiredServices());
+ assertEquals(baseStormService.getSchemaVersion(), stormService.getSchemaVersion());
+ }
+
+ @Test
+ public void testGetStack_versionInheritance__explicitServiceExtension() {
+ StackInfo baseStack = stackManager.getStack("OTHER", "1.0");
+ StackInfo stack = stackManager.getStack("OTHER", "2.0");
+
+ assertEquals(4, stack.getServices().size());
+
+ ServiceInfo service = stack.getService("SQOOP2");
+ ServiceInfo baseSqoopService = baseStack.getService("SQOOP2");
+
+ assertEquals("SQOOP2", service.getName());
+ assertEquals("Inherited from parent", service.getComment());
+ assertEquals("Extended from parent version", service.getVersion());
+ assertNull(service.getServicePackageFolder());
+ // compare components
+ List<ComponentInfo> serviceComponents = service.getComponents();
+ List<ComponentInfo> baseSqoopServiceComponents = baseSqoopService.getComponents();
+ assertEquals(serviceComponents, baseSqoopServiceComponents);
+ // values from base service
+ assertEquals(baseSqoopService.isDeleted(), service.isDeleted());
+ assertEquals(baseSqoopService.getAlertsFile(), service.getAlertsFile());
+ assertEquals(baseSqoopService.getClientComponent(), service.getClientComponent());
+ assertEquals(baseSqoopService.getCommandScript(), service.getCommandScript());
+ assertEquals(baseSqoopService.getConfigDependencies(), service.getConfigDependencies());
+ assertEquals(baseSqoopService.getConfigDir(), service.getConfigDir());
+ assertEquals(baseSqoopService.getConfigDependenciesWithComponents(), service.getConfigDependenciesWithComponents());
+ assertEquals(baseSqoopService.getConfigTypeAttributes(), service.getConfigTypeAttributes());
+ assertEquals(baseSqoopService.getCustomCommands(), service.getCustomCommands());
+ assertEquals(baseSqoopService.getExcludedConfigTypes(), service.getExcludedConfigTypes());
+ assertEquals(baseSqoopService.getProperties(), service.getProperties());
+ assertEquals(baseSqoopService.getMetrics(), service.getMetrics());
+ assertNull(baseSqoopService.getMetricsFile());
+ assertNull(service.getMetricsFile());
+ assertEquals(baseSqoopService.getOsSpecifics(), service.getOsSpecifics());
+ assertEquals(baseSqoopService.getRequiredServices(), service.getRequiredServices());
+ assertEquals(baseSqoopService.getSchemaVersion(), service.getSchemaVersion());
+ }
+
+ @Test
+ public void testConfigDependenciesInheritance() throws Exception {
+ StackInfo stack = stackManager.getStack("HDP", "2.0.6");
+ ServiceInfo hdfsService = stack.getService("HDFS");
+ assertEquals(5, hdfsService.getConfigDependencies().size());
+ assertEquals(4, hdfsService.getConfigTypeAttributes().size());
+ assertTrue(hdfsService.getConfigDependencies().contains("core-site"));
+ assertTrue(hdfsService.getConfigDependencies().contains("global"));
+ assertTrue(hdfsService.getConfigDependencies().contains("hdfs-site"));
+ assertTrue(hdfsService.getConfigDependencies().contains("hdfs-log4j"));
+ assertTrue(hdfsService.getConfigDependencies().contains("hadoop-policy"));
+ assertTrue(Boolean.valueOf(hdfsService.getConfigTypeAttributes().get("core-site").get("supports").get("final")));
+ assertFalse(Boolean.valueOf(hdfsService.getConfigTypeAttributes().get("global").get("supports").get("final")));
+ }
+
+ @Test
+ public void testClientConfigFilesInheritance() throws Exception {
+ StackInfo stack = stackManager.getStack("HDP", "2.0.6");
+ ServiceInfo zkService = stack.getService("ZOOKEEPER");
+ List<ComponentInfo> components = zkService.getComponents();
+ assertEquals(2, components.size());
+ ComponentInfo componentInfo = components.get(1);
+ List<ClientConfigFileDefinition> clientConfigs = componentInfo.getClientConfigFiles();
+ assertEquals(2, clientConfigs.size());
+ assertEquals("zookeeper-env", clientConfigs.get(0).getDictionaryName());
+ assertEquals("zookeeper-env.sh", clientConfigs.get(0).getFileName());
+ assertEquals("env", clientConfigs.get(0).getType());
+ assertEquals("zookeeper-log4j", clientConfigs.get(1).getDictionaryName());
+ assertEquals("log4j.properties", clientConfigs.get(1).getFileName());
+ assertEquals("env", clientConfigs.get(1).getType());
+ }
+
+ @Test
+ public void testMonitoringServicePropertyInheritance() throws Exception {
+ StackInfo stack = stackManager.getStack("HDP", "2.0.7");
+ Collection<ServiceInfo> allServices = stack.getServices();
+ assertEquals(13, allServices.size());
+ for (ServiceInfo serviceInfo : allServices) {
+ if (serviceInfo.getName().equals("NAGIOS")) {
+ assertTrue(serviceInfo.isMonitoringService());
+ } else {
+ assertNull(serviceInfo.isMonitoringService());
+ }
+ }
+ }
+
+ @Test
+ public void testServiceDeletion() {
+ StackInfo stack = stackManager.getStack("HDP", "2.0.6");
+ Collection<ServiceInfo> allServices = stack.getServices();
+
+ assertEquals(12, allServices.size());
+ HashSet<String> expectedServices = new HashSet<String>();
+ expectedServices.add("GANGLIA");
+ expectedServices.add("HBASE");
+ expectedServices.add("HCATALOG");
+ expectedServices.add("HDFS");
+ expectedServices.add("HIVE");
+ expectedServices.add("MAPREDUCE2");
+ expectedServices.add("NAGIOS");
+ expectedServices.add("OOZIE");
+ expectedServices.add("PIG");
+ expectedServices.add("ZOOKEEPER");
+ expectedServices.add("FLUME");
+ expectedServices.add("YARN");
+
+ for (ServiceInfo service : allServices) {
+ assertTrue(expectedServices.remove(service.getName()));
+ }
+ assertTrue(expectedServices.isEmpty());
+ }
+
+ @Test
+ public void testComponentDeletion() {
+ StackInfo stack = stackManager.getStack("HDP", "2.0.6");
+ ServiceInfo yarnService = stack.getService("YARN");
+ assertNull(yarnService.getComponentByName("YARN_CLIENT"));
+
+ stack = stackManager.getStack("HDP", "2.0.7");
+ yarnService = stack.getService("YARN");
+ assertNotNull(yarnService.getComponentByName("YARN_CLIENT"));
+ }
+
+ @Test
+ public void testPopulateConfigTypes() throws Exception {
+ StackInfo stack = stackManager.getStack("HDP", "2.0.7");
+ ServiceInfo hdfsService = stack.getService("HDFS");
+
+ Map<String, Map<String, Map<String, String>>> configTypes = hdfsService.getConfigTypeAttributes();
+ assertEquals(4, configTypes.size());
+
+ Map<String, Map<String, String>> configType = configTypes.get("global");
+ assertEquals(1, configType.size());
+ Map<String, String> supportsMap = configType.get("supports");
+ assertEquals(3, supportsMap.size());
+ assertEquals("true", supportsMap.get("final"));
+ assertEquals("false", supportsMap.get("adding_forbidden"));
+ assertEquals("false", supportsMap.get("do_not_extend"));
+
+ configType = configTypes.get("hdfs-site");
+ assertEquals(1, configType.size());
+ supportsMap = configType.get("supports");
+ assertEquals(3, supportsMap.size());
+ assertEquals("false", supportsMap.get("final"));
+ assertEquals("false", supportsMap.get("adding_forbidden"));
+ assertEquals("false", supportsMap.get("do_not_extend"));
+
+ configType = configTypes.get("core-site");
+ assertEquals(1, configType.size());
+ supportsMap = configType.get("supports");
+ assertEquals(3, supportsMap.size());
+ assertEquals("false", supportsMap.get("final"));
+ assertEquals("false", supportsMap.get("adding_forbidden"));
+ assertEquals("false", supportsMap.get("do_not_extend"));
+
+ configType = configTypes.get("hadoop-policy");
+ assertEquals(1, configType.size());
+ supportsMap = configType.get("supports");
+ assertEquals(3, supportsMap.size());
+ assertEquals("false", supportsMap.get("final"));
+ assertEquals("false", supportsMap.get("adding_forbidden"));
+ assertEquals("false", supportsMap.get("do_not_extend"));
+
+ ServiceInfo yarnService = stack.getService("YARN");
+ configTypes = yarnService.getConfigTypeAttributes();
+ assertEquals(4, configTypes.size());
+ assertTrue(configTypes.containsKey("yarn-site"));
+ assertTrue(configTypes.containsKey("core-site"));
+ assertTrue(configTypes.containsKey("global"));
+ assertTrue(configTypes.containsKey("capacity-scheduler"));
+
+ configType = configTypes.get("yarn-site");
+ supportsMap = configType.get("supports");
+ assertEquals(3, supportsMap.size());
+ assertEquals("false", supportsMap.get("final"));
+ assertEquals("true", supportsMap.get("adding_forbidden"));
+ assertEquals("true", supportsMap.get("do_not_extend"));
+
+ ServiceInfo mrService = stack.getService("MAPREDUCE2");
+ configTypes = mrService.getConfigTypeAttributes();
+ assertEquals(3, configTypes.size());
+ assertTrue(configTypes.containsKey("mapred-site"));
+ assertTrue(configTypes.containsKey("core-site"));
+ assertTrue(configTypes.containsKey("mapred-queue-acls"));
+ }
+
+ @Test
+ public void testCycleDetection() throws Exception {
+ ActionMetadata actionMetadata = createNiceMock(ActionMetadata.class);
+ OsFamily osFamily = createNiceMock(OsFamily.class);
+ replay(actionMetadata);
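+ // each resource tree contains stack/service definitions whose extends references form a cycle,
+ // which parsing must detect and reject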
+ try {
+ new StackManager(new File("./src/test/resources/stacks_with_cycle/"),
+ new StackContext(null, actionMetadata, osFamily));
+ fail("Expected exception due to cyclic stack");
+ } catch (AmbariException e) {
+ // expected
+ assertEquals("Cycle detected while parsing stack definition", e.getMessage());
+ }
+
+ try {
+ new StackManager(new File("./src/test/resources/stacks_with_cycle2/"),
+ new StackContext(null, actionMetadata, osFamily));
+ fail("Expected exception due to cyclic stack");
+ } catch (AmbariException e) {
+ // expected
+ assertEquals("Cycle detected while parsing stack definition", e.getMessage());
+ }
+ }
+
+ @Test
+ public void testExcludedConfigTypes() {
+ StackInfo stack = stackManager.getStack("HDP", "2.0.8");
+ ServiceInfo service = stack.getService("HBASE");
+ assertFalse(service.hasConfigType("global"));
+ Map<String, Map<String, Map<String, String>>> configTypes = service.getConfigTypeAttributes();
+ assertEquals(2, configTypes.size());
+ assertTrue(configTypes.containsKey("hbase-site"));
+ assertTrue(configTypes.containsKey("hbase-policy"));
+
+ // test version that inherits the service via version inheritance
+ stack = stackManager.getStack("HDP", "2.1.1");
+ service = stack.getService("HBASE");
+ assertFalse(service.hasConfigType("global"));
+ configTypes = service.getConfigTypeAttributes();
+ assertEquals(2, configTypes.size());
+ assertTrue(configTypes.containsKey("hbase-site"));
+ assertTrue(configTypes.containsKey("hbase-policy"));
+ assertFalse(configTypes.containsKey("global"));
+
+ // test version that inherits the service explicit service extension
+ // the new version also excludes hbase-policy
+ stack = stackManager.getStack("OTHER", "2.0");
+ service = stack.getService("HBASE");
+ assertFalse(service.hasConfigType("global"));
+ configTypes = service.getConfigTypeAttributes();
+ assertEquals(1, configTypes.size());
+ assertTrue(configTypes.containsKey("hbase-site"));
+ }
+
+ @Test
+ public void testHDFSServiceContainsMetricsFile() throws Exception {
+ StackInfo stack = stackManager.getStack("HDP", "2.0.6");
+ ServiceInfo hdfsService = stack.getService("HDFS");
+
+ assertEquals("HDFS", hdfsService.getName());
+ assertNotNull(hdfsService.getMetricsFile());
+ }
+
+ /**
+ * This test ensures the service status check is added into the action metadata when
+ * the stack has no parent and is the only stack in the stack family
+ */
+ @Test
+ public void testGetServiceInfoFromSingleStack() throws Exception {
+ dao = createNiceMock(MetainfoDAO.class);
+ actionMetadata = createNiceMock(ActionMetadata.class);
+ osFamily = createNiceMock(OsFamily.class);
+
+ // ensure that service check is added for HDFS
+ actionMetadata.addServiceCheckAction("HDFS");
+ replay(dao, actionMetadata, osFamily);
+ StackManager stackManager = new StackManager(
+ new File("./src/test/resources/single_stack".replace("/", File.separator)),
+ new StackContext(dao, actionMetadata, osFamily));
+
+ Collection<StackInfo> stacks = stackManager.getStacks();
+ assertEquals(1, stacks.size());
+ assertNotNull(stacks.iterator().next().getService("HDFS"));
+
+ verify(dao, actionMetadata, osFamily);
+ }
+
+ //todo: component override assertions
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/state/PropertyInfoTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/PropertyInfoTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/PropertyInfoTest.java
index c4eff8c..e8a2a8d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/PropertyInfoTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/PropertyInfoTest.java
@@ -17,46 +17,71 @@
*/
package org.apache.ambari.server.state;
-import org.apache.ambari.server.api.util.StackExtensionHelper;
-import org.apache.ambari.server.state.stack.ConfigurationXml;
+import org.apache.ambari.server.state.stack.StackMetainfoXml;
import org.junit.Test;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
-import java.io.File;
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.Unmarshaller;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
import static org.junit.Assert.*;
public class PropertyInfoTest {
@Test
- public void testGetAttributesMap() throws Exception {
- Map<String, String> attributes;
- File configFile = new File("./src/test/resources/stacks/HDP/2.0.8/services/HDFS/configuration/hdfs-site.xml");
- ConfigurationXml configuration = StackExtensionHelper.unmarshal(ConfigurationXml.class, configFile);
- List<PropertyInfo> properties = configuration.getProperties();
- PropertyInfo dfsNameDir = properties.get(0);
- assertNotNull(dfsNameDir);
- assertEquals("dfs.name.dir", dfsNameDir.getName());
- attributes = dfsNameDir.getAttributesMap();
- assertEquals(1, attributes.size());
- assertTrue(attributes.containsKey("final"));
- assertEquals("true", attributes.get("final"));
-
- PropertyInfo dfsSupportAppend = properties.get(1);
- assertNotNull(dfsSupportAppend);
- assertEquals("dfs.support.append", dfsSupportAppend.getName());
- attributes = dfsSupportAppend.getAttributesMap();
+ public void testProperty() {
+ PropertyInfo property = new PropertyInfo();
+ property.setName("name");
+ property.setValue("value");
+ property.setDescription("desc");
+ property.setFilename("filename");
+
+ assertEquals("name", property.getName());
+ assertEquals("value", property.getValue());
+ assertEquals("desc", property.getDescription());
+ assertEquals("filename", property.getFilename());
+ }
+
+ @Test
+ public void testAttributes() throws Exception {
+ PropertyInfo property = new PropertyInfo();
+
+ List<Element> elements = new ArrayList<Element>();
+ Element e1 = createNiceMock(Element.class);
+ Element e2 = createNiceMock(Element.class);
+ Node n1 = createNiceMock(Node.class);
+ Node n2 = createNiceMock(Node.class);
+
+ elements.add(e1);
+ elements.add(e2);
+
+ // set mock expectations
+ expect(e1.getTagName()).andReturn("foo").anyTimes();
+ expect(e1.getFirstChild()).andReturn(n1).anyTimes();
+ expect(n1.getNodeValue()).andReturn("value1").anyTimes();
+
+ expect(e2.getTagName()).andReturn("bar").anyTimes();
+ expect(e2.getFirstChild()).andReturn(n2).anyTimes();
+ expect(n2.getNodeValue()).andReturn("value2").anyTimes();
+
+ replay(e1, e2, n1, n2);
+
+ // set attributes
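+ // (propertyAttributes would normally be populated from the child elements of a <property> definition)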
+ Field f = property.getClass().getDeclaredField("propertyAttributes");
+ f.setAccessible(true);
+ f.set(property, elements);
+
+ Map<String, String> attributes = property.getAttributesMap();
assertEquals(2, attributes.size());
- assertTrue(attributes.containsKey("final"));
- assertEquals("true", attributes.get("final"));
- assertTrue(attributes.containsKey("deletable"));
- assertEquals("false", attributes.get("deletable"));
-
- PropertyInfo dfsWebhdfsEnabled = properties.get(2);
- assertNotNull(dfsWebhdfsEnabled);
- assertEquals("dfs.webhdfs.enabled", dfsWebhdfsEnabled.getName());
- attributes = dfsWebhdfsEnabled.getAttributesMap();
- assertEquals(0, attributes.size());
+ assertEquals("value1", attributes.get("foo"));
+ assertEquals("value2", attributes.get("bar"));
}
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/bad-stacks/HDP/0.1/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/bad-stacks/HDP/0.1/repos/repoinfo.xml b/ambari-server/src/test/resources/bad-stacks/HDP/0.1/repos/repoinfo.xml
index 9ea91b8..6948ae7 100644
--- a/ambari-server/src/test/resources/bad-stacks/HDP/0.1/repos/repoinfo.xml
+++ b/ambari-server/src/test/resources/bad-stacks/HDP/0.1/repos/repoinfo.xml
@@ -16,42 +16,11 @@
limitations under the License.
-->
<reposinfo>
- <os type="centos6, redhat6">
+ <os family="redhat6">
<repo>
- <baseurl>http://public-repo-1.hortonworks.com/HDP-1.1.1.16/repos/centos6</baseurl>
- <repoid>HDP-1.1.1.16</repoid>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
<reponame>HDP</reponame>
</repo>
- <repo>
- <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.15/repos/centos6</baseurl>
- <repoid>HDP-UTILS-1.1.0.15</repoid>
- <reponame>HDP-UTILS</reponame>
- <mirrorslist></mirrorslist>
- </repo>
- <repo>
- <baseurl></baseurl>
- <repoid>epel</repoid>
- <reponame>epel</reponame>
- <mirrorslist><![CDATA[https://mirrors.fedoraproject.org/metalink?repo=epel-6&arch=$basearch]]></mirrorslist>
- </repo>
- </os>
- <os type="centos5">
- <repo>
- <baseurl>http://public-repo-1.hortonworks.com/HDP-1.1.1.16/repos/centos5</baseurl>
- <repoid>HDP-1.1.1.16</repoid>
- <reponame>HDP</reponame>
- </repo>
- <repo>
- <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.15/repos/centos5</baseurl>
- <repoid>HDP-UTILS-1.1.0.15</repoid>
- <reponame>HDP-UTILS</reponame>
- <mirrorslist></mirrorslist>
- </repo>
- <repo>
- <baseurl></baseurl>
- <repoid>epel</repoid>
- <reponame>epel</reponame>
- <mirrorslist><![CDATA[https://mirrors.fedoraproject.org/metalink?repo=epel-5&arch=$basearch]]></mirrorslist>
- </repo>
</os>
</reposinfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/metainfo.xml
index 3bfd4f7..35a4d05 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HIVE/metainfo.xml
@@ -24,9 +24,7 @@
<displayName>Hive</displayName>
<comment>Data warehouse system for ad-hoc queries & analysis of large datasets and table & storage management service</comment>
<version>0.11.0.2.0.5.0</version>
-
<components>
-
<component>
<name>HIVE_METASTORE</name>
<displayName>Hive Metastore</displayName>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/YARN/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/YARN/metainfo.xml
index 08f9949..beee6a0 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/YARN/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/YARN/metainfo.xml
@@ -161,6 +161,10 @@
<config-type>mapred-site</config-type>
<config-type>mapred-queue-acls</config-type>
</configuration-dependencies>
+
+ <excluded-config-types>
+ <config-type>global</config-type>
+ </excluded-config-types>
</service>
</services>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/HBASE/configuration/hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/HBASE/configuration/hbase-site.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/HBASE/configuration/hbase-site.xml
new file mode 100644
index 0000000..4270410
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/HBASE/configuration/hbase-site.xml
@@ -0,0 +1,356 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+ <property>
+ <name>hbase.rootdir</name>
+ <value></value>
+ <description>The directory shared by region servers and into
+ which HBase persists. The URL should be 'fully-qualified'
+ to include the filesystem scheme. For example, to specify the
+ HDFS directory '/hbase' where the HDFS instance's namenode is
+ running at namenode.example.org on port 9000, set this value to:
+ hdfs://namenode.example.org:9000/hbase. By default HBase writes
+ into /tmp. Change this configuration else all data will be lost
+ on machine restart.
+ </description>
+ </property>
+ <property>
+ <name>hbase.cluster.distributed</name>
+ <value>true</value>
+ <description>The mode the cluster will be in. Possible values are
+ false for standalone mode and true for distributed mode. If
+ false, startup will run all HBase and ZooKeeper daemons together
+ in the one JVM.
+ </description>
+ </property>
+ <property>
+ <name>hbase.tmp.dir</name>
+ <value></value>
+ <description>Temporary directory on the local filesystem.
+ Change this setting to point to a location more permanent
+ than '/tmp' (The '/tmp' directory is often cleared on
+ machine restart).
+ </description>
+ </property>
+ <property>
+ <name>hbase.master.info.bindAddress</name>
+ <value></value>
+ <description>The bind address for the HBase Master web UI
+ </description>
+ </property>
+ <property>
+ <name>hbase.master.info.port</name>
+ <value></value>
+ <description>The port for the HBase Master web UI.</description>
+ </property>
+ <property>
+ <name>hbase.regionserver.info.port</name>
+ <value></value>
+ <description>The port for the HBase RegionServer web UI.</description>
+ </property>
+ <property>
+ <name>hbase.regionserver.global.memstore.upperLimit</name>
+ <value></value>
+ <description>Maximum size of all memstores in a region server before new
+ updates are blocked and flushes are forced. Defaults to 40% of heap
+ </description>
+ </property>
+ <property>
+ <name>hbase.regionserver.handler.count</name>
+ <value></value>
+ <description>Count of RPC Listener instances spun up on RegionServers.
+ Same property is used by the Master for count of master handlers.
+ Default is 10.
+ </description>
+ </property>
+ <property>
+ <name>hbase.hregion.majorcompaction</name>
+ <value></value>
+ <description>The time (in milliseconds) between 'major' compactions of all
+ HStoreFiles in a region. Default: 1 day.
+ Set to 0 to disable automated major compactions.
+ </description>
+ </property>
+
+ <property>
+ <name>hbase.regionserver.global.memstore.lowerLimit</name>
+ <value></value>
+ <description>When memstores are being forced to flush to make room in
+ memory, keep flushing until we hit this mark. Defaults to 35% of heap.
+ This value equal to hbase.regionserver.global.memstore.upperLimit causes
+ the minimum possible flushing to occur when updates are blocked due to
+ memstore limiting.
+ </description>
+ </property>
+ <property>
+ <name>hbase.hregion.memstore.block.multiplier</name>
+ <value></value>
+ <description>Block updates if memstore has hbase.hregion.memstore.block.multiplier
+ time hbase.hregion.flush.size bytes. Useful preventing
+ runaway memstore during spikes in update traffic. Without an
+ upper-bound, memstore fills such that when it flushes the
+ resultant flush files take a long time to compact or split, or
+ worse, we OOME
+ </description>
+ </property>
+ <property>
+ <name>hbase.hregion.memstore.flush.size</name>
+ <value></value>
+ <description>
+ Memstore will be flushed to disk if size of the memstore
+ exceeds this number of bytes. Value is checked by a thread that runs
+ every hbase.server.thread.wakefrequency.
+ </description>
+ </property>
+ <property>
+ <name>hbase.hregion.memstore.mslab.enabled</name>
+ <value></value>
+ <description>
+ Enables the MemStore-Local Allocation Buffer,
+ a feature which works to prevent heap fragmentation under
+ heavy write loads. This can reduce the frequency of stop-the-world
+ GC pauses on large heaps.
+ </description>
+ </property>
+ <property>
+ <name>hbase.hregion.max.filesize</name>
+ <value></value>
+ <description>
+ Maximum HStoreFile size. If any one of a column families' HStoreFiles has
+ grown to exceed this value, the hosting HRegion is split in two.
+ Default: 1G.
+ </description>
+ </property>
+ <property>
+ <name>hbase.client.scanner.caching</name>
+ <value></value>
+ <description>Number of rows that will be fetched when calling next
+ on a scanner if it is not served from (local, client) memory. Higher
+ caching values will enable faster scanners but will eat up more memory
+ and some calls of next may take longer and longer times when the cache is empty.
+ Do not set this value such that the time between invocations is greater
+ than the scanner timeout; i.e. hbase.regionserver.lease.period
+ </description>
+ </property>
+ <property>
+ <name>zookeeper.session.timeout</name>
+ <value>30000</value>
+ <description>ZooKeeper session timeout.
+ HBase passes this to the zk quorum as suggested maximum time for a
+ session (This setting becomes zookeeper's 'maxSessionTimeout'). See
+ http://hadoop.apache.org/zookeeper/docs/current/zookeeperProgrammers.html#ch_zkSessions
+ "The client sends a requested timeout, the server responds with the
+ timeout that it can give the client. " In milliseconds.
+ </description>
+ </property>
+ <property>
+ <name>hbase.client.keyvalue.maxsize</name>
+ <value></value>
+ <description>Specifies the combined maximum allowed size of a KeyValue
+ instance. This is to set an upper boundary for a single entry saved in a
+ storage file. Since they cannot be split it helps avoiding that a region
+ cannot be split any further because the data is too large. It seems wise
+ to set this to a fraction of the maximum region size. Setting it to zero
+ or less disables the check.
+ </description>
+ </property>
+ <property>
+ <name>hbase.hstore.compactionThreshold</name>
+ <value></value>
+ <description>
+ If more than this number of HStoreFiles in any one HStore
+ (one HStoreFile is written per flush of memstore) then a compaction
+ is run to rewrite all HStoreFiles files as one. Larger numbers
+ put off compaction but when it runs, it takes longer to complete.
+ </description>
+ </property>
+ <property>
+ <name>hbase.hstore.flush.retries.number</name>
+ <value>120</value>
+ <description>
+ The number of times the region flush operation will be retried.
+ </description>
+ </property>
+
+ <property>
+ <name>hbase.hstore.blockingStoreFiles</name>
+ <value></value>
+ <description>
+ If more than this number of StoreFiles in any one Store
+ (one StoreFile is written per flush of MemStore) then updates are
+ blocked for this HRegion until a compaction is completed, or
+ until hbase.hstore.blockingWaitTime has been exceeded.
+ </description>
+ </property>
+ <property>
+ <name>hfile.block.cache.size</name>
+ <value></value>
+ <description>
+ Percentage of maximum heap (-Xmx setting) to allocate to block cache
+ used by HFile/StoreFile. Default of 0.25 means allocate 25%.
+ Set to 0 to disable but it's not recommended.
+ </description>
+ </property>
+
+ <!-- The following properties configure authentication information for
+ HBase processes when using Kerberos security. There are no default
+ values, included here for documentation purposes -->
+ <property>
+ <name>hbase.master.keytab.file</name>
+ <value></value>
+ <description>Full path to the kerberos keytab file to use for logging in
+ the configured HMaster server principal.
+ </description>
+ </property>
+ <property>
+ <name>hbase.master.kerberos.principal</name>
+ <value></value>
+ <description>Ex. "hbase/_HOST@EXAMPLE.COM". The kerberos principal name
+ that should be used to run the HMaster process. The principal name should
+ be in the form: user/hostname@DOMAIN. If "_HOST" is used as the hostname
+ portion, it will be replaced with the actual hostname of the running
+ instance.
+ </description>
+ </property>
+ <property>
+ <name>hbase.regionserver.keytab.file</name>
+ <value></value>
+ <description>Full path to the kerberos keytab file to use for logging in
+ the configured HRegionServer server principal.
+ </description>
+ </property>
+ <property>
+ <name>hbase.regionserver.kerberos.principal</name>
+ <value></value>
+ <description>Ex. "hbase/_HOST@EXAMPLE.COM". The kerberos principal name
+ that should be used to run the HRegionServer process. The principal name
+ should be in the form: user/hostname@DOMAIN. If "_HOST" is used as the
+ hostname portion, it will be replaced with the actual hostname of the
+ running instance. An entry for this principal must exist in the file
+ specified in hbase.regionserver.keytab.file
+ </description>
+ </property>
+
+ <!-- Additional configuration specific to HBase security -->
+ <property>
+ <name>hbase.superuser</name>
+ <value>hbase</value>
+ <description>List of users or groups (comma-separated), who are allowed
+ full privileges, regardless of stored ACLs, across the cluster.
+ Only used when HBase security is enabled.
+ </description>
+ </property>
+
+ <property>
+ <name>hbase.security.authentication</name>
+ <value>simple</value>
+ </property>
+
+ <property>
+ <name>hbase.security.authorization</name>
+ <value>false</value>
+ <description>Enables HBase authorization. Set the value of this property to false to disable HBase authorization.
+ </description>
+ </property>
+
+ <property>
+ <name>hbase.coprocessor.region.classes</name>
+ <value></value>
+ <description>A comma-separated list of Coprocessors that are loaded by
+ default on all tables. For any override coprocessor method, these classes
+ will be called in order. After implementing your own Coprocessor, just put
+ it in HBase's classpath and add the fully qualified class name here.
+ A coprocessor can also be loaded on demand by setting HTableDescriptor.
+ </description>
+ </property>
+
+ <property>
+ <name>hbase.coprocessor.master.classes</name>
+ <value></value>
+ <description>A comma-separated list of
+ org.apache.hadoop.hbase.coprocessor.MasterObserver coprocessors that are
+ loaded by default on the active HMaster process. For any implemented
+ coprocessor methods, the listed classes will be called in order. After
+ implementing your own MasterObserver, just put it in HBase's classpath
+ and add the fully qualified class name here.
+ </description>
+ </property>
+
+ <property>
+ <name>hbase.zookeeper.property.clientPort</name>
+ <value>2181</value>
+ <description>Property from ZooKeeper's config zoo.cfg.
+ The port at which the clients will connect.
+ </description>
+ </property>
+
+ <!--
+ The following three properties are used together to create the list of
+ host:peer_port:leader_port quorum servers for ZooKeeper.
+ -->
+ <property>
+ <name>hbase.zookeeper.quorum</name>
+ <value></value>
+ <description>Comma separated list of servers in the ZooKeeper Quorum.
+ For example, "host1.mydomain.com,host2.mydomain.com,host3.mydomain.com".
+ By default this is set to localhost for local and pseudo-distributed modes
+ of operation. For a fully-distributed setup, this should be set to a full
+ list of ZooKeeper quorum servers. If HBASE_MANAGES_ZK is set in hbase-env.sh
+ this is the list of servers which we will start/stop ZooKeeper on.
+ </description>
+ </property>
+ <!-- End of properties used to generate ZooKeeper host:port quorum list. -->
+
+ <property>
+ <name>hbase.zookeeper.useMulti</name>
+ <value>true</value>
+ <description>Instructs HBase to make use of ZooKeeper's multi-update functionality.
+ This allows certain ZooKeeper operations to complete more quickly and prevents some issues
+ with rare Replication failure scenarios (see the release note of HBASE-2611 for an example).·
+ IMPORTANT: only set this to true if all ZooKeeper servers in the cluster are on version 3.4+
+ and will not be downgraded. ZooKeeper versions before 3.4 do not support multi-update and will
+ not fail gracefully if multi-update is invoked (see ZOOKEEPER-1495).
+ </description>
+ </property>
+ <property>
+ <name>zookeeper.znode.parent</name>
+ <value>/hbase-unsecure</value>
+ <description>Root ZNode for HBase in ZooKeeper. All of HBase's ZooKeeper
+ files that are configured with a relative path will go under this node.
+ By default, all of HBase's ZooKeeper file path are configured with a
+ relative path, so they will all go under this directory unless changed.
+ </description>
+ </property>
+
+ <property>
+ <name>hbase.defaults.for.version.skip</name>
+ <value>true</value>
+ <description>Disables version verification.</description>
+ </property>
+
+ <property>
+ <name>dfs.domain.socket.path</name>
+ <value>/var/lib/hadoop-hdfs/dn_socket</value>
+ <description>Path to domain socket.</description>
+ </property>
+
+</configuration>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/HBASE/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/HBASE/metainfo.xml
index 336701a..0864de8 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/HBASE/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.8/services/HBASE/metainfo.xml
@@ -74,6 +74,9 @@
</commandScript>
</customCommand>
</customCommands>
+ <excluded-config-types>
+ <config-type>global</config-type>
+ </excluded-config-types>
<configuration-dependencies>
</configuration-dependencies>
</service>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/HDP/2.1.1/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.1.1/services/PIG/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.1.1/services/PIG/metainfo.xml
index 50d3146..f310b70 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.1.1/services/PIG/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.1.1/services/PIG/metainfo.xml
@@ -20,8 +20,14 @@
<services>
<service>
<name>PIG</name>
- <comment>Scripting platform for analyzing large datasets</comment>
+ <comment>Scripting platform for analyzing large datasets (Extended)</comment>
<version>0.12.1.2.1.1</version>
+ <components>
+ <component>
+ <name>NewComponent</name>
+ <category>SLAVE</category>
+ </component>
+ </components>
</service>
</services>
</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/metainfo.xml b/ambari-server/src/test/resources/stacks/OTHER/1.0/metainfo.xml
new file mode 100644
index 0000000..31716d2
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/metainfo.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <versions>
+ <active>true</active>
+ </versions>
+</metainfo>
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/repos/hdp.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/repos/hdp.json b/ambari-server/src/test/resources/stacks/OTHER/1.0/repos/hdp.json
new file mode 100644
index 0000000..fc51627
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/repos/hdp.json
@@ -0,0 +1,10 @@
+{
+ "HDP-2.1.1": {
+ "latest": {
+ "centos6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "redhat6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "oraclelinux6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "suse11": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/BUILDS/2.1.1.0-118/hdp.repo"
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/repos/repoinfo.xml b/ambari-server/src/test/resources/stacks/OTHER/1.0/repos/repoinfo.xml
new file mode 100644
index 0000000..9d8a232
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/repos/repoinfo.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<reposinfo>
+ <latest>./hdp.json</latest>
+ <os family="centos6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="centos5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="suse11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="sles11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+</reposinfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/role_command_order.json b/ambari-server/src/test/resources/stacks/OTHER/1.0/role_command_order.json
new file mode 100644
index 0000000..c45ba07
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/role_command_order.json
@@ -0,0 +1,104 @@
+{
+ "_comment" : "Record format:",
+ "_comment" : "blockedRole-blockedCommand: [blockerRole1-blockerCommand1, blockerRole2-blockerCommand2, ...]",
+ "general_deps" : {
+ "_comment" : "dependencies for all cases",
+ "NAGIOS_SERVER-INSTALL" : ["HIVE_CLIENT-INSTALL", "HCAT-INSTALL",
+ "MAPREDUCE_CLIENT-INSTALL", "OOZIE_CLIENT-INSTALL"],
+ "HBASE_MASTER-START": ["ZOOKEEPER_SERVER-START"],
+ "HBASE_REGIONSERVER-START": ["HBASE_MASTER-START"],
+ "OOZIE_SERVER-START": ["JOBTRACKER-START", "TASKTRACKER-START"],
+ "WEBHCAT_SERVER-START": ["TASKTRACKER-START", "HIVE_SERVER-START"],
+ "HIVE_METASTORE-START": ["MYSQL_SERVER-START"],
+ "HIVE_SERVER-START": ["TASKTRACKER-START", "MYSQL_SERVER-START"],
+ "HUE_SERVER-START": ["HIVE_SERVER-START", "HCAT-START", "OOZIE_SERVER-START"],
+ "FLUME_HANDLER-START": ["OOZIE_SERVER-START"],
+ "NAGIOS_SERVER-START": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START",
+ "GANGLIA_SERVER-START", "GANGLIA_MONITOR-START", "HCAT-START",
+ "HIVE_SERVER-START", "HIVE_METASTORE-START", "HUE_SERVER-START",
+ "JOBTRACKER-START", "TASKTRACKER-START", "ZOOKEEPER_SERVER-START",
+ "MYSQL_SERVER-START", "OOZIE_SERVER-START", "PIG-START", "SQOOP-START",
+ "WEBHCAT_SERVER-START", "FLUME_HANDLER-START"],
+ "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
+ "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
+ "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
+ "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
+ "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
+ "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+ "PIG_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
+ "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
+ "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],
+ "ZOOKEEPER_QUORUM_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],
+ "ZOOKEEPER_SERVER-STOP" : ["HBASE_MASTER-STOP", "HBASE_REGIONSERVER-STOP"],
+ "HBASE_MASTER-STOP": ["HBASE_REGIONSERVER-STOP"],
+ "TASKTRACKER-UPGRADE": ["JOBTRACKER-UPGRADE"],
+ "MAPREDUCE_CLIENT-UPGRADE": ["TASKTRACKER-UPGRADE", "JOBTRACKER-UPGRADE"],
+ "ZOOKEEPER_SERVER-UPGRADE": ["MAPREDUCE_CLIENT-UPGRADE"],
+ "ZOOKEEPER_CLIENT-UPGRADE": ["ZOOKEEPER_SERVER-UPGRADE"],
+ "HBASE_MASTER-UPGRADE": ["ZOOKEEPER_CLIENT-UPGRADE"],
+ "HBASE_REGIONSERVER-UPGRADE": ["HBASE_MASTER-UPGRADE"],
+ "HBASE_CLIENT-UPGRADE": ["HBASE_REGIONSERVER-UPGRADE"],
+ "HIVE_SERVER-UPGRADE" : ["HBASE_CLIENT-UPGRADE"],
+ "HIVE_METASTORE-UPGRADE" : ["HIVE_SERVER-UPGRADE"],
+ "MYSQL_SERVER-UPGRADE": ["HIVE_METASTORE-UPGRADE"],
+ "HIVE_CLIENT-UPGRADE": ["MYSQL_SERVER-UPGRADE"],
+ "HCAT-UPGRADE": ["HIVE_CLIENT-UPGRADE"],
+ "OOZIE_SERVER-UPGRADE" : ["HCAT-UPGRADE"],
+ "OOZIE_CLIENT-UPGRADE" : ["OOZIE_SERVER-UPGRADE"],
+ "WEBHCAT_SERVER-UPGRADE" : ["OOZIE_CLIENT-UPGRADE"],
+ "PIG-UPGRADE" : ["WEBHCAT_SERVER-UPGRADE"],
+ "SQOOP-UPGRADE" : ["PIG-UPGRADE"],
+ "NAGIOS_SERVER-UPGRADE" : ["SQOOP-UPGRADE"],
+ "GANGLIA_SERVER-UPGRADE" : ["NAGIOS_SERVER-UPGRADE"],
+ "GANGLIA_MONITOR-UPGRADE" : ["GANGLIA_SERVER-UPGRADE"]
+ },
+ "_comment" : "GLUSTERFS-specific dependencies",
+ "optional_glusterfs": {
+ "HBASE_MASTER-START": ["PEERSTATUS-START"],
+ "JOBTRACKER-START": ["PEERSTATUS-START"],
+ "TASKTRACKER-START": ["PEERSTATUS-START"],
+ "GLUSTERFS_SERVICE_CHECK-SERVICE_CHECK": ["PEERSTATUS-START"],
+ "JOBTRACKER-UPGRADE": ["GLUSTERFS_CLIENT-UPGRADE"]
+ },
+ "_comment" : "Dependencies that are used when GLUSTERFS is not present in cluster",
+ "optional_no_glusterfs": {
+ "SECONDARY_NAMENODE-START": ["NAMENODE-START"],
+ "RESOURCEMANAGER-START": ["NAMENODE-START", "DATANODE-START"],
+ "NODEMANAGER-START": ["NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START"],
+ "HISTORYSERVER-START": ["NAMENODE-START", "DATANODE-START"],
+ "HBASE_MASTER-START": ["NAMENODE-START", "DATANODE-START"],
+ "JOBTRACKER-START": ["NAMENODE-START", "DATANODE-START"],
+ "TASKTRACKER-START": ["NAMENODE-START", "DATANODE-START"],
+ "HIVE_SERVER-START": ["DATANODE-START"],
+ "WEBHCAT_SERVER-START": ["DATANODE-START"],
+ "NAGIOS_SERVER-START": ["NAMENODE-START", "SECONDARY_NAMENODE-START",
+ "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START", "HISTORYSERVER-START"],
+ "HDFS_SERVICE_CHECK-SERVICE_CHECK": ["NAMENODE-START", "DATANODE-START",
+ "SECONDARY_NAMENODE-START"],
+ "MAPREDUCE2_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START",
+ "RESOURCEMANAGER-START", "HISTORYSERVER-START", "YARN_SERVICE_CHECK-SERVICE_CHECK"],
+ "YARN_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
+ "RESOURCEMANAGER_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START"],
+ "PIG_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
+ "NAMENODE-STOP": ["JOBTRACKER-STOP", "TASKTRACKER-STOP", "RESOURCEMANAGER-STOP",
+ "NODEMANAGER-STOP", "HISTORYSERVER-STOP", "HBASE_MASTER-STOP"],
+ "DATANODE-STOP": ["JOBTRACKER-STOP", "TASKTRACKER-STOP", "RESOURCEMANAGER-STOP",
+ "NODEMANAGER-STOP", "HISTORYSERVER-STOP", "HBASE_MASTER-STOP"],
+ "SECONDARY_NAMENODE-UPGRADE": ["NAMENODE-UPGRADE"],
+ "DATANODE-UPGRADE": ["SECONDARY_NAMENODE-UPGRADE"],
+ "HDFS_CLIENT-UPGRADE": ["DATANODE-UPGRADE"],
+ "JOBTRACKER-UPGRADE": ["HDFS_CLIENT-UPGRADE"]
+ },
+ "_comment" : "Dependencies that are used in HA NameNode cluster",
+ "namenode_optional_ha": {
+ "NAMENODE-START": ["JOURNALNODE-START", "ZOOKEEPER_SERVER-START"],
+ "ZKFC-START": ["NAMENODE-START"],
+ "NAGIOS_SERVER-START": ["ZKFC-START", "JOURNALNODE-START"],
+ "HDFS_SERVICE_CHECK-SERVICE_CHECK": ["ZKFC-START"]
+ },
+ "_comment" : "Dependencies that are used in ResourceManager HA cluster",
+ "resourcemanager_optional_ha" : {
+ "RESOURCEMANAGER-START": ["ZOOKEEPER_SERVER-START"]
+ }
+}
+
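A minimal sketch (plain JDK, not Ambari's actual scheduler code) of how a
"blockedRole-blockedCommand" -> [blockerRole-blockerCommand, ...] mapping like
the one above can be consulted once it has been loaded:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RoleCommandOrderSketch {

  // Two entries copied from the general_deps section of the test file above.
  private final Map<String, List<String>> generalDeps = new HashMap<String, List<String>>();

  public RoleCommandOrderSketch() {
    generalDeps.put("HBASE_MASTER-START", Arrays.asList("ZOOKEEPER_SERVER-START"));
    generalDeps.put("HBASE_REGIONSERVER-START", Arrays.asList("HBASE_MASTER-START"));
  }

  // True if the blocked role-command must wait for the blocker role-command.
  public boolean isBlockedBy(String blocked, String blocker) {
    List<String> blockers = generalDeps.get(blocked);
    return blockers != null && blockers.contains(blocker);
  }

  public static void main(String[] args) {
    RoleCommandOrderSketch order = new RoleCommandOrderSketch();
    System.out.println(order.isBlockedBy("HBASE_REGIONSERVER-START", "HBASE_MASTER-START")); // true
  }
}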
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/configuration/hdfs-site.xml b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/configuration/hdfs-site.xml
new file mode 100644
index 0000000..c40fbd0
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/configuration/hdfs-site.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<configuration>
+
+ <property>
+ <name>dfs.name.dir</name>
+ <value></value>
+ <description>Determines where on the local filesystem the DFS name node
+ should store the name table.</description>
+ <final>true</final>
+ </property>
+
+ <property>
+ <name>dfs.support.append</name>
+ <value>true</value>
+ <description>to enable dfs append</description>
+ <final>true</final>
+ <deletable>false</deletable>
+ </property>
+
+ <property>
+ <name>dfs.webhdfs.enabled</name>
+ <value>true</value>
+ <description>to enable webhdfs</description>
+ </property>
+
+</configuration>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/metainfo.xml b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/metainfo.xml
new file mode 100644
index 0000000..d3bef74
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/metainfo.xml
@@ -0,0 +1,146 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <services>
+ <service>
+ <name>HDFS</name>
+ <comment>Apache Hadoop Distributed File System</comment>
+ <version>2.1.0.2.0.6.0</version>
+
+ <components>
+ <component>
+ <name>NAMENODE</name>
+ <category>MASTER</category>
+ <commandScript>
+ <script>scripts/namenode.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ <customCommands>
+ <customCommand>
+ <name>DECOMMISSION</name>
+ <commandScript>
+ <script>scripts/namenode_dec_overr.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </customCommand>
+ <customCommand>
+ <name>YET_ANOTHER_CHILD_COMMAND</name>
+ <commandScript>
+ <script>scripts/yet_another_child_command.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </customCommand>
+ </customCommands>
+ </component>
+
+ <component>
+ <name>DATANODE</name>
+ <category>SLAVE</category>
+ <commandScript>
+ <script>scripts/datanode.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </component>
+
+ <component>
+ <name>SECONDARY_NAMENODE</name>
+ <category>MASTER</category>
+ <commandScript>
+ <script>scripts/snamenode.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </component>
+
+ <component>
+ <name>HDFS_CLIENT</name>
+ <category>CLIENT</category>
+ <commandScript>
+ <script>scripts/hdfs_client_overridden.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </component>
+
+ <component>
+ <name>JOURNALNODE</name>
+ <category>MASTER</category>
+ <commandScript>
+ <script>scripts/journalnode.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </component>
+
+ <component>
+ <name>ZKFC</name>
+ <category>SLAVE</category>
+ <commandScript>
+ <script>scripts/zkfc_slave.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </component>
+ </components>
+
+ <osSpecifics>
+ <osSpecific>
+ <osFamily>any</osFamily>
+ <packages>
+ <package>
+ <name>child-package-def</name>
+ </package>
+ </packages>
+ </osSpecific>
+ </osSpecifics>
+
+ <commandScript>
+ <script>scripts/service_check_2.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>300</timeout>
+ </commandScript>
+
+ <customCommands>
+ <customCommand>
+ <name>RESTART</name>
+ <commandScript>
+ <script>scripts/restart_child.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </customCommand>
+ <customCommand>
+ <name>YET_ANOTHER_CHILD_SRV_COMMAND</name>
+ <commandScript>
+ <script>scripts/yet_another_child_srv_command.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </customCommand>
+ </customCommands>
+
+ <configuration-dependencies>
+ </configuration-dependencies>
+ </service>
+ </services>
+</metainfo>
[08/11] ambari git commit: AMBARI-7175. Add explicit stack service inheritance
Posted by js...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
index c9794d8..dcfd00f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
@@ -207,6 +207,10 @@ public class ComponentInfo {
this.dependencies = dependencies;
}
+ public void setAutoDeploy(AutoDeployInfo autoDeploy) {
+ this.autoDeploy = autoDeploy;
+ }
+
public AutoDeployInfo getAutoDeploy() {
return autoDeploy;
}
@@ -226,4 +230,48 @@ public class ComponentInfo {
public void setClientsToUpdateConfigs(List<String> clientsToUpdateConfigs) {
this.clientsToUpdateConfigs = clientsToUpdateConfigs;
}
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ ComponentInfo that = (ComponentInfo) o;
+
+ if (deleted != that.deleted) return false;
+ if (autoDeploy != null ? !autoDeploy.equals(that.autoDeploy) : that.autoDeploy != null) return false;
+ if (cardinality != null ? !cardinality.equals(that.cardinality) : that.cardinality != null) return false;
+ if (category != null ? !category.equals(that.category) : that.category != null) return false;
+ if (clientConfigFiles != null ? !clientConfigFiles.equals(that.clientConfigFiles) : that.clientConfigFiles != null)
+ return false;
+ if (commandScript != null ? !commandScript.equals(that.commandScript) : that.commandScript != null) return false;
+ if (configDependencies != null ? !configDependencies.equals(that.configDependencies) : that.configDependencies != null)
+ return false;
+ if (customCommands != null ? !customCommands.equals(that.customCommands) : that.customCommands != null)
+ return false;
+ if (dependencies != null ? !dependencies.equals(that.dependencies) : that.dependencies != null) return false;
+ if (displayName != null ? !displayName.equals(that.displayName) : that.displayName != null) return false;
+ if (name != null ? !name.equals(that.name) : that.name != null) return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = name != null ? name.hashCode() : 0;
+ result = 31 * result + (displayName != null ? displayName.hashCode() : 0);
+ result = 31 * result + (category != null ? category.hashCode() : 0);
+ result = 31 * result + (deleted ? 1 : 0);
+ result = 31 * result + (cardinality != null ? cardinality.hashCode() : 0);
+ result = 31 * result + (commandScript != null ? commandScript.hashCode() : 0);
+ result = 31 * result + (clientConfigFiles != null ? clientConfigFiles.hashCode() : 0);
+ result = 31 * result + (customCommands != null ? customCommands.hashCode() : 0);
+ result = 31 * result + (dependencies != null ? dependencies.hashCode() : 0);
+ result = 31 * result + (autoDeploy != null ? autoDeploy.hashCode() : 0);
+ result = 31 * result + (configDependencies != null ? configDependencies.hashCode() : 0);
+ return result;
+ }
}
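The value-based equals()/hashCode() added above let component definitions be
compared and de-duplicated in hash-based collections, presumably so that
definitions resolved from a parent stack and a child stack can be compared by
value (for example in tests). A generic illustration of the contract (the Def
class is hypothetical, not Ambari code):

import java.util.HashSet;
import java.util.Set;

public class ValueEqualityDemo {

  static final class Def {
    final String name;
    final String category;

    Def(String name, String category) {
      this.name = name;
      this.category = category;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      Def that = (Def) o;
      if (name != null ? !name.equals(that.name) : that.name != null) return false;
      if (category != null ? !category.equals(that.category) : that.category != null) return false;
      return true;
    }

    @Override
    public int hashCode() {
      int result = name != null ? name.hashCode() : 0;
      result = 31 * result + (category != null ? category.hashCode() : 0);
      return result;
    }
  }

  public static void main(String[] args) {
    Set<Def> merged = new HashSet<Def>();
    merged.add(new Def("NAMENODE", "MASTER")); // definition inherited from the parent stack
    merged.add(new Def("NAMENODE", "MASTER")); // identical definition in the child stack
    System.out.println(merged.size()); // prints 1: equal definitions collapse to one entry
  }
}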
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index e15a62a..d4cbd4e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -417,13 +417,13 @@ public class ConfigHelper {
* @param propertyName
*/
public Set<String> findConfigTypesByPropertyName(StackId stackId, String propertyName, String clusterName) throws AmbariException {
- StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+ StackInfo stack = ambariMetaInfo.getStack(stackId.getStackName(),
stackId.getStackVersion());
Set<String> result = new HashSet<String>();
for(Service service : clusters.getCluster(clusterName).getServices().values()) {
- Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), service.getName());
+ Set<PropertyInfo> stackProperties = ambariMetaInfo.getServiceProperties(stack.getName(), stack.getVersion(), service.getName());
Set<PropertyInfo> stackLevelProperties = ambariMetaInfo.getStackProperties(stack.getName(), stack.getVersion());
stackProperties.addAll(stackLevelProperties);
@@ -440,18 +440,17 @@ public class ConfigHelper {
}
public Set<String> getPropertyValuesWithPropertyType(StackId stackId, PropertyType propertyType, Cluster cluster) throws AmbariException {
- StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+ StackInfo stack = ambariMetaInfo.getStack(stackId.getStackName(),
stackId.getStackVersion());
Set<String> result = new HashSet<String>();
for(Service service : cluster.getServices().values()) {
- Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), service.getName());
-
- for (PropertyInfo stackProperty : stackProperties) {
- if(stackProperty.getPropertyTypes().contains(propertyType)) {
- String stackPropertyConfigType = fileNameToConfigType(stackProperty.getFilename());
- result.add(cluster.getDesiredConfigByType(stackPropertyConfigType).getProperties().get(stackProperty.getName()));
+ Set<PropertyInfo> serviceProperties = ambariMetaInfo.getServiceProperties(stack.getName(), stack.getVersion(), service.getName());
+ for (PropertyInfo serviceProperty : serviceProperties) {
+ if(serviceProperty.getPropertyTypes().contains(propertyType)) {
+ String stackPropertyConfigType = fileNameToConfigType(serviceProperty.getFilename());
+ result.add(cluster.getDesiredConfigByType(stackPropertyConfigType).getProperties().get(serviceProperty.getName()));
}
}
}
@@ -470,15 +469,15 @@ public class ConfigHelper {
public String getPropertyValueFromStackDefenitions(Cluster cluster, String configType, String propertyName) throws AmbariException {
StackId stackId = cluster.getCurrentStackVersion();
- StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+ StackInfo stack = ambariMetaInfo.getStack(stackId.getStackName(),
stackId.getStackVersion());
- for(ServiceInfo serviceInfo:stack.getServices()) {
- Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), serviceInfo.getName());
- Set<PropertyInfo> stackLevelProperties = ambariMetaInfo.getStackProperties(stack.getName(), stack.getVersion());
- stackProperties.addAll(stackLevelProperties);
+ for(ServiceInfo serviceInfo:stack.getServices()) {
+ Set<PropertyInfo> serviceProperties = ambariMetaInfo.getServiceProperties(stack.getName(), stack.getVersion(), serviceInfo.getName());
+ Set<PropertyInfo> stackProperties = ambariMetaInfo.getStackProperties(stack.getName(), stack.getVersion());
+ serviceProperties.addAll(stackProperties);
- for (PropertyInfo stackProperty : stackProperties) {
+ for (PropertyInfo stackProperty : serviceProperties) {
String stackPropertyConfigType = fileNameToConfigType(stackProperty.getFilename());
if(stackProperty.getName().equals(propertyName) && stackPropertyConfigType.equals(configType)) {
@@ -493,13 +492,12 @@ public class ConfigHelper {
public ServiceInfo getPropertyOwnerService(Cluster cluster, String configType, String propertyName) throws AmbariException {
StackId stackId = cluster.getCurrentStackVersion();
- StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
- stackId.getStackVersion());
+ StackInfo stack = ambariMetaInfo.getStack(stackId.getStackName(), stackId.getStackVersion());
for(ServiceInfo serviceInfo:stack.getServices()) {
- Set<PropertyInfo> stackProperties = ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), serviceInfo.getName());
+ Set<PropertyInfo> serviceProperties = ambariMetaInfo.getServiceProperties(stack.getName(), stack.getVersion(), serviceInfo.getName());
- for (PropertyInfo stackProperty : stackProperties) {
+ for (PropertyInfo stackProperty : serviceProperties) {
String stackPropertyConfigType = fileNameToConfigType(stackProperty.getFilename());
if(stackProperty.getName().equals(propertyName) && stackPropertyConfigType.equals(configType)) {
@@ -514,16 +512,14 @@ public class ConfigHelper {
public Set<PropertyInfo> getServiceProperties(Cluster cluster, String serviceName) throws AmbariException {
StackId stackId = cluster.getCurrentStackVersion();
- StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
- stackId.getStackVersion());
+ StackInfo stack = ambariMetaInfo.getStack(stackId.getStackName(), stackId.getStackVersion());
- return ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), serviceName);
+ return ambariMetaInfo.getServiceProperties(stack.getName(), stack.getVersion(), serviceName);
}
public Set<PropertyInfo> getStackProperties(Cluster cluster) throws AmbariException {
StackId stackId = cluster.getCurrentStackVersion();
- StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
- stackId.getStackVersion());
+ StackInfo stack = ambariMetaInfo.getStack(stackId.getStackName(), stackId.getStackVersion());
return ambariMetaInfo.getStackProperties(stack.getName(), stack.getVersion());
}
@@ -654,14 +650,14 @@ public class ConfigHelper {
if (!actual.containsKey(type)) {
// desired is set, but actual is not
- if (!serviceInfo.hasConfigType(type)) {
+ if (!serviceInfo.hasConfigDependency(type)) {
stale = componentInfo != null && componentInfo.hasConfigType(type);
} else if (type.equals(Configuration.GLOBAL_CONFIG_TAG)) {
// find out if the keys are stale by first checking the target service,
// then all services
Collection<String> keys = mergeKeyNames(cluster, type, tags.values());
- if (serviceInfo.hasPropertyFor(type, keys) || !hasPropertyFor(stackId, type, keys)) {
+ if (serviceInfo.hasDependencyAndPropertyFor(type, keys) || !hasPropertyFor(stackId, type, keys)) {
stale = true;
}
} else {
@@ -680,11 +676,11 @@ public class ConfigHelper {
// to the service
Collection<String> changed = findChangedKeys(cluster, type,
tags.values(), actualTags.values());
- if (serviceInfo.hasPropertyFor(type, changed)) {
+ if (serviceInfo.hasDependencyAndPropertyFor(type, changed)) {
stale = true;
}
} else {
- stale = serviceInfo.hasConfigType(type) || componentInfo.hasConfigType(type);
+ stale = serviceInfo.hasConfigDependency(type) || componentInfo.hasConfigType(type);
}
}
}
@@ -726,7 +722,7 @@ public class ConfigHelper {
for (ServiceInfo svc : ambariMetaInfo.getServices(stack.getStackName(),
stack.getStackVersion()).values()) {
- if (svc.hasPropertyFor(type, keys))
+ if (svc.hasDependencyAndPropertyFor(type, keys))
return true;
}
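A hedged usage sketch of the accessors as renamed in this patch (getStack and
getServiceProperties); the wrapper method itself is hypothetical and only
illustrates the call sequence used throughout ConfigHelper above:

import java.util.Set;

import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.state.PropertyInfo;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.StackInfo;

public class StackPropertyLookup {

  // Resolve the stack for the given id, then fetch the property definitions of one service.
  static Set<PropertyInfo> propertiesFor(AmbariMetaInfo metaInfo, StackId stackId,
                                         String serviceName) throws AmbariException {
    StackInfo stack = metaInfo.getStack(stackId.getStackName(), stackId.getStackVersion());
    return metaInfo.getServiceProperties(stack.getName(), stack.getVersion(), serviceName);
  }
}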
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/state/DependencyInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/DependencyInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/DependencyInfo.java
index 58e6e4c..e3db662 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/DependencyInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/DependencyInfo.java
@@ -143,4 +143,30 @@ public class DependencyInfo {
", auto-deploy=" + m_autoDeploy.isEnabled() +
"]";
}
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ DependencyInfo that = (DependencyInfo) o;
+
+ if (componentName != null ? !componentName.equals(that.componentName) : that.componentName != null) return false;
+ if (m_autoDeploy != null ? !m_autoDeploy.equals(that.m_autoDeploy) : that.m_autoDeploy != null) return false;
+ if (name != null ? !name.equals(that.name) : that.name != null) return false;
+ if (scope != null ? !scope.equals(that.scope) : that.scope != null) return false;
+ if (serviceName != null ? !serviceName.equals(that.serviceName) : that.serviceName != null) return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = name != null ? name.hashCode() : 0;
+ result = 31 * result + (scope != null ? scope.hashCode() : 0);
+ result = 31 * result + (serviceName != null ? serviceName.hashCode() : 0);
+ result = 31 * result + (componentName != null ? componentName.hashCode() : 0);
+ result = 31 * result + (m_autoDeploy != null ? m_autoDeploy.hashCode() : 0);
+ return result;
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
index 45ea1f9..a31e42c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceComponentImpl.java
@@ -25,6 +25,7 @@ import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;
import com.google.inject.persist.Transactional;
import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ObjectNotFoundException;
import org.apache.ambari.server.ServiceComponentHostNotFoundException;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.controller.ServiceComponentResponse;
@@ -88,10 +89,12 @@ public class ServiceComponentImpl implements ServiceComponent {
this.hostComponents = new HashMap<String, ServiceComponentHost>();
StackId stackId = service.getDesiredStackVersion();
- ComponentInfo compInfo = ambariMetaInfo.getComponentCategory(
- stackId.getStackName(), stackId.getStackVersion(), service.getName(),
- componentName);
- if (compInfo == null) {
+ try {
+ ComponentInfo compInfo = ambariMetaInfo.getComponent(stackId.getStackName(),
+ stackId.getStackVersion(), service.getName(), componentName);
+ this.isClientComponent = compInfo.isClient();
+ this.isMasterComponent = compInfo.isMaster();
+ } catch (ObjectNotFoundException e) {
throw new RuntimeException("Trying to create a ServiceComponent"
+ " not recognized in stack info"
+ ", clusterName=" + service.getCluster().getClusterName()
@@ -99,9 +102,6 @@ public class ServiceComponentImpl implements ServiceComponent {
+ ", componentName=" + componentName
+ ", stackInfo=" + stackId.getStackId());
}
- this.isClientComponent = compInfo.isClient();
- this.isMasterComponent = compInfo.isMaster();
-
init();
}
@@ -130,10 +130,13 @@ public class ServiceComponentImpl implements ServiceComponent {
}
StackId stackId = service.getDesiredStackVersion();
- ComponentInfo compInfo = ambariMetaInfo.getComponentCategory(
- stackId.getStackName(), stackId.getStackVersion(), service.getName(),
- getName());
- if (compInfo == null) {
+ try {
+ ComponentInfo compInfo = ambariMetaInfo.getComponent(
+ stackId.getStackName(), stackId.getStackVersion(), service.getName(),
+ getName());
+ this.isClientComponent = compInfo.isClient();
+ this.isMasterComponent = compInfo.isMaster();
+ } catch (ObjectNotFoundException e) {
throw new AmbariException("Trying to create a ServiceComponent"
+ " not recognized in stack info"
+ ", clusterName=" + service.getCluster().getClusterName()
@@ -141,8 +144,6 @@ public class ServiceComponentImpl implements ServiceComponent {
+ ", componentName=" + getName()
+ ", stackInfo=" + stackId.getStackId());
}
- this.isClientComponent = compInfo.isClient();
- this.isMasterComponent = compInfo.isMaster();
persisted = true;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
index bb5057f..4b4a305 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceImpl.java
@@ -107,7 +107,7 @@ public class ServiceImpl implements Service {
StackId stackId = cluster.getDesiredStackVersion();
setDesiredStackVersion(stackId);
- ServiceInfo sInfo = ambariMetaInfo.getServiceInfo(stackId.getStackName(),
+ ServiceInfo sInfo = ambariMetaInfo.getService(stackId.getStackName(),
stackId.getStackVersion(), serviceName);
isClientOnlyService = sInfo.isClientOnlyService();
@@ -145,7 +145,7 @@ public class ServiceImpl implements Service {
}
StackId stackId = getDesiredStackVersion();
- ServiceInfo sInfo = ambariMetaInfo.getServiceInfo(stackId.getStackName(),
+ ServiceInfo sInfo = ambariMetaInfo.getService(stackId.getStackName(),
stackId.getStackVersion(), getName());
isClientOnlyService = sInfo.isClientOnlyService();
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
index ae746d6..9277ec6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
@@ -74,7 +74,7 @@ public class ServiceInfo {
@XmlElementWrapper(name="excluded-config-types")
@XmlElement(name="config-type")
- private Set<String> excludedConfigTypes;
+ private Set<String> excludedConfigTypes = new HashSet<String>();
@XmlTransient
private Map<String, Map<String, Map<String, String>>> configTypes;
@@ -84,7 +84,13 @@ public class ServiceInfo {
@JsonIgnore
@XmlElement(name = "restartRequiredAfterChange")
- private Boolean restartRequiredAfterChange;
+ private Boolean restartRequiredAfterChange;
+
+ @XmlElement(name = "extends")
+ private String parent;
+
+ @XmlTransient
+ private volatile Map<String, PropertyInfo> requiredProperties;
public Boolean isRestartRequiredAfterChange() {
return restartRequiredAfterChange;
@@ -137,7 +143,7 @@ public class ServiceInfo {
@XmlElementWrapper(name="requiredServices")
@XmlElement(name="service")
- private List<String> requiredServices;
+ private List<String> requiredServices = new ArrayList<String>();
/**
* Meaning: stores subpath from stack root to exact directory, that contains
@@ -164,6 +170,14 @@ public class ServiceInfo {
this.name = name;
}
+ public String getParent() {
+ return parent;
+ }
+
+ public void setParent(String parent) {
+ this.parent = parent;
+ }
+
public String getDisplayName() {
return displayName;
}
@@ -205,7 +219,7 @@ public class ServiceInfo {
}
/**
* Finds ComponentInfo by component name
- * @param componentName
+ * @param componentName name of the component
* @return ComponentInfo componentName or null
*/
public ComponentInfo getComponentByName(String componentName){
@@ -229,65 +243,112 @@ public class ServiceInfo {
}
public ComponentInfo getClientComponent() {
- if (components == null || components.isEmpty()) {
- return null;
- }
- for (ComponentInfo compInfo : components) {
- if (compInfo.isClient()) {
- return compInfo;
+ ComponentInfo client = null;
+
+ if (components != null) {
+ for (ComponentInfo compInfo : components) {
+ if (compInfo.isClient()) {
+ client = compInfo;
+ break;
+ }
}
}
- return components.get(0);
+ return client;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
- sb.append("Service name:" + name + "\nversion:" + version +
- "\ncomment:" + comment);
+ sb.append("Service name:");
+ sb.append(name);
+ sb.append("\nversion:");
+ sb.append(version);
+ sb.append("\ncomment:");
+ sb.append(comment);
//for (PropertyInfo property : getProperties()) {
// sb.append("\tProperty name=" + property.getName() +
//"\nproperty value=" + property.getValue() + "\ndescription=" + property.getDescription());
//}
for (ComponentInfo component : getComponents()) {
sb.append("\n\n\nComponent:\n");
- sb.append("name=" + component.getName());
- sb.append("\tcategory=" + component.getCategory());
+ sb.append("name=");
+ sb.append(component.getName());
+ sb.append("\tcategory=");
+ sb.append(component.getCategory());
}
return sb.toString();
}
-
- public Map<String, Map<String, Map<String, String>>> getConfigTypes() {
- if (configTypes == null) configTypes = new HashMap<String, Map<String, Map<String, String>>>();
- return configTypes;
+
+ /**
+ * Obtain the config types associated with this service.
+ * The returned map is an unmodifiable view.
+ * @return unmodifiable map of config types associated with this service
+ */
+ public synchronized Map<String, Map<String, Map<String, String>>> getConfigTypeAttributes() {
+ return configTypes == null ?
+ Collections.<String, Map<String, Map<String, String>>>emptyMap() :
+ Collections.unmodifiableMap(configTypes);
}
- public void setConfigTypes(Map<String, Map<String, Map<String, String>>> configTypes) {
- this.configTypes = configTypes;
+ /**
+   * Add the given type and set its attributes.
+ * If the type is marked for exclusion, it will not be added.
+ *
+ * @param type configuration type
+ * @param typeAttributes attributes associated with the type
+ */
+ public synchronized void setTypeAttributes(String type, Map<String, Map<String, String>> typeAttributes) {
+ if (this.configTypes == null) {
+ configTypes = new HashMap<String, Map<String, Map<String, String>>>();
+ }
+
+ if (! excludedConfigTypes.contains(type)) {
+ configTypes.put(type, typeAttributes);
+ }
}
/**
+ * Set all types and associated attributes. Any previously existing types and
+ * attributes are removed prior to setting the new values.
+ *
+ * @param types map of type attributes
+ */
+ public synchronized void setAllConfigAttributes(Map<String, Map<String, Map<String, String>>> types) {
+ configTypes = new HashMap<String, Map<String, Map<String, String>>>();
+ for (Map.Entry<String, Map<String, Map<String, String>>> entry : types.entrySet()) {
+ setTypeAttributes(entry.getKey(), entry.getValue());
+ }
+ }
+
+ /**
+   * Determine if the service has a dependency on the provided configuration type.
* @param type the config type
- * @return <code>true</code> if the service defines the supplied type
+ * @return <code>true</code> if the service defines a dependency on the provided type
*/
- public boolean hasConfigType(String type) {
+ public boolean hasConfigDependency(String type) {
return configDependencies != null && configDependencies.contains(type);
}
/**
- * The purpose of this method is to determine if a service has a property
- * defined in a supplied set:
- * <ul>
- * <li>If the type is not defined for the service, then no property can exist.</li>
- * <li>If the type is defined, then check each supplied property for existence.</li>
- * </ul>
+ * Determine if the service contains the specified config type
+ * @param type config type to check
+ * @return true if the service has the specified config type; false otherwise
+ */
+ public boolean hasConfigType(String type) {
+ return configTypes != null && configTypes.containsKey(type);
+ }
+
+ /**
+ * Determine if the service has a dependency on the provided type and contains any of the provided properties.
+ * This can be used in determining if a property is stale.
+
* @param type the config type
* @param keyNames the names of all the config keys for the given type
* @return <code>true</code> if the config is stale
*/
- public boolean hasPropertyFor(String type, Collection<String> keyNames) {
- if (!hasConfigType(type))
+ public boolean hasDependencyAndPropertyFor(String type, Collection<String> keyNames) {
+ if (!hasConfigDependency(type))
return false;
buildConfigLayout();
@@ -371,7 +432,7 @@ public class ServiceInfo {
/**
* Exposes (and initializes on first use) map of os-specific details.
- * @return
+ * @return map of OS specific details keyed by family
*/
public Map<String, ServiceOsSpecific> getOsSpecifics() {
if (serviceOsSpecificsMap == null) {
@@ -487,4 +548,24 @@ public class ServiceInfo {
public void setExcludedConfigTypes(Set<String> excludedConfigTypes) {
this.excludedConfigTypes = excludedConfigTypes;
}
+
+ //todo: ensure that required properties are never modified...
+ public Map<String, PropertyInfo> getRequiredProperties() {
+ Map<String, PropertyInfo> result = requiredProperties;
+ if (result == null) {
+ synchronized(this) {
+ result = requiredProperties;
+ if (result == null) {
+ requiredProperties = result = new HashMap<String, PropertyInfo>();
+ List<PropertyInfo> properties = getProperties();
+ for (PropertyInfo propertyInfo : properties) {
+ if (propertyInfo.isRequireInput()) {
+ result.put(propertyInfo.getName(), propertyInfo);
+ }
+ }
+ }
+ }
+ }
+ return result;
+ }
}
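getRequiredProperties() above uses lazy initialization with double-checked
locking over a volatile field, so the map is built at most once without taking
the lock on every read. A generic, self-contained sketch of the same idiom
(class and field names are illustrative, not Ambari code):

import java.util.HashMap;
import java.util.Map;

public class LazyCacheDemo {

  private volatile Map<String, String> cache;

  public Map<String, String> get() {
    Map<String, String> result = cache;
    if (result == null) {
      synchronized (this) {
        result = cache;
        if (result == null) {
          result = new HashMap<String, String>();
          result.put("example.key", "example.value"); // stands in for the expensive build step
          cache = result;
        }
      }
    }
    return result;
  }

  public static void main(String[] args) {
    LazyCacheDemo demo = new LazyCacheDemo();
    System.out.println(demo.get());               // built on first access
    System.out.println(demo.get() == demo.get()); // true: same instance is reused
  }
}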
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java
index a143ba1..d81c182 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceOsSpecific.java
@@ -62,6 +62,28 @@ public class ServiceOsSpecific {
this.packages.addAll(packages);
}
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ ServiceOsSpecific that = (ServiceOsSpecific) o;
+
+ if (osFamily != null ? !osFamily.equals(that.osFamily) : that.osFamily != null) return false;
+ if (packages != null ? !packages.equals(that.packages) : that.packages != null) return false;
+ if (repo != null ? !repo.equals(that.repo) : that.repo != null) return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = osFamily != null ? osFamily.hashCode() : 0;
+ result = 31 * result + (repo != null ? repo.hashCode() : 0);
+ result = 31 * result + (packages != null ? packages.hashCode() : 0);
+ return result;
+ }
+
/**
* The <code>repo</code> tag. It has different set of fields compared to
* <link>org.apache.ambari.server.state.RepositoryInfo</link>,
@@ -110,6 +132,29 @@ public class ServiceOsSpecific {
return reponame;
}
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ Repo repo = (Repo) o;
+
+ if (baseurl != null ? !baseurl.equals(repo.baseurl) : repo.baseurl != null) return false;
+ if (mirrorslist != null ? !mirrorslist.equals(repo.mirrorslist) : repo.mirrorslist != null) return false;
+ if (repoid != null ? !repoid.equals(repo.repoid) : repo.repoid != null) return false;
+ if (reponame != null ? !reponame.equals(repo.reponame) : repo.reponame != null) return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = baseurl != null ? baseurl.hashCode() : 0;
+ result = 31 * result + (mirrorslist != null ? mirrorslist.hashCode() : 0);
+ result = 31 * result + (repoid != null ? repoid.hashCode() : 0);
+ result = 31 * result + (reponame != null ? reponame.hashCode() : 0);
+ return result;
+ }
}
@@ -130,6 +175,21 @@ public class ServiceOsSpecific {
}
public Package() { }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ Package that = (Package) o;
+
+ return name.equals(that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return name.hashCode();
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/state/Stack.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Stack.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Stack.java
deleted file mode 100644
index 92b799d..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Stack.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.server.state;
-
-import org.apache.ambari.server.controller.StackResponse;
-
-public class Stack {
-
- private String stackName;
-
- public Stack(String stackName) {
- setStackName(stackName);
- }
-
- public String getStackName() {
- return stackName;
- }
-
- public void setStackName(String stackName) {
- this.stackName = stackName;
- }
-
- @Override
- public int hashCode() {
- return stackName.hashCode();
- }
-
- @Override
- public boolean equals(Object obj) {
-
- if (obj == null)
- return false;
-
- if (!(obj instanceof Stack)) {
- return false;
- }
- if (this == obj) {
- return true;
- }
- Stack stack = (Stack) obj;
- return getStackName().equals(stack.getStackName());
- }
-
-
- public StackResponse convertToResponse()
- {
- return new StackResponse(getStackName());
- }
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
index 64782cc..f19cf81 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
@@ -19,11 +19,14 @@
package org.apache.ambari.server.state;
import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.ambari.server.controller.StackVersionResponse;
+import org.apache.ambari.server.state.stack.UpgradePack;
public class StackInfo implements Comparable<StackInfo>{
private String name;
@@ -32,11 +35,12 @@ public class StackInfo implements Comparable<StackInfo>{
private boolean active;
private String rcoFileLocation;
private List<RepositoryInfo> repositories;
- private List<ServiceInfo> services;
+ private Collection<ServiceInfo> services;
private String parentStackVersion;
// stack-level properties
private List<PropertyInfo> properties;
private Map<String, Map<String, Map<String, String>>> configTypes;
+ private Map<String, UpgradePack> upgradePacks;
/**
* Meaning: stores subpath from stack root to exact hooks folder for stack. These hooks are
@@ -71,12 +75,23 @@ public class StackInfo implements Comparable<StackInfo>{
this.repositories = repositories;
}
- public synchronized List<ServiceInfo> getServices() {
+ public synchronized Collection<ServiceInfo> getServices() {
if (services == null) services = new ArrayList<ServiceInfo>();
return services;
}
- public synchronized void setServices(List<ServiceInfo> services) {
+ public ServiceInfo getService(String name) {
+ Collection<ServiceInfo> services = getServices();
+ for (ServiceInfo service : services) {
+ if (service.getName().equals(name)) {
+ return service;
+ }
+ }
+ //todo: exception?
+ return null;
+ }
+
+ public synchronized void setServices(Collection<ServiceInfo> services) {
this.services = services;
}
@@ -89,14 +104,43 @@ public class StackInfo implements Comparable<StackInfo>{
this.properties = properties;
}
- public Map<String, Map<String, Map<String, String>>> getConfigTypes() {
- if (configTypes == null) configTypes = new HashMap<String, Map<String, Map<String, String>>>();
- return configTypes;
+ /**
+ * Obtain the config types associated with this stack.
+ * The returned map is an unmodifiable view.
+   * @return unmodifiable map of config types associated with this stack
+ */
+ public synchronized Map<String, Map<String, Map<String, String>>> getConfigTypeAttributes() {
+ return configTypes == null ?
+ Collections.<String, Map<String, Map<String, String>>>emptyMap() :
+ Collections.unmodifiableMap(configTypes);
}
- public void setConfigTypes(
- Map<String, Map<String, Map<String, String>>> configTypes) {
- this.configTypes = configTypes;
+
+ /**
+   * Add the given type and set its attributes.
+ *
+ * @param type configuration type
+ * @param typeAttributes attributes associated with the type
+ */
+ public synchronized void setConfigTypeAttributes(String type, Map<String, Map<String, String>> typeAttributes) {
+ if (this.configTypes == null) {
+ configTypes = new HashMap<String, Map<String, Map<String, String>>>();
+ }
+ // todo: no exclusion mechanism for stack config types
+ configTypes.put(type, typeAttributes);
+ }
+
+ /**
+ * Set all types and associated attributes. Any previously existing types and
+ * attributes are removed prior to setting the new values.
+ *
+ * @param types map of type attributes
+ */
+ public synchronized void setAllConfigAttributes(Map<String, Map<String, Map<String, String>>> types) {
+ configTypes = new HashMap<String, Map<String, Map<String, String>>>();
+ for (Map.Entry<String, Map<String, Map<String, String>>> entry : types.entrySet()) {
+ setConfigTypeAttributes(entry.getKey(), entry.getValue());
+ }
}
@Override
@@ -106,14 +150,16 @@ public class StackInfo implements Comparable<StackInfo>{
if (services != null) {
sb.append("\n\t\tService:");
for (ServiceInfo service : services) {
- sb.append("\t\t" + service.toString());
+ sb.append("\t\t");
+ sb.append(service);
}
}
if (repositories != null) {
sb.append("\n\t\tRepositories:");
for (RepositoryInfo repository : repositories) {
- sb.append("\t\t" + repository.toString());
+ sb.append("\t\t");
+ sb.append(repository.toString());
}
}
@@ -123,9 +169,7 @@ public class StackInfo implements Comparable<StackInfo>{
@Override
public int hashCode() {
- int result = 1;
- result = 31 + name.hashCode() + version.hashCode();
- return result;
+ return 31 + name.hashCode() + version.hashCode();
}
@Override
@@ -143,7 +187,7 @@ public class StackInfo implements Comparable<StackInfo>{
public StackVersionResponse convertToResponse() {
return new StackVersionResponse(getVersion(), getMinUpgradeVersion(),
- isActive(), getParentStackVersion(), getConfigTypes());
+ isActive(), getParentStackVersion(), getConfigTypeAttributes());
}
public String getMinUpgradeVersion() {
@@ -186,25 +230,47 @@ public class StackInfo implements Comparable<StackInfo>{
this.stackHooksFolder = stackHooksFolder;
}
- @Override
- public int compareTo(StackInfo o) {
- String myId = name + "-" + version;
- String oId = o.name + "-" + o.version;
- return myId.compareTo(oId);
- }
-
/**
- * @param path the path to the upgrades folder
+ * Set the path of the stack upgrade directory.
+ *
+ * @param path the path to the upgrades directory
*/
public void setUpgradesFolder(String path) {
upgradesFolder = path;
}
/**
+   * Obtain the path of the upgrades folder, or null if it has not been set.
+ *
* @return the upgrades folder, or {@code null} if not set
*/
public String getUpgradesFolder() {
return upgradesFolder;
}
+ /**
+ * Set upgrade packs.
+ *
+ * @param upgradePacks map of upgrade packs
+ */
+ public void setUpgradePacks(Map<String, UpgradePack> upgradePacks) {
+ this.upgradePacks = upgradePacks;
+ }
+
+ /**
+ * Obtain all stack upgrade packs.
+ *
+   * @return map of upgrade pack name to upgrade pack, or {@code null} if no packs have been set
+ */
+ public Map<String, UpgradePack> getUpgradePacks() {
+ return upgradePacks;
+ }
+
+
+ @Override
+ public int compareTo(StackInfo o) {
+ String myId = name + "-" + version;
+ String oId = o.name + "-" + o.version;
+ return myId.compareTo(oId);
+ }
}
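The getter/setter split introduced above means callers receive an unmodifiable
view of the config type attributes and must add entries through the owning
object. A generic sketch of that pattern (names are illustrative, not Ambari
code):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class UnmodifiableViewDemo {

  private Map<String, String> attributes;

  public synchronized Map<String, String> getAttributes() {
    return attributes == null
        ? Collections.<String, String>emptyMap()
        : Collections.unmodifiableMap(attributes);
  }

  public synchronized void setAttribute(String key, String value) {
    if (attributes == null) {
      attributes = new HashMap<String, String>();
    }
    attributes.put(key, value);
  }

  public static void main(String[] args) {
    UnmodifiableViewDemo demo = new UnmodifiableViewDemo();
    demo.setAttribute("final", "true");
    try {
      demo.getAttributes().put("final", "false"); // mutation through the view is rejected
    } catch (UnsupportedOperationException expected) {
      System.out.println("read-only view: " + demo.getAttributes());
    }
  }
}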
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index d22e250..30dceb0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -268,11 +268,11 @@ public class ClusterImpl implements Cluster {
String serviceName = entry.getKey();
ServiceInfo serviceInfo = entry.getValue();
//collect config types for service
- Set<PropertyInfo> properties = ambariMetaInfo.getProperties(desiredStackVersion.getStackName(), desiredStackVersion.getStackVersion(), serviceName);
+ Set<PropertyInfo> properties = ambariMetaInfo.getServiceProperties(desiredStackVersion.getStackName(),
+ desiredStackVersion.getStackVersion(), serviceName);
for (PropertyInfo property : properties) {
String configType = ConfigHelper.fileNameToConfigType(property.getFilename());
- if (serviceInfo.getExcludedConfigTypes() == null ||
- !serviceInfo.getExcludedConfigTypes().contains(configType)) {
+ if (serviceInfo.hasConfigType(configType)) {
serviceConfigTypes.put(serviceName, configType);
}
}
@@ -359,7 +359,7 @@ public class ClusterImpl implements Cluster {
for (ClusterServiceEntity serviceEntity : clusterEntity.getClusterServiceEntities()) {
StackId stackId = getCurrentStackVersion();
try {
- if (ambariMetaInfo.getServiceInfo(stackId.getStackName(), stackId.getStackVersion(),
+ if (ambariMetaInfo.getService(stackId.getStackName(), stackId.getStackVersion(),
serviceEntity.getServiceName()) != null) {
services.put(serviceEntity.getServiceName(), serviceFactory.createExisting(this, serviceEntity));
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
index 6e58267..e7db9d3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
@@ -30,7 +30,7 @@ import org.apache.ambari.server.api.util.TreeNodeImpl;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.internal.ResourceImpl;
import org.apache.ambari.server.controller.spi.Resource;
-import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.ServiceInfo;
import org.junit.Test;
import java.net.InetAddress;
@@ -103,9 +103,13 @@ public class ClusterBlueprintRendererTest {
AmbariManagementController controller = createMock(AmbariManagementController.class);
AmbariMetaInfo stackInfo = createNiceMock(AmbariMetaInfo.class);
+ ServiceInfo hdfsService = new ServiceInfo();
+ hdfsService.setName("HDFS");
+ ServiceInfo mrService = new ServiceInfo();
+ mrService.setName("MAPREDUCE");
- expect(stackInfo.getRequiredProperties("HDP", "1.3.3", "HDFS")).andReturn(Collections.<String, PropertyInfo>emptyMap());
- expect(stackInfo.getRequiredProperties("HDP", "1.3.3", "MAPREDUCE")).andReturn(Collections.<String, PropertyInfo>emptyMap());
+ expect(stackInfo.getService("HDP", "1.3.3", "HDFS")).andReturn(hdfsService);
+ expect(stackInfo.getService("HDP", "1.3.3", "MAPREDUCE")).andReturn(mrService);
Result result = new ResultImpl(true);
createClusterResultTree(result.getResultTree());
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
index e7b946d..4d08d6f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
@@ -18,6 +18,10 @@
package org.apache.ambari.server.api.services;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@@ -26,30 +30,33 @@ import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
-import java.lang.reflect.Method;
+import java.lang.reflect.Field;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
-import java.util.LinkedList;
import java.util.List;
-import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
-import javax.persistence.EntityManager;
-import javax.xml.bind.JAXBException;
-
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.util.Modules;
import junit.framework.Assert;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.StackAccessException;
-import org.apache.ambari.server.api.util.StackExtensionHelper;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
import org.apache.ambari.server.metadata.ActionMetadata;
+import org.apache.ambari.server.metadata.AgentAlertDefinitions;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.OrmTestHelper;
import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
+import org.apache.ambari.server.orm.dao.MetainfoDAO;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
+import org.apache.ambari.server.orm.entities.MetainfoEntity;
+import org.apache.ambari.server.stack.StackManager;
import org.apache.ambari.server.state.AutoDeployInfo;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
@@ -60,15 +67,16 @@ import org.apache.ambari.server.state.OperatingSystemInfo;
import org.apache.ambari.server.state.PropertyInfo;
import org.apache.ambari.server.state.RepositoryInfo;
import org.apache.ambari.server.state.ServiceInfo;
-import org.apache.ambari.server.state.Stack;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.alert.AlertDefinition;
import org.apache.ambari.server.state.alert.MetricSource;
+import org.apache.ambari.server.state.alert.AlertDefinitionFactory;
import org.apache.ambari.server.state.alert.PortSource;
import org.apache.ambari.server.state.alert.Reporting;
import org.apache.ambari.server.state.alert.Source;
import org.apache.ambari.server.state.stack.MetricDefinition;
+import org.apache.ambari.server.state.stack.OsFamily;
import org.apache.commons.io.FileUtils;
import org.junit.Before;
import org.junit.Rule;
@@ -78,9 +86,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.util.Modules;
+
+import javax.persistence.EntityManager;
+import javax.xml.bind.JAXBException;
public class AmbariMetaInfoTest {
@@ -112,7 +120,9 @@ public class AmbariMetaInfoTest {
private static final String HADOOP_ENV_FILE_NAME = "hadoop-env.xml";
private static final String HDFS_LOG4J_FILE_NAME = "hdfs-log4j.xml";
- private Injector injector;
+ //private Injector injector;
+
+ //todo: add fail() for cases where an exception is expected such as getService, getComponent ...
@Rule
@@ -120,22 +130,7 @@ public class AmbariMetaInfoTest {
@Before
public void before() throws Exception {
- injector = Guice.createInjector(Modules.override(
- new InMemoryDefaultTestModule()).with(new MockModule()));
-
- injector.getInstance(GuiceJpaInitializer.class);
- injector.getInstance(EntityManager.class);
-
- File stackRoot = new File("src/test/resources/stacks");
- LOG.info("Stacks file " + stackRoot.getAbsolutePath());
- metaInfo = new AmbariMetaInfo(stackRoot, new File("target/version"));
- metaInfo.injector = injector;
-
- try {
- metaInfo.init();
- } catch(Exception e) {
- LOG.info("Error in initializing ", e);
- }
+ metaInfo = createAmbariMetaInfo(new File("src/test/resources/stacks"), new File("target/version"), true);
}
public class MockModule extends AbstractModule {
@@ -145,16 +140,6 @@ public class AmbariMetaInfoTest {
}
}
- @Test
- public void getComponentCategory() throws AmbariException {
- ComponentInfo componentInfo = metaInfo.getComponentCategory(STACK_NAME_HDP,
- STACK_VERSION_HDP, SERVICE_NAME_HDFS, SERVICE_COMPONENT_NAME);
- assertNotNull(componentInfo);
- componentInfo = metaInfo.getComponentCategory(STACK_NAME_HDP,
- STACK_VERSION_HDP, SERVICE_NAME_HDFS, "DATANODE1");
- Assert.assertNotNull(componentInfo);
- assertTrue(!componentInfo.isClient());
- }
@Test
public void getRestartRequiredServicesNames() throws AmbariException {
@@ -184,11 +169,13 @@ public class AmbariMetaInfoTest {
// Scenario: user has internet and does nothing to repos via api
// use the latest
String buildDir = tmpFolder.getRoot().getAbsolutePath();
- AmbariMetaInfo ambariMetaInfo = setupTempAmbariMetaInfo(buildDir);
+ AmbariMetaInfo ambariMetaInfo = setupTempAmbariMetaInfo(buildDir, true);
// The current stack already has (HDP, 2.1.1, redhat6) with valid latest
// url
ambariMetaInfo.init();
+ waitForAllReposToBeResolved(ambariMetaInfo);
+
List<RepositoryInfo> redhat6Repo = ambariMetaInfo.getRepositories(
STACK_NAME_HDP, "2.1.1", "redhat6");
assertNotNull(redhat6Repo);
@@ -205,7 +192,7 @@ public class AmbariMetaInfoTest {
// Scenario: user has no internet and does nothing to repos via api
// use the default
String buildDir = tmpFolder.getRoot().getAbsolutePath();
- AmbariMetaInfo ambariMetaInfo = setupTempAmbariMetaInfo(buildDir);
+ AmbariMetaInfo ambariMetaInfo = setupTempAmbariMetaInfo(buildDir, true);
// The current stack already has (HDP, 2.1.1, redhat6).
// Deleting the json file referenced by the latestBaseUrl to simulate No
@@ -233,7 +220,7 @@ public class AmbariMetaInfoTest {
// Scenario: user has internet and but calls to set repos via api
// use whatever they set
String buildDir = tmpFolder.getRoot().getAbsolutePath();
- AmbariMetaInfo ambariMetaInfo = setupTempAmbariMetaInfo(buildDir);
+ TestAmbariMetaInfo ambariMetaInfo = setupTempAmbariMetaInfo(buildDir, true);
// The current stack already has (HDP, 2.1.1, redhat6)
// Updating the baseUrl
@@ -244,8 +231,19 @@ public class AmbariMetaInfoTest {
STACK_NAME_HDP + "-2.1.1");
assertEquals(newBaseUrl, repoInfo.getBaseUrl());
String prevBaseUrl = repoInfo.getDefaultBaseUrl();
+
+ // mock expectations
+ MetainfoDAO metainfoDAO = ambariMetaInfo.metaInfoDAO;
+ reset(metainfoDAO);
+ MetainfoEntity entity = createNiceMock(MetainfoEntity.class);
+ expect(metainfoDAO.findByKey("repo:/HDP/2.1.1/redhat6/HDP-2.1.1:baseurl")).andReturn(entity).atLeastOnce();
+ expect(entity.getMetainfoValue()).andReturn(newBaseUrl).atLeastOnce();
+ replay(metainfoDAO, entity);
+
ambariMetaInfo.init();
+ waitForAllReposToBeResolved(ambariMetaInfo);
+
List<RepositoryInfo> redhat6Repo = ambariMetaInfo.getRepositories(
STACK_NAME_HDP, "2.1.1", "redhat6");
assertNotNull(redhat6Repo);
@@ -266,8 +264,9 @@ public class AmbariMetaInfoTest {
public void testGetRepositoryNoInternetUpdatedBaseUrl() throws Exception {
// Scenario: user has no internet and but calls to set repos via api
// use whatever they set
+ String newBaseUrl = "http://myprivate-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0";
String buildDir = tmpFolder.getRoot().getAbsolutePath();
- AmbariMetaInfo ambariMetaInfo = setupTempAmbariMetaInfo(buildDir);
+ TestAmbariMetaInfo ambariMetaInfo = setupTempAmbariMetaInfo(buildDir, true);
// The current stack already has (HDP, 2.1.1, redhat6).
// Deleting the json file referenced by the latestBaseUrl to simulate No
@@ -278,15 +277,25 @@ public class AmbariMetaInfoTest {
assertTrue(!latestUrlFile.exists());
// Update baseUrl
- String newBaseUrl = "http://myprivate-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0";
ambariMetaInfo.updateRepoBaseURL("HDP", "2.1.1", "redhat6", "HDP-2.1.1",
newBaseUrl);
RepositoryInfo repoInfo = ambariMetaInfo.getRepository(STACK_NAME_HDP, "2.1.1", "redhat6",
STACK_NAME_HDP + "-2.1.1");
assertEquals(newBaseUrl, repoInfo.getBaseUrl());
String prevBaseUrl = repoInfo.getDefaultBaseUrl();
+
+ // mock expectations
+ MetainfoDAO metainfoDAO = ambariMetaInfo.metaInfoDAO;
+ reset(metainfoDAO);
+ MetainfoEntity entity = createNiceMock(MetainfoEntity.class);
+ expect(metainfoDAO.findByKey("repo:/HDP/2.1.1/redhat6/HDP-2.1.1:baseurl")).andReturn(entity).atLeastOnce();
+ expect(entity.getMetainfoValue()).andReturn(newBaseUrl).atLeastOnce();
+ replay(metainfoDAO, entity);
+
ambariMetaInfo.init();
+ waitForAllReposToBeResolved(ambariMetaInfo);
+
List<RepositoryInfo> redhat6Repo = ambariMetaInfo.getRepositories(
STACK_NAME_HDP, "2.1.1", "redhat6");
assertNotNull(redhat6Repo);
@@ -324,24 +333,6 @@ public class AmbariMetaInfoTest {
assertFalse(invalid);
}
- /**
- * Method: getSupportedConfigs(String stackName, String version, String
- * serviceName)
- */
- @Test
- public void getSupportedConfigs() throws Exception {
-
- Map<String, Map<String, String>> configsAll = metaInfo.getSupportedConfigs(
- STACK_NAME_HDP, STACK_VERSION_HDP, SERVICE_NAME_HDFS);
- Set<String> filesKeys = configsAll.keySet();
- for (String file : filesKeys) {
- Map<String, String> configs = configsAll.get(file);
- Set<String> propertyKeys = configs.keySet();
- assertNotNull(propertyKeys);
- assertFalse(propertyKeys.size() == 0);
- }
- }
-
@Test
public void testServiceNameUsingComponentName() throws AmbariException {
String serviceName = metaInfo.getComponentToService(STACK_NAME_HDP,
@@ -374,31 +365,19 @@ public class AmbariMetaInfoTest {
*/
@Test
public void getServiceInfo() throws Exception {
- ServiceInfo si = metaInfo.getServiceInfo(STACK_NAME_HDP, STACK_VERSION_HDP,
+ ServiceInfo si = metaInfo.getService(STACK_NAME_HDP, STACK_VERSION_HDP,
SERVICE_NAME_HDFS);
assertNotNull(si);
}
@Test
public void testConfigDependencies() throws Exception {
- ServiceInfo serviceInfo = metaInfo.getServiceInfo(STACK_NAME_HDP, EXT_STACK_NAME,
- SERVICE_NAME_MAPRED2);
+ ServiceInfo serviceInfo = metaInfo.getService(STACK_NAME_HDP, EXT_STACK_NAME,
+ SERVICE_NAME_MAPRED2);
assertNotNull(serviceInfo);
assertTrue(!serviceInfo.getConfigDependencies().isEmpty());
}
- /**
- * Method: getSupportedServices(String stackName, String version)
- */
- @Test
- public void getSupportedServices() throws Exception {
- List<ServiceInfo> services = metaInfo.getSupportedServices(STACK_NAME_HDP,
- STACK_VERSION_HDP);
- assertNotNull(services);
- assertFalse(services.size() == 0);
-
- }
-
@Test
public void testGetRepos() throws Exception {
Map<String, List<RepositoryInfo>> repos = metaInfo.getRepository(
@@ -447,7 +426,7 @@ public class AmbariMetaInfoTest {
* @throws Exception
*/
public void testGlobalMapping() throws Exception {
- ServiceInfo sinfo = metaInfo.getServiceInfo("HDP",
+ ServiceInfo sinfo = metaInfo.getService("HDP",
"0.2", "HDFS");
List<PropertyInfo> pinfo = sinfo.getProperties();
/** check all the config knobs and make sure the global one is there **/
@@ -459,7 +438,7 @@ public class AmbariMetaInfoTest {
}
}
Assert.assertTrue(checkforglobal);
- sinfo = metaInfo.getServiceInfo("HDP",
+ sinfo = metaInfo.getService("HDP",
"0.2", "MAPREDUCE");
boolean checkforhadoopheapsize = false;
pinfo = sinfo.getProperties();
@@ -479,8 +458,9 @@ public class AmbariMetaInfoTest {
File stackRoot = new File("src/test/resources/stacks");
File stackRootTmp = new File(buildDir + "/ambari-metaInfo"); stackRootTmp.mkdir();
FileUtils.copyDirectory(stackRoot, stackRootTmp);
- AmbariMetaInfo ambariMetaInfo = new AmbariMetaInfo(stackRootTmp, new File("target/version"));
- ambariMetaInfo.injector = injector;
+ AmbariMetaInfo ambariMetaInfo = createAmbariMetaInfo(stackRootTmp, new File("target/version"), true);
+ //todo
+ //ambariMetaInfo.injector = injector;
File f1, f2, f3;
f1 = new File(stackRootTmp.getAbsolutePath() + "/001.svn"); f1.createNewFile();
f2 = new File(stackRootTmp.getAbsolutePath() + "/abcd.svn/001.svn"); f2.mkdirs(); f2.createNewFile();
@@ -492,10 +472,8 @@ public class AmbariMetaInfoTest {
// Tests the stack is loaded as expected
getServices();
getComponentsByService();
- getComponentCategory();
- getSupportedConfigs();
// Check .svn is not part of the stack but abcd.svn is
- Assert.assertNotNull(ambariMetaInfo.getStackInfo("abcd.svn", "001.svn"));
+ Assert.assertNotNull(ambariMetaInfo.getStack("abcd.svn", "001.svn"));
Assert.assertFalse(ambariMetaInfo.isSupportedStack(".svn", ""));
Assert.assertFalse(ambariMetaInfo.isSupportedStack(".svn", ""));
@@ -511,7 +489,6 @@ public class AmbariMetaInfoTest {
metaInfo.getComponent(STACK_NAME_HDP,
STACK_VERSION_HDP, SERVICE_NAME_HDFS, NON_EXT_VALUE);
} catch (StackAccessException e) {
- Assert.assertTrue(e instanceof StackAccessException);
}
}
@@ -530,7 +507,6 @@ public class AmbariMetaInfoTest {
try {
metaInfo.getRepository(STACK_NAME_HDP, STACK_VERSION_HDP, OS_TYPE, NON_EXT_VALUE);
} catch (StackAccessException e) {
- Assert.assertTrue(e instanceof StackAccessException);
}
}
@@ -541,40 +517,25 @@ public class AmbariMetaInfoTest {
try {
metaInfo.getService(STACK_NAME_HDP, STACK_VERSION_HDP, NON_EXT_VALUE);
} catch (StackAccessException e) {
- Assert.assertTrue(e instanceof StackAccessException);
}
}
@Test
- public void testGetStacksNames() throws Exception {
- Set<Stack> stackNames = metaInfo.getStackNames();
- assertEquals(stackNames.size(), STACKS_NAMES_CNT);
- assertTrue(stackNames.contains(new Stack(STACK_NAME_HDP)));
- assertTrue(stackNames.contains(new Stack(STACK_NAME_XYZ)));
- }
-
- @Test
- public void testGetStack() throws Exception {
- Stack stack = metaInfo.getStack(STACK_NAME_HDP);
- Assert.assertEquals(stack.getStackName(), STACK_NAME_HDP);
- try {
- metaInfo.getStack(NON_EXT_VALUE);
- } catch (StackAccessException e) {
- Assert.assertTrue(e instanceof StackAccessException);
- }
+ public void testGetStacks() {
+ Collection<StackInfo> stacks = metaInfo.getStacks();
+ //todo: complete test
}
@Test
public void testGetStackInfo() throws Exception {
- StackInfo stackInfo = metaInfo.getStackInfo(STACK_NAME_HDP, STACK_VERSION_HDP);
+ StackInfo stackInfo = metaInfo.getStack(STACK_NAME_HDP, STACK_VERSION_HDP);
Assert.assertEquals(stackInfo.getName(), STACK_NAME_HDP);
Assert.assertEquals(stackInfo.getVersion(), STACK_VERSION_HDP);
Assert.assertEquals(stackInfo.getMinUpgradeVersion(), STACK_MINIMAL_VERSION_HDP);
try {
- metaInfo.getStackInfo(STACK_NAME_HDP, NON_EXT_VALUE);
+ metaInfo.getStack(STACK_NAME_HDP, NON_EXT_VALUE);
} catch (StackAccessException e) {
- Assert.assertTrue(e instanceof StackAccessException);
}
}
@@ -589,7 +550,7 @@ public class AmbariMetaInfoTest {
@Test
public void testGetProperties() throws Exception {
- Set<PropertyInfo> properties = metaInfo.getProperties(STACK_NAME_HDP, STACK_VERSION_HDP, SERVICE_NAME_HDFS);
+ Set<PropertyInfo> properties = metaInfo.getServiceProperties(STACK_NAME_HDP, STACK_VERSION_HDP, SERVICE_NAME_HDFS);
Assert.assertEquals(properties.size(), PROPERTIES_CNT);
}
@@ -605,7 +566,6 @@ public class AmbariMetaInfoTest {
try {
metaInfo.getPropertiesByName(STACK_NAME_HDP, STACK_VERSION_HDP, SERVICE_NAME_HDFS, NON_EXT_VALUE);
} catch (StackAccessException e) {
- Assert.assertTrue(e instanceof StackAccessException);
}
}
@@ -636,7 +596,6 @@ public class AmbariMetaInfoTest {
try {
metaInfo.getOperatingSystem(STACK_NAME_HDP, STACK_VERSION_HDP, NON_EXT_VALUE);
} catch (StackAccessException e) {
- Assert.assertTrue(e instanceof StackAccessException);
}
}
@@ -656,9 +615,9 @@ public class AmbariMetaInfoTest {
@Test
public void testExtendedStackDefinition() throws Exception {
- StackInfo stackInfo = metaInfo.getStackInfo(STACK_NAME_HDP, EXT_STACK_NAME);
+ StackInfo stackInfo = metaInfo.getStack(STACK_NAME_HDP, EXT_STACK_NAME);
Assert.assertTrue(stackInfo != null);
- List<ServiceInfo> serviceInfos = stackInfo.getServices();
+ Collection<ServiceInfo> serviceInfos = stackInfo.getServices();
Assert.assertFalse(serviceInfos.isEmpty());
Assert.assertTrue(serviceInfos.size() > 1);
ServiceInfo deletedService = null;
@@ -740,86 +699,8 @@ public class AmbariMetaInfoTest {
}
@Test
- public void testGetParentStacksInOrder() throws Exception {
- List<StackInfo> allStacks = metaInfo.getSupportedStacks();
- StackInfo stackInfo = metaInfo.getStackInfo(STACK_NAME_HDP, EXT_STACK_NAME);
- StackInfo newStack = new StackInfo();
- newStack.setName(STACK_NAME_HDP);
- newStack.setVersion("2.0.99");
- newStack.setParentStackVersion(EXT_STACK_NAME);
- newStack.setActive(true);
- newStack.setRepositories(stackInfo.getRepositories());
- allStacks.add(newStack);
-
- Method method = StackExtensionHelper.class.getDeclaredMethod
- ("getParentStacksInOrder", Collection.class);
- method.setAccessible(true);
- StackExtensionHelper helper = new StackExtensionHelper(injector, metaInfo.getStackRoot());
- helper.fillInfo();
- Map<String, List<StackInfo>> stacks =
- (Map<String, List<StackInfo>>) method.invoke(helper, allStacks);
-
- Assert.assertNotNull(stacks.get("2.0.99"));
- // Verify order
- LinkedList<String> target = new LinkedList<String>();
- target.add("2.0.5");
- target.add("2.0.6");
- target.add("2.0.99");
- LinkedList<String> actual = new LinkedList<String>();
- LinkedList<StackInfo> parents = (LinkedList<StackInfo>) stacks.get("2.0.99");
- parents.addFirst(newStack);
- ListIterator lt = parents.listIterator(parents.size());
- while (lt.hasPrevious()) {
- StackInfo stack = (StackInfo) lt.previous();
- actual.add(stack.getVersion());
- }
- org.junit.Assert.assertArrayEquals("Order of stack extension not " +
- "preserved.", target.toArray(), actual.toArray());
- }
-
- @Test
- public void testGetApplicableServices() throws Exception {
- StackExtensionHelper helper = new StackExtensionHelper(injector,
- metaInfo.getStackRoot());
- helper.fillInfo();
- List<ServiceInfo> allServices = helper.getAllApplicableServices(metaInfo
- .getStackInfo(STACK_NAME_HDP, EXT_STACK_NAME));
-
- ServiceInfo testService = null;
- ServiceInfo existingService = null;
- for (ServiceInfo serviceInfo : allServices) {
- if (serviceInfo.getName().equals("YARN")) {
- testService = serviceInfo;
- } else if (serviceInfo.getName().equals("MAPREDUCE2")) {
- existingService = serviceInfo;
- }
- }
-
- Assert.assertNotNull(testService);
- Assert.assertNotNull(existingService);
-
- PropertyInfo testProperty = null;
- PropertyInfo existingProperty = null;
- for (PropertyInfo property : testService.getProperties()) {
- if (property.getName().equals("new-yarn-property")) {
- testProperty = property;
- }
- }
- for (PropertyInfo property : existingService.getProperties()) {
- if (property.getName().equals("mapreduce.map.log.level")) {
- existingProperty = property;
- }
- }
-
- Assert.assertNotNull(testProperty);
- Assert.assertEquals("some-value", testProperty.getValue());
- Assert.assertNotNull(existingProperty);
- Assert.assertEquals("INFO", existingProperty.getValue());
- }
-
- @Test
public void testPropertyCount() throws Exception {
- Set<PropertyInfo> properties = metaInfo.getProperties(STACK_NAME_HDP, STACK_VERSION_HDP_02, SERVICE_NAME_HDFS);
+ Set<PropertyInfo> properties = metaInfo.getServiceProperties(STACK_NAME_HDP, STACK_VERSION_HDP_02, SERVICE_NAME_HDFS);
// 3 empty properties
Assert.assertEquals(103, properties.size());
}
@@ -828,12 +709,13 @@ public class AmbariMetaInfoTest {
public void testBadStack() throws Exception {
File stackRoot = new File("src/test/resources/bad-stacks");
LOG.info("Stacks file " + stackRoot.getAbsolutePath());
- AmbariMetaInfo mi = new AmbariMetaInfo(stackRoot, new File("target/version"));
- mi.injector = injector;
+
try {
- mi.init();
- } catch(Exception e) {
- assertTrue(JAXBException.class.isInstance(e));
+ createAmbariMetaInfo(stackRoot, new File("target/version"), true);
+ fail("Exception expected due to bad stack");
+ } catch(AmbariException e) {
+ e.printStackTrace();
+ assertTrue(e.getCause() instanceof JAXBException);
}
}
@@ -1262,13 +1144,13 @@ public class AmbariMetaInfoTest {
@Test
public void testHooksDirInheritance() throws Exception {
// Test hook dir determination in parent
- StackInfo stackInfo = metaInfo.getStackInfo(STACK_NAME_HDP, "2.0.6");
+ StackInfo stackInfo = metaInfo.getStack(STACK_NAME_HDP, "2.0.6");
Assert.assertEquals("HDP/2.0.6/hooks", stackInfo.getStackHooksFolder());
// Test hook dir inheritance
- stackInfo = metaInfo.getStackInfo(STACK_NAME_HDP, "2.0.7");
+ stackInfo = metaInfo.getStack(STACK_NAME_HDP, "2.0.7");
Assert.assertEquals("HDP/2.0.6/hooks", stackInfo.getStackHooksFolder());
// Test hook dir override
- stackInfo = metaInfo.getStackInfo(STACK_NAME_HDP, "2.0.8");
+ stackInfo = metaInfo.getStack(STACK_NAME_HDP, "2.0.8");
Assert.assertEquals("HDP/2.0.8/hooks", stackInfo.getStackHooksFolder());
}
@@ -1317,21 +1199,21 @@ public class AmbariMetaInfoTest {
public void testComponentCommandScriptInheritance() throws Exception {
// Test command script determination in parent
ComponentInfo component = metaInfo.getComponent(STACK_NAME_HDP,
- "2.0.7", "HDFS", "HDFS_CLIENT");
+ "2.0.7", "HDFS", "HDFS_CLIENT");
Assert.assertEquals("scripts/hdfs_client.py",
component.getCommandScript().getScript());
component = metaInfo.getComponent(STACK_NAME_HDP,
- "2.0.7", "HBASE", "HBASE_MASTER");
+ "2.0.7", "HBASE", "HBASE_MASTER");
Assert.assertEquals("scripts/hbase_master.py",
component.getCommandScript().getScript());
// Test command script inheritance
component = metaInfo.getComponent(STACK_NAME_HDP,
- "2.0.8", "HBASE", "HBASE_MASTER");
+ "2.0.8", "HBASE", "HBASE_MASTER");
Assert.assertEquals("scripts/hbase_master.py",
component.getCommandScript().getScript());
// Test command script override
component = metaInfo.getComponent(STACK_NAME_HDP,
- "2.0.8", "HDFS", "HDFS_CLIENT");
+ "2.0.8", "HDFS", "HDFS_CLIENT");
Assert.assertEquals("scripts/hdfs_client_overridden.py",
component.getCommandScript().getScript());
}
@@ -1484,6 +1366,18 @@ public class AmbariMetaInfoTest {
@Test
public void testLatestRepo() throws Exception {
+ // ensure that all of the latest repo retrieval tasks have completed
+ StackManager sm = metaInfo.getStackManager();
+ int maxWait = 45000;
+ int waitTime = 0;
+ while (waitTime < maxWait && ! sm.haveAllRepoUrlsBeenResolved()) {
+ Thread.sleep(5);
+ waitTime += 5;
+ }
+
+ if (waitTime >= maxWait) {
+ fail("Latest Repo tasks did not complete");
+ }
for (RepositoryInfo ri : metaInfo.getRepositories("HDP", "2.1.1", "centos6")) {
Assert.assertEquals(
@@ -1651,6 +1545,8 @@ public class AmbariMetaInfoTest {
assertTrue(ignoreHost.isHostIgnored());
}
+
+ //todo: refactor test to use mocks instead of injector
/**
* Tests merging stack-based with existing definitions works
*
@@ -1658,7 +1554,19 @@ public class AmbariMetaInfoTest {
*/
@Test
public void testAlertDefinitionMerging() throws Exception {
+ Injector injector = Guice.createInjector(Modules.override(
+ new InMemoryDefaultTestModule()).with(new MockModule()));
+
+ injector.getInstance(GuiceJpaInitializer.class);
+ injector.getInstance(EntityManager.class);
injector.getInstance(OrmTestHelper.class).createCluster();
+
+ metaInfo.alertDefinitionDao = injector.getInstance(AlertDefinitionDAO.class);
+ Class<?> c = metaInfo.getClass().getSuperclass();
+ Field f = c.getDeclaredField("agentAlertDefinitions");
+ f.setAccessible(true);
+ f.set(metaInfo, injector.getInstance(AgentAlertDefinitions.class));
+
Clusters clusters = injector.getInstance(Clusters.class);
Cluster cluster = clusters.getClusterById(1);
cluster.setDesiredStackVersion(
@@ -1700,15 +1608,102 @@ public class AmbariMetaInfoTest {
}
}
- private AmbariMetaInfo setupTempAmbariMetaInfo(String buildDir)
+ private TestAmbariMetaInfo setupTempAmbariMetaInfo(String buildDir, boolean replayMocks)
throws Exception {
File stackRootTmp = new File(buildDir + "/ambari-metaInfo");
File stackRoot = new File("src/test/resources/stacks");
stackRootTmp.mkdir();
FileUtils.copyDirectory(stackRoot, stackRootTmp);
- AmbariMetaInfo ambariMetaInfo = new AmbariMetaInfo(stackRootTmp, new File(
- "target/version"));
- injector.injectMembers(ambariMetaInfo);
+ TestAmbariMetaInfo ambariMetaInfo = createAmbariMetaInfo(stackRootTmp, new File(
+ "target/version"), replayMocks);
+
return ambariMetaInfo;
}
+
+ private TestAmbariMetaInfo createAmbariMetaInfo(File stackRoot, File versionFile, boolean replayMocks) throws Exception {
+ TestAmbariMetaInfo metaInfo = new TestAmbariMetaInfo(stackRoot, versionFile);
+ if (replayMocks) {
+ metaInfo.replayAllMocks();
+
+ try {
+ metaInfo.init();
+ } catch(Exception e) {
+ LOG.info("Error in initializing ", e);
+ throw e;
+ }
+ waitForAllReposToBeResolved(metaInfo);
+ }
+
+ return metaInfo;
+ }
+
+ private void waitForAllReposToBeResolved(AmbariMetaInfo metaInfo) throws Exception {
+ int maxWait = 45000;
+ int waitTime = 0;
+ StackManager sm = metaInfo.getStackManager();
+ while (waitTime < maxWait && ! sm.haveAllRepoUrlsBeenResolved()) {
+ Thread.sleep(5);
+ waitTime += 5;
+ }
+
+ if (waitTime >= maxWait) {
+ fail("Latest Repo tasks did not complete");
+ }
+ }
+
+ private static class TestAmbariMetaInfo extends AmbariMetaInfo {
+
+ MetainfoDAO metaInfoDAO;
+ AlertDefinitionDAO alertDefinitionDAO;
+ AlertDefinitionFactory alertDefinitionFactory;
+ OsFamily osFamily;
+
+ public TestAmbariMetaInfo(File stackRoot, File serverVersionFile) throws Exception {
+ super(stackRoot, serverVersionFile);
+ // MetainfoDAO
+ metaInfoDAO = createNiceMock(MetainfoDAO.class);
+ Class<?> c = getClass().getSuperclass();
+ Field f = c.getDeclaredField("metaInfoDAO");
+ f.setAccessible(true);
+ f.set(this, metaInfoDAO);
+
+ // ActionMetadata
+ ActionMetadata actionMetadata = new ActionMetadata();
+ f = c.getDeclaredField("actionMetadata");
+ f.setAccessible(true);
+ f.set(this, actionMetadata);
+
+ //AlertDefinitionDAO
+ alertDefinitionDAO = createNiceMock(AlertDefinitionDAO.class);
+ f = c.getDeclaredField("alertDefinitionDao");
+ f.setAccessible(true);
+ f.set(this, alertDefinitionDAO);
+
+ //AlertDefinitionFactory
+ //alertDefinitionFactory = createNiceMock(AlertDefinitionFactory.class);
+ alertDefinitionFactory = new AlertDefinitionFactory();
+ f = c.getDeclaredField("alertDefinitionFactory");
+ f.setAccessible(true);
+ f.set(this, alertDefinitionFactory);
+
+ //AmbariEventPublisher
+ AmbariEventPublisher ambariEventPublisher = new AmbariEventPublisher();
+ f = c.getDeclaredField("eventPublisher");
+ f.setAccessible(true);
+ f.set(this, ambariEventPublisher);
+
+ //OSFamily
+ Configuration config = createNiceMock(Configuration.class);
+ expect(config.getSharedResourcesDirPath()).andReturn("./src/test/resources").anyTimes();
+ replay(config);
+ osFamily = new OsFamily(config);
+ f = c.getDeclaredField("os_family");
+ f.setAccessible(true);
+ f.set(this, osFamily);
+ }
+
+ public void replayAllMocks() {
+ replay(metaInfoDAO, alertDefinitionDAO);
+ }
+ }
}
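The TestAmbariMetaInfo helper above wires its mocks into the private fields that Guice would normally populate, reflecting through getClass().getSuperclass() because those fields are declared on AmbariMetaInfo itself. A stripped-down, hypothetical sketch of that reflection-based field injection pattern, with invented Holder/Dependency classes standing in for AmbariMetaInfo and one of its DAOs:

    import java.lang.reflect.Field;

    // Hypothetical stand-ins for AmbariMetaInfo and an injected DAO.
    class Dependency {
      String describe() { return "real dependency"; }
    }

    class Holder {
      private Dependency dependency; // normally populated by the Guice injector
      String describe() { return dependency.describe(); }
    }

    public class FieldInjectionSketch {
      public static void main(String[] args) throws Exception {
        Holder holder = new Holder();

        // Look the private field up on the class that declares it
        // (TestAmbariMetaInfo walks to the superclass for the same reason).
        Field f = Holder.class.getDeclaredField("dependency");
        f.setAccessible(true);

        // Swap in a test double without going through the injector.
        f.set(holder, new Dependency() {
          @Override
          String describe() { return "test double"; }
        });

        System.out.println(holder.describe()); // prints "test double"
      }
    }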
[03/11] ambari git commit: AMBARI-7175. Add explicit stack service inheritance
Posted by js...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/package/dummy-script.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/package/dummy-script.py b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/package/dummy-script.py
new file mode 100644
index 0000000..35de4bb
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/package/dummy-script.py
@@ -0,0 +1,20 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/services/SQOOP2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/services/SQOOP2/metainfo.xml b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/SQOOP2/metainfo.xml
new file mode 100644
index 0000000..55323d3
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/SQOOP2/metainfo.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <services>
+ <service>
+ <name>SQOOP2</name>
+ <displayName>Sqoop</displayName>
+ <comment>Extended SQOOP</comment>
+ <version>Extended Version</version>
+ <extends>HDP/2.1.1/SQOOP</extends>
+ </service>
+ </services>
+
+</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/configuration/placeholder.txt
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/configuration/placeholder.txt b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/configuration/placeholder.txt
new file mode 100644
index 0000000..e2479f3
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/configuration/placeholder.txt
@@ -0,0 +1,17 @@
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/metainfo.xml b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/metainfo.xml
new file mode 100644
index 0000000..86ec307
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/metainfo.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <services>
+ <service>
+ <name>STORM</name>
+ <comment>Apache Hadoop Stream processing framework (Extended)</comment>
+ <version>New version</version>
+ <extends>HDP/2.1.1/STORM</extends>
+ </service>
+ </services>
+</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/metrics.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/metrics.json b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/metrics.json
new file mode 100644
index 0000000..59bec39
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/metrics.json
@@ -0,0 +1,99 @@
+{
+ "STORM_REST_API": {
+ "Component": [
+ {
+ "type": "jmx",
+ "metrics": {
+ "metrics/api/cluster/summary/tasks.total": {
+ "metric": "tasks.total",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/slots.total": {
+ "metric": "slots.total",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/slots.free": {
+ "metric": "slots.free",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/supervisors": {
+ "metric": "supervisors",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/executors.total": {
+ "metric": "executors.total",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/slots.used": {
+ "metric": "slots.used",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/topologies": {
+ "metric": "topologies",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/nimbus.uptime": {
+ "metric": "nimbus.uptime",
+ "pointInTime": true,
+ "temporal": false
+ }
+ }
+ }
+ ],
+ "HostComponent": [
+ {
+ "type": "jmx",
+ "metrics": {
+ "metrics/api/cluster/summary/tasks.total": {
+ "metric": "tasks.total",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/slots.total": {
+ "metric": "slots.total",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/slots.free": {
+ "metric": "slots.free",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/supervisors": {
+ "metric": "supervisors",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/executors.total": {
+ "metric": "executors.total",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/slots.used": {
+ "metric": "slots.used",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/topologies": {
+ "metric": "topologies",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/api/cluster/summary/nimbus.uptime": {
+ "metric": "nimbus.uptime",
+ "pointInTime": true,
+ "temporal": false
+ }
+ }
+ }
+
+ ]
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/package/placeholder.txt
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/package/placeholder.txt b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/package/placeholder.txt
new file mode 100644
index 0000000..e2479f3
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/STORM/package/placeholder.txt
@@ -0,0 +1,17 @@
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/2.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/2.0/metainfo.xml b/ambari-server/src/test/resources/stacks/OTHER/2.0/metainfo.xml
new file mode 100644
index 0000000..716972f
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/2.0/metainfo.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <versions>
+ <active>true</active>
+ </versions>
+ <extends>1.0</extends>
+</metainfo>
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/2.0/repos/hdp.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/2.0/repos/hdp.json b/ambari-server/src/test/resources/stacks/OTHER/2.0/repos/hdp.json
new file mode 100644
index 0000000..fc51627
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/2.0/repos/hdp.json
@@ -0,0 +1,10 @@
+{
+ "HDP-2.1.1": {
+ "latest": {
+ "centos6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "redhat6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "oraclelinux6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "suse11": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/BUILDS/2.1.1.0-118/hdp.repo"
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/2.0/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/2.0/repos/repoinfo.xml b/ambari-server/src/test/resources/stacks/OTHER/2.0/repos/repoinfo.xml
new file mode 100644
index 0000000..9d8a232
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/2.0/repos/repoinfo.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<reposinfo>
+ <latest>./hdp.json</latest>
+ <os family="centos6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="centos5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="suse11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="sles11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+</reposinfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/2.0/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/2.0/services/HBASE/metainfo.xml b/ambari-server/src/test/resources/stacks/OTHER/2.0/services/HBASE/metainfo.xml
new file mode 100644
index 0000000..b20aa6a
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/2.0/services/HBASE/metainfo.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <services>
+ <service>
+ <name>HBASE</name>
+ <displayName>HBASE</displayName>
+ <comment>Inherited from HDP stack</comment>
+ <version>other-2.0-version</version>
+ <extends>HDP/2.0.8/HBASE</extends>
+ <excluded-config-types>
+ <config-type>hbase-policy</config-type>
+ </excluded-config-types>
+ </service>
+ </services>
+</metainfo>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/2.0/services/SQOOP2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/2.0/services/SQOOP2/metainfo.xml b/ambari-server/src/test/resources/stacks/OTHER/2.0/services/SQOOP2/metainfo.xml
new file mode 100644
index 0000000..b90f143
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/2.0/services/SQOOP2/metainfo.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <services>
+ <service>
+ <name>SQOOP2</name>
+ <displayName>Sqoop</displayName>
+ <comment>Inherited from parent</comment>
+ <version>Extended from parent version</version>
+ </service>
+ </services>
+</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/metainfo.xml b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/metainfo.xml
new file mode 100644
index 0000000..31716d2
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/metainfo.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <versions>
+ <active>true</active>
+ </versions>
+</metainfo>
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/repos/hdp.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/repos/hdp.json b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/repos/hdp.json
new file mode 100644
index 0000000..fc51627
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/repos/hdp.json
@@ -0,0 +1,10 @@
+{
+ "HDP-2.1.1": {
+ "latest": {
+ "centos6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "redhat6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "oraclelinux6": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118",
+ "suse11": "http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11/2.x/BUILDS/2.1.1.0-118/hdp.repo"
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/repos/repoinfo.xml b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/repos/repoinfo.xml
new file mode 100644
index 0000000..9d8a232
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/repos/repoinfo.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<reposinfo>
+ <latest>./hdp.json</latest>
+ <os family="centos6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="centos5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat6">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="redhat5">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos5/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="suse11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+ <os family="sles11">
+ <repo>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/suse11/2.x/updates/2.0.6.0</baseurl>
+ <repoid>HDP-2.1.1</repoid>
+ <reponame>HDP</reponame>
+ </repo>
+ </os>
+</reposinfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/role_command_order.json b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/role_command_order.json
new file mode 100644
index 0000000..c45ba07
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/role_command_order.json
@@ -0,0 +1,104 @@
+{
+ "_comment" : "Record format:",
+ "_comment" : "blockedRole-blockedCommand: [blockerRole1-blockerCommand1, blockerRole2-blockerCommand2, ...]",
+ "general_deps" : {
+ "_comment" : "dependencies for all cases",
+ "NAGIOS_SERVER-INSTALL" : ["HIVE_CLIENT-INSTALL", "HCAT-INSTALL",
+ "MAPREDUCE_CLIENT-INSTALL", "OOZIE_CLIENT-INSTALL"],
+ "HBASE_MASTER-START": ["ZOOKEEPER_SERVER-START"],
+ "HBASE_REGIONSERVER-START": ["HBASE_MASTER-START"],
+ "OOZIE_SERVER-START": ["JOBTRACKER-START", "TASKTRACKER-START"],
+ "WEBHCAT_SERVER-START": ["TASKTRACKER-START", "HIVE_SERVER-START"],
+ "HIVE_METASTORE-START": ["MYSQL_SERVER-START"],
+ "HIVE_SERVER-START": ["TASKTRACKER-START", "MYSQL_SERVER-START"],
+ "HUE_SERVER-START": ["HIVE_SERVER-START", "HCAT-START", "OOZIE_SERVER-START"],
+ "FLUME_HANDLER-START": ["OOZIE_SERVER-START"],
+ "NAGIOS_SERVER-START": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START",
+ "GANGLIA_SERVER-START", "GANGLIA_MONITOR-START", "HCAT-START",
+ "HIVE_SERVER-START", "HIVE_METASTORE-START", "HUE_SERVER-START",
+ "JOBTRACKER-START", "TASKTRACKER-START", "ZOOKEEPER_SERVER-START",
+ "MYSQL_SERVER-START", "OOZIE_SERVER-START", "PIG-START", "SQOOP-START",
+ "WEBHCAT_SERVER-START", "FLUME_HANDLER-START"],
+ "MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
+ "OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START"],
+ "WEBHCAT_SERVICE_CHECK-SERVICE_CHECK": ["WEBHCAT_SERVER-START"],
+ "HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
+ "HIVE_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START", "HIVE_METASTORE-START"],
+ "HCAT_SERVICE_CHECK-SERVICE_CHECK": ["HIVE_SERVER-START"],
+ "PIG_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
+ "SQOOP_SERVICE_CHECK-SERVICE_CHECK": ["JOBTRACKER-START", "TASKTRACKER-START"],
+ "ZOOKEEPER_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],
+ "ZOOKEEPER_QUORUM_SERVICE_CHECK-SERVICE_CHECK": ["ZOOKEEPER_SERVER-START"],
+ "ZOOKEEPER_SERVER-STOP" : ["HBASE_MASTER-STOP", "HBASE_REGIONSERVER-STOP"],
+ "HBASE_MASTER-STOP": ["HBASE_REGIONSERVER-STOP"],
+ "TASKTRACKER-UPGRADE": ["JOBTRACKER-UPGRADE"],
+ "MAPREDUCE_CLIENT-UPGRADE": ["TASKTRACKER-UPGRADE", "JOBTRACKER-UPGRADE"],
+ "ZOOKEEPER_SERVER-UPGRADE": ["MAPREDUCE_CLIENT-UPGRADE"],
+ "ZOOKEEPER_CLIENT-UPGRADE": ["ZOOKEEPER_SERVER-UPGRADE"],
+ "HBASE_MASTER-UPGRADE": ["ZOOKEEPER_CLIENT-UPGRADE"],
+ "HBASE_REGIONSERVER-UPGRADE": ["HBASE_MASTER-UPGRADE"],
+ "HBASE_CLIENT-UPGRADE": ["HBASE_REGIONSERVER-UPGRADE"],
+ "HIVE_SERVER-UPGRADE" : ["HBASE_CLIENT-UPGRADE"],
+ "HIVE_METASTORE-UPGRADE" : ["HIVE_SERVER-UPGRADE"],
+ "MYSQL_SERVER-UPGRADE": ["HIVE_METASTORE-UPGRADE"],
+ "HIVE_CLIENT-UPGRADE": ["MYSQL_SERVER-UPGRADE"],
+ "HCAT-UPGRADE": ["HIVE_CLIENT-UPGRADE"],
+ "OOZIE_SERVER-UPGRADE" : ["HCAT-UPGRADE"],
+ "OOZIE_CLIENT-UPGRADE" : ["OOZIE_SERVER-UPGRADE"],
+ "WEBHCAT_SERVER-UPGRADE" : ["OOZIE_CLIENT-UPGRADE"],
+ "PIG-UPGRADE" : ["WEBHCAT_SERVER-UPGRADE"],
+ "SQOOP-UPGRADE" : ["PIG-UPGRADE"],
+ "NAGIOS_SERVER-UPGRADE" : ["SQOOP-UPGRADE"],
+ "GANGLIA_SERVER-UPGRADE" : ["NAGIOS_SERVER-UPGRADE"],
+ "GANGLIA_MONITOR-UPGRADE" : ["GANGLIA_SERVER-UPGRADE"]
+ },
+ "_comment" : "GLUSTERFS-specific dependencies",
+ "optional_glusterfs": {
+ "HBASE_MASTER-START": ["PEERSTATUS-START"],
+ "JOBTRACKER-START": ["PEERSTATUS-START"],
+ "TASKTRACKER-START": ["PEERSTATUS-START"],
+ "GLUSTERFS_SERVICE_CHECK-SERVICE_CHECK": ["PEERSTATUS-START"],
+ "JOBTRACKER-UPGRADE": ["GLUSTERFS_CLIENT-UPGRADE"]
+ },
+ "_comment" : "Dependencies that are used when GLUSTERFS is not present in cluster",
+ "optional_no_glusterfs": {
+ "SECONDARY_NAMENODE-START": ["NAMENODE-START"],
+ "RESOURCEMANAGER-START": ["NAMENODE-START", "DATANODE-START"],
+ "NODEMANAGER-START": ["NAMENODE-START", "DATANODE-START", "RESOURCEMANAGER-START"],
+ "HISTORYSERVER-START": ["NAMENODE-START", "DATANODE-START"],
+ "HBASE_MASTER-START": ["NAMENODE-START", "DATANODE-START"],
+ "JOBTRACKER-START": ["NAMENODE-START", "DATANODE-START"],
+ "TASKTRACKER-START": ["NAMENODE-START", "DATANODE-START"],
+ "HIVE_SERVER-START": ["DATANODE-START"],
+ "WEBHCAT_SERVER-START": ["DATANODE-START"],
+ "NAGIOS_SERVER-START": ["NAMENODE-START", "SECONDARY_NAMENODE-START",
+ "DATANODE-START", "RESOURCEMANAGER-START", "NODEMANAGER-START", "HISTORYSERVER-START"],
+ "HDFS_SERVICE_CHECK-SERVICE_CHECK": ["NAMENODE-START", "DATANODE-START",
+ "SECONDARY_NAMENODE-START"],
+ "MAPREDUCE2_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START",
+ "RESOURCEMANAGER-START", "HISTORYSERVER-START", "YARN_SERVICE_CHECK-SERVICE_CHECK"],
+ "YARN_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
+ "RESOURCEMANAGER_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START"],
+ "PIG_SERVICE_CHECK-SERVICE_CHECK": ["RESOURCEMANAGER-START", "NODEMANAGER-START"],
+ "NAMENODE-STOP": ["JOBTRACKER-STOP", "TASKTRACKER-STOP", "RESOURCEMANAGER-STOP",
+ "NODEMANAGER-STOP", "HISTORYSERVER-STOP", "HBASE_MASTER-STOP"],
+ "DATANODE-STOP": ["JOBTRACKER-STOP", "TASKTRACKER-STOP", "RESOURCEMANAGER-STOP",
+ "NODEMANAGER-STOP", "HISTORYSERVER-STOP", "HBASE_MASTER-STOP"],
+ "SECONDARY_NAMENODE-UPGRADE": ["NAMENODE-UPGRADE"],
+ "DATANODE-UPGRADE": ["SECONDARY_NAMENODE-UPGRADE"],
+ "HDFS_CLIENT-UPGRADE": ["DATANODE-UPGRADE"],
+ "JOBTRACKER-UPGRADE": ["HDFS_CLIENT-UPGRADE"]
+ },
+ "_comment" : "Dependencies that are used in HA NameNode cluster",
+ "namenode_optional_ha": {
+ "NAMENODE-START": ["JOURNALNODE-START", "ZOOKEEPER_SERVER-START"],
+ "ZKFC-START": ["NAMENODE-START"],
+ "NAGIOS_SERVER-START": ["ZKFC-START", "JOURNALNODE-START"],
+ "HDFS_SERVICE_CHECK-SERVICE_CHECK": ["ZKFC-START"]
+ },
+ "_comment" : "Dependencies that are used in ResourceManager HA cluster",
+ "resourcemanager_optional_ha" : {
+ "RESOURCEMANAGER-START": ["ZOOKEEPER_SERVER-START"]
+ }
+}
+
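The role_command_order.json above encodes each dependency as "blockedRole-blockedCommand": [list of blocker role-commands]. A small, hypothetical sketch of that shape in plain Java collections (the keys and values are copied from the general_deps section; the lookup class itself is invented):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Hypothetical lookup over the "blockedRole-blockedCommand" -> [blockers]
    // mapping used by the general_deps section of role_command_order.json.
    public class RoleCommandOrderSketch {
      public static void main(String[] args) {
        Map<String, List<String>> generalDeps = new HashMap<String, List<String>>();
        generalDeps.put("HBASE_MASTER-START", Arrays.asList("ZOOKEEPER_SERVER-START"));
        generalDeps.put("HBASE_REGIONSERVER-START", Arrays.asList("HBASE_MASTER-START"));

        // A command may only be scheduled once every blocker listed for it has run.
        List<String> blockers = generalDeps.get("HBASE_REGIONSERVER-START");
        System.out.println("HBASE_REGIONSERVER-START blocked by: " + blockers);
      }
    }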
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/configuration/hdfs-site.xml b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/configuration/hdfs-site.xml
new file mode 100644
index 0000000..c40fbd0
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/configuration/hdfs-site.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<configuration>
+
+ <property>
+ <name>dfs.name.dir</name>
+ <value></value>
+ <description>Determines where on the local filesystem the DFS name node
+ should store the name table.</description>
+ <final>true</final>
+ </property>
+
+ <property>
+ <name>dfs.support.append</name>
+ <value>true</value>
+ <description>to enable dfs append</description>
+ <final>true</final>
+ <deletable>false</deletable>
+ </property>
+
+ <property>
+ <name>dfs.webhdfs.enabled</name>
+ <value>true</value>
+ <description>to enable webhdfs</description>
+ </property>
+
+</configuration>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/metainfo.xml b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/metainfo.xml
new file mode 100644
index 0000000..d3bef74
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/metainfo.xml
@@ -0,0 +1,146 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<metainfo>
+ <schemaVersion>2.0</schemaVersion>
+ <services>
+ <service>
+ <name>HDFS</name>
+ <comment>Apache Hadoop Distributed File System</comment>
+ <version>2.1.0.2.0.6.0</version>
+
+ <components>
+ <component>
+ <name>NAMENODE</name>
+ <category>MASTER</category>
+ <commandScript>
+ <script>scripts/namenode.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ <customCommands>
+ <customCommand>
+ <name>DECOMMISSION</name>
+ <commandScript>
+ <script>scripts/namenode_dec_overr.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </customCommand>
+ <customCommand>
+ <name>YET_ANOTHER_CHILD_COMMAND</name>
+ <commandScript>
+ <script>scripts/yet_another_child_command.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </customCommand>
+ </customCommands>
+ </component>
+
+ <component>
+ <name>DATANODE</name>
+ <category>SLAVE</category>
+ <commandScript>
+ <script>scripts/datanode.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </component>
+
+ <component>
+ <name>SECONDARY_NAMENODE</name>
+ <category>MASTER</category>
+ <commandScript>
+ <script>scripts/snamenode.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </component>
+
+ <component>
+ <name>HDFS_CLIENT</name>
+ <category>CLIENT</category>
+ <commandScript>
+ <script>scripts/hdfs_client_overridden.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </component>
+
+ <component>
+ <name>JOURNALNODE</name>
+ <category>MASTER</category>
+ <commandScript>
+ <script>scripts/journalnode.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </component>
+
+ <component>
+ <name>ZKFC</name>
+ <category>SLAVE</category>
+ <commandScript>
+ <script>scripts/zkfc_slave.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </component>
+ </components>
+
+ <osSpecifics>
+ <osSpecific>
+ <osFamily>any</osFamily>
+ <packages>
+ <package>
+ <name>child-package-def</name>
+ </package>
+ </packages>
+ </osSpecific>
+ </osSpecifics>
+
+ <commandScript>
+ <script>scripts/service_check_2.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>300</timeout>
+ </commandScript>
+
+ <customCommands>
+ <customCommand>
+ <name>RESTART</name>
+ <commandScript>
+ <script>scripts/restart_child.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </customCommand>
+ <customCommand>
+ <name>YET_ANOTHER_CHILD_SRV_COMMAND</name>
+ <commandScript>
+ <script>scripts/yet_another_child_srv_command.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </customCommand>
+ </customCommands>
+
+ <configuration-dependencies>
+ </configuration-dependencies>
+ </service>
+ </services>
+</metainfo>
[09/11] ambari git commit: AMBARI-7175. Add explicit stack service
inheritance
Posted by js...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationDirectory.java
new file mode 100644
index 0000000..d4c99e5
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationDirectory.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.stack.ConfigurationXml;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.namespace.QName;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Encapsulates IO operations on a stack definition configuration directory.
+ */
+public class ConfigurationDirectory extends StackDefinitionDirectory {
+ /**
+ * Used to unmarshal a stack definition configuration to an object representation
+ */
+ private static ModuleFileUnmarshaller unmarshaller = new ModuleFileUnmarshaller();
+
+ /**
+ * Map of configuration type to configuration module.
+ * One entry for each configuration file in this configuration directory.
+ */
+ private Map<String, ConfigurationModule> configurationModules = new HashMap<String, ConfigurationModule>();
+
+ /**
+ * Logger instance
+ */
+ private final static Logger LOG = LoggerFactory.getLogger(ConfigurationDirectory.class);
+
+ /**
+ * Constructor.
+ *
+ * @param directoryName configuration directory name
+ */
+ public ConfigurationDirectory(String directoryName) {
+ super(directoryName);
+ parsePath();
+ }
+
+ /**
+ * Obtain a collection of configuration modules representing each configuration
+ * file contained in this configuration directory.
+ *
+ * @return collection of configuration modules
+ */
+ public Collection<ConfigurationModule> getConfigurationModules() {
+ return configurationModules.values();
+ }
+
+ /**
+ * Parse the configuration directory.
+ */
+ private void parsePath() {
+ File[] configFiles = directory.listFiles(AmbariMetaInfo.FILENAME_FILTER);
+ if (configFiles != null) {
+ for (File configFile : configFiles) {
+ if (configFile.getName().endsWith(AmbariMetaInfo.SERVICE_CONFIG_FILE_NAME_POSTFIX)) {
+ try {
+ String configType = ConfigHelper.fileNameToConfigType(configFile.getName());
+ ConfigurationXml config = unmarshaller.unmarshal(ConfigurationXml.class, configFile);
+ ConfigurationInfo configInfo = new ConfigurationInfo(parseProperties(config,
+ configFile.getName()), parseAttributes(config));
+ ConfigurationModule module = new ConfigurationModule(configType, configInfo);
+ configurationModules.put(configType, module);
+ } catch (Exception e) {
+ LOG.error("Could not load configuration for " + configFile, e);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Parse a configuration's properties.
+ *
+ * @param configuration object representation of a configuration file
+ * @param fileName configuration file name
+ *
+ * @return collection of properties
+ */
+ private Collection<PropertyInfo> parseProperties(ConfigurationXml configuration, String fileName) {
+ List<PropertyInfo> props = new ArrayList<PropertyInfo>();
+ for (PropertyInfo pi : configuration.getProperties()) {
+ pi.setFilename(fileName);
+ props.add(pi);
+ }
+ return props; }
+
+ /**
+ * Parse a configuration's type attributes.
+ *
+ * @param configuration object representation of a configuration file
+ *
+ * @return collection of attributes for the configuration type
+ */
+ private Map<String, String> parseAttributes(ConfigurationXml configuration) {
+ Map<String, String> attributes = new HashMap<String, String>();
+ for (Map.Entry<QName, String> attribute : configuration.getAttributes().entrySet()) {
+ attributes.put(attribute.getKey().getLocalPart(), attribute.getValue());
+ }
+ return attributes;
+ }
+}
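
A minimal usage sketch for the class above (the directory path is a made-up example): point a ConfigurationDirectory at a service's configuration folder and list the configuration types and property names it parsed.

package org.apache.ambari.server.stack;

import org.apache.ambari.server.state.PropertyInfo;

public class ConfigurationDirectoryExample {
  public static void main(String[] args) {
    // Hypothetical path to a service configuration folder in a stack definition.
    ConfigurationDirectory dir = new ConfigurationDirectory(
        "/var/lib/ambari-server/resources/stacks/HDP/2.0.6/services/HDFS/configuration");

    // One module per configuration file found in the directory.
    for (ConfigurationModule module : dir.getConfigurationModules()) {
      System.out.println(module.getConfigType());
      for (PropertyInfo property : module.getModuleInfo().getProperties()) {
        System.out.println("  " + property.getName());
      }
    }
  }
}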
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationInfo.java
new file mode 100644
index 0000000..97d57cb
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationInfo.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.server.state.PropertyInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * Encapsulates configuration properties and attributes for a single type.
+ */
+public class ConfigurationInfo {
+ /**
+ * Collection of properties
+ */
+ private Collection<PropertyInfo> properties;
+
+ /**
+ * Map of attribute category to a map of attribute name/value
+ */
+ private Map<String, Map<String, String>> attributes;
+
+ /**
+ * Logger instance
+ */
+ private final static Logger LOG = LoggerFactory.getLogger(ConfigurationInfo.class);
+
+ /**
+ * Constructor.
+ *
+ * @param properties configuration properties
+ * @param attributes configuration attributes
+ */
+ public ConfigurationInfo(Collection<PropertyInfo> properties, Map<String, String> attributes) {
+ this.properties = properties;
+ setAttributes(attributes);
+ }
+
+ /**
+ * Obtain the configuration properties.
+ *
+ * @return collection of properties
+ */
+ public Collection<PropertyInfo> getProperties() {
+ return properties;
+ }
+
+ /**
+ * Obtain the configuration attributes.
+ *
+ * @return map of attribute category to a map of attribute names/values
+ */
+ public Map<String, Map<String, String>> getAttributes() {
+ return attributes;
+ }
+
+ /**
+ * Set the default value of all type attributes which are not already specified.
+ */
+ public void ensureDefaultAttributes() {
+ Map<String, String> supportsAttributes = attributes.get(Supports.KEYWORD);
+ for (Supports supportsProperty : Supports.values()) {
+ String propertyName = supportsProperty.getPropertyName();
+ if (! supportsAttributes.containsKey(propertyName)) {
+ supportsAttributes.put(propertyName, supportsProperty.getDefaultValue());
+ }
+ }
+ }
+
+ /**
+ *
+ * Set the specified configuration type attributes.
+ *
+ * @param specifiedAttributes attributes that have been specified in configuration
+ */
+ private void setAttributes(Map<String, String> specifiedAttributes) {
+ Map<String, Map<String, String>> attributes = new HashMap<String, Map<String, String>>();
+ Map<String, String> supportsAttributes = new HashMap<String, String>();
+ attributes.put(Supports.KEYWORD, supportsAttributes);
+
+ for (Map.Entry<String, String> entry : specifiedAttributes.entrySet()) {
+ String attributeName = entry.getKey();
+ Supports s = Supports.attributeNameValueOf(attributeName);
+ if (s != null) {
+ supportsAttributes.put(s.getPropertyName(),
+ Boolean.valueOf(entry.getValue()).toString());
+ } else {
+ LOG.warn("Unknown configuration type attribute is specified: {}={}", attributeName, entry.getValue());
+ }
+ }
+ this.attributes = attributes;
+ }
+
+
+ /**
+ * Service configuration-types can support different abilities. This
+ * enumerates the various abilities that configuration-types can support.
+ *
+ * For example, Hadoop configuration types like 'core-site' and 'hdfs-site'
+ * can support the ability to define certain configs as 'final'.
+ */
+ public static enum Supports {
+
+ FINAL("supports_final"),
+ ADDING_FORBIDDEN("supports_adding_forbidden"),
+ DO_NOT_EXTEND("supports_do_not_extend");
+
+ public static final String KEYWORD = "supports";
+
+ private String defaultValue;
+ private String xmlAttributeName;
+
+ private Supports(String xmlAttributeName) {
+ this(xmlAttributeName, Boolean.FALSE.toString());
+ }
+
+ private Supports(String xmlAttributeName, String defaultValue) {
+ this.defaultValue = defaultValue;
+ this.xmlAttributeName = xmlAttributeName;
+ }
+
+ public String getDefaultValue() {
+ return defaultValue;
+ }
+
+ public String getXmlAttributeName() {
+ return xmlAttributeName;
+ }
+
+ public String getPropertyName() {
+ return name().toLowerCase();
+ }
+
+ public static Supports attributeNameValueOf(String attributeName) {
+ for (Supports s : values()) {
+ if (s.getXmlAttributeName().equals(attributeName)) {
+ return s;
+ }
+ }
+ return null;
+ }
+ }
+}
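
A small sketch of how the Supports attributes behave, using only the class above: the constructor maps the XML attribute name supports_final onto the "final" ability, and ensureDefaultAttributes() fills in "false" for the abilities the configuration did not declare.

package org.apache.ambari.server.stack;

import java.util.Collections;
import java.util.Map;

import org.apache.ambari.server.state.PropertyInfo;

public class ConfigurationInfoExample {
  public static void main(String[] args) {
    // No properties, one declared attribute: supports_final="true".
    ConfigurationInfo info = new ConfigurationInfo(
        Collections.<PropertyInfo>emptyList(),
        Collections.singletonMap("supports_final", "true"));

    info.ensureDefaultAttributes();

    // Prints final=true plus adding_forbidden=false and do_not_extend=false,
    // which were filled in with their defaults.
    Map<String, String> supports =
        info.getAttributes().get(ConfigurationInfo.Supports.KEYWORD);
    for (Map.Entry<String, String> entry : supports.entrySet()) {
      System.out.println(entry.getKey() + "=" + entry.getValue());
    }
  }
}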
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationModule.java
new file mode 100644
index 0000000..ff2e930
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ConfigurationModule.java
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.state.PropertyInfo;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+
+
+/**
+ * Configuration module which provides functionality related to parsing and fully
+ * resolving a configuration from the stack definition. Each instance is specific
+ * to a configuration type.
+ */
+public class ConfigurationModule extends BaseModule<ConfigurationModule, ConfigurationInfo> {
+ /**
+ * Configuration type
+ */
+ private String configType;
+
+ /**
+ * Associated configuration info
+ */
+ ConfigurationInfo info;
+
+ /**
+ * Specifies whether this configuration is marked as deleted
+ */
+ private boolean isDeleted;
+
+
+ /**
+ * Constructor.
+ *
+ * @param configType configuration type
+ * @param info configuration info
+ */
+ public ConfigurationModule(String configType, ConfigurationInfo info) {
+ this.configType = configType;
+ this.info = info;
+ }
+
+ @Override
+ public void resolve(ConfigurationModule parent, Map<String, StackModule> allStacks) throws AmbariException {
+ // merge properties also removes deleted props so should be called even if extension is disabled
+ mergeProperties(parent);
+
+ if (isExtensionEnabled()) {
+ mergeAttributes(parent);
+ }
+ }
+
+ @Override
+ public ConfigurationInfo getModuleInfo() {
+ return info;
+ }
+
+ @Override
+ public boolean isDeleted() {
+ return isDeleted;
+ }
+
+ @Override
+ public String getId() {
+ return getConfigType();
+ }
+
+ /**
+ * Obtain the configuration type.
+ *
+ * @return configuration type
+ */
+ public String getConfigType() {
+ return configType;
+ }
+
+
+ /**
+ * Set the deleted flag.
+ *
+ * @param isDeleted whether the configuration has been marked for deletion
+ */
+ public void setDeleted(boolean isDeleted) {
+ this.isDeleted = isDeleted;
+ }
+
+ /**
+ * Merge configuration properties with the parent configuration.
+ *
+ * @param parent parent configuration module
+ */
+ private void mergeProperties(ConfigurationModule parent) {
+ Collection<String> existingProps = new HashSet<String>();
+ Iterator<PropertyInfo> iter = info.getProperties().iterator();
+ while (iter.hasNext()) {
+ PropertyInfo prop = iter.next();
+ existingProps.add(prop.getFilename() + "/" + prop.getName());
+ if (prop.isDeleted()) {
+ iter.remove();
+ }
+ }
+
+ if (isExtensionEnabled()) {
+ for (PropertyInfo prop : parent.info.getProperties()) {
+ if (! existingProps.contains(prop.getFilename() + "/" + prop.getName())) {
+ info.getProperties().add(prop);
+ }
+ }
+ }
+ }
+
+ /**
+ * Merge configuration attributes with the parent configuration.
+ *
+ * @param parent parent configuration module
+ */
+ private void mergeAttributes(ConfigurationModule parent) {
+
+ for (Map.Entry<String, Map<String, String>> parentCategoryEntry : parent.info.getAttributes().entrySet()) {
+ String category = parentCategoryEntry.getKey();
+ Map<String, String> categoryAttributeMap = info.getAttributes().get(category);
+ if (categoryAttributeMap == null) {
+ categoryAttributeMap = new HashMap<String, String>();
+ info.getAttributes().put(category, categoryAttributeMap);
+ }
+ for (Map.Entry<String, String> parentAttributeEntry : parentCategoryEntry.getValue().entrySet()) {
+ String attributeName = parentAttributeEntry.getKey();
+ if (! categoryAttributeMap.containsKey(attributeName)) {
+ categoryAttributeMap.put(attributeName, parentAttributeEntry.getValue());
+ }
+ }
+ }
+ }
+
+ /**
+ * Determine if the configuration should extend the parent's configuration.
+ *
+ * @return true if this configuration should extend the parent's; false otherwise
+ */
+ //todo: is this valuable as a generic module concept?
+ private boolean isExtensionEnabled() {
+ Map<String, String> supportsMap = getModuleInfo().getAttributes().get(ConfigurationInfo.Supports.KEYWORD);
+ if (supportsMap == null) {
+ return true;
+ }
+
+ String val = supportsMap.get(ConfigurationInfo.Supports.DO_NOT_EXTEND.getPropertyName());
+ return val == null || val.equals("false");
+ }
+}
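
A sketch of attribute inheritance during resolution, again using only classes from this commit: a child "core-site" module that declares no attributes of its own picks up the parent's supports_final flag once resolve() runs (empty property lists keep the example short).

package org.apache.ambari.server.stack;

import java.util.Collections;

import org.apache.ambari.server.state.PropertyInfo;

public class ConfigurationModuleExample {
  public static void main(String[] args) throws Exception {
    ConfigurationModule parent = new ConfigurationModule("core-site",
        new ConfigurationInfo(Collections.<PropertyInfo>emptyList(),
            Collections.singletonMap("supports_final", "true")));

    ConfigurationModule child = new ConfigurationModule("core-site",
        new ConfigurationInfo(Collections.<PropertyInfo>emptyList(),
            Collections.<String, String>emptyMap()));

    // The stack map is not consulted when merging a single configuration type.
    child.resolve(parent, Collections.<String, StackModule>emptyMap());

    // Prints "true": the child inherited the parent's "final" support flag.
    System.out.println(child.getModuleInfo().getAttributes()
        .get(ConfigurationInfo.Supports.KEYWORD).get("final"));
  }
}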
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/ModuleFileUnmarshaller.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ModuleFileUnmarshaller.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ModuleFileUnmarshaller.java
new file mode 100644
index 0000000..aa8e17b
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ModuleFileUnmarshaller.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.state.stack.ConfigurationXml;
+import org.apache.ambari.server.state.stack.RepositoryXml;
+import org.apache.ambari.server.state.stack.ServiceMetainfoXml;
+import org.apache.ambari.server.state.stack.StackMetainfoXml;
+import org.apache.ambari.server.state.stack.UpgradePack;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Unmarshaller;
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Provides functionality to unmarshal stack definition files to their
+ * corresponding object representations.
+ */
+class ModuleFileUnmarshaller {
+ /**
+ * Map of class to JAXB context
+ */
+ private static final Map<Class<?>, JAXBContext> jaxbContexts = new HashMap<Class<?>, JAXBContext>();
+
+ /**
+ * Unmarshal a file to its corresponding object type.
+ *
+ * @param clz class of the object representation
+ * @param file file to unmarshal
+ *
+ * @return object representation of the specified file
+ * @throws JAXBException if unable to unmarshal the file
+ */
+ public <T> T unmarshal(Class<T> clz, File file) throws JAXBException {
+ Unmarshaller u = jaxbContexts.get(clz).createUnmarshaller();
+
+ return clz.cast(u.unmarshal(file));
+ }
+
+ /**
+ * statically register the JAXB contexts
+ */
+ static {
+ try {
+ // two classes define the top-level element "metainfo", so ServiceMetainfoXml needs its own context.
+ JAXBContext ctx = JAXBContext.newInstance(StackMetainfoXml.class, RepositoryXml.class,
+ ConfigurationXml.class, UpgradePack.class);
+
+ jaxbContexts.put(StackMetainfoXml.class, ctx);
+ jaxbContexts.put(RepositoryXml.class, ctx);
+ jaxbContexts.put(ConfigurationXml.class, ctx);
+ jaxbContexts.put(UpgradePack.class, ctx);
+ jaxbContexts.put(ServiceMetainfoXml.class, JAXBContext.newInstance(ServiceMetainfoXml.class));
+ } catch (JAXBException e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
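
A usage sketch (it has to live in the same package because the class is package-private): unmarshal one of the test metainfo.xml files added earlier in this commit and print the service names it declares. The relative path assumes the repository root as the working directory, and it is assumed that ServiceMetainfoXml exposes its services via getServices().

package org.apache.ambari.server.stack;

import java.io.File;

import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.stack.ServiceMetainfoXml;

public class MetainfoUnmarshalExample {
  public static void main(String[] args) throws Exception {
    ModuleFileUnmarshaller unmarshaller = new ModuleFileUnmarshaller();

    ServiceMetainfoXml metainfo = unmarshaller.unmarshal(ServiceMetainfoXml.class,
        new File("ambari-server/src/test/resources/stacks_with_cycle/" +
            "OTHER/1.0/services/HDFS/metainfo.xml"));

    // For the file above this prints a single line: HDFS
    for (ServiceInfo service : metainfo.getServices()) {
      System.out.println(service.getName());
    }
  }
}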
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java
new file mode 100644
index 0000000..843df0b
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.state.stack.ServiceMetainfoXml;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.bind.JAXBException;
+import java.io.File;
+
+/**
+ * Encapsulates IO operations on a stack definition service directory.
+ */
+public class ServiceDirectory extends StackDefinitionDirectory {
+ /**
+ * metrics file
+ */
+ private File metricsFile;
+
+ /**
+ * alerts file
+ */
+ private File alertsFile;
+
+ /**
+ * package directory path
+ */
+ private String packageDir;
+
+ /**
+ * service metainfo file object representation
+ */
+ private ServiceMetainfoXml metaInfoXml;
+
+ /**
+ * services root directory name
+ */
+ public static final String SERVICES_FOLDER_NAME = "services";
+
+ /**
+ * package directory name
+ */
+ private static final String PACKAGE_FOLDER_NAME = "package";
+
+ /**
+ * service metainfo file name
+ */
+ private static final String SERVICE_METAINFO_FILE_NAME = "metainfo.xml";
+
+ /**
+ * stack definition file unmarshaller
+ */
+ ModuleFileUnmarshaller unmarshaller = new ModuleFileUnmarshaller();
+
+ /**
+ * logger instance
+ */
+ private final static Logger LOG = LoggerFactory.getLogger(ServiceDirectory.class);
+
+
+ /**
+ * Constructor.
+ *
+ * @param servicePath path of the service directory
+ * @throws AmbariException if unable to parse the service directory
+ */
+ public ServiceDirectory(String servicePath) throws AmbariException {
+ super(servicePath);
+ parsePath();
+
+ File mf = new File(directory.getAbsolutePath()
+ + File.separator + AmbariMetaInfo.SERVICE_METRIC_FILE_NAME);
+ metricsFile = mf.exists() ? mf : null;
+
+ File af = new File(directory.getAbsolutePath()
+ + File.separator + AmbariMetaInfo.SERVICE_ALERT_FILE_NAME);
+ alertsFile = af.exists() ? af : null;
+ }
+
+ /**
+ * Obtain the package directory path.
+ *
+ * @return package directory path
+ */
+ public String getPackageDir() {
+ return packageDir;
+ }
+
+ /**
+ * Obtain the metrics file.
+ *
+ * @return metrics file
+ */
+ public File getMetricsFile() {
+ return metricsFile;
+ }
+
+ /**
+ * Obtain the alerts file.
+ *
+ * @return alerts file
+ */
+ public File getAlertsFile() {
+ return alertsFile;
+ }
+
+ /**
+ * Obtain the service metainfo file object representation.
+ *
+ * @return the service metainfo file
+ * object representation
+ */
+ public ServiceMetainfoXml getMetaInfoFile() {
+ return metaInfoXml;
+ }
+
+ /**
+ * Parse the service directory.
+ *
+ * @throws AmbariException if unable to parse the service directory
+ */
+ private void parsePath() throws AmbariException {
+
+ File serviceDir = new File(getAbsolutePath());
+ File stackVersionDir = serviceDir.getParentFile().getParentFile();
+ File stackDir = stackVersionDir.getParentFile();
+
+ String stackId = String.format("%s-%s", stackDir.getName(), stackVersionDir.getName());
+
+ File absPackageDir = new File(getAbsolutePath() + File.separator + PACKAGE_FOLDER_NAME);
+ if (absPackageDir.isDirectory()) {
+ packageDir = absPackageDir.getPath().substring(stackDir.getParentFile().getPath().length() + 1);
+ LOG.debug(String.format("Service package folder for service %s for stack %s has been resolved to %s",
+ serviceDir.getName(), stackId, packageDir));
+ } else {
+ //todo: this seems like it should be an error case
+ LOG.debug(String.format("Service package folder %s for service %s for stack %s does not exist.",
+ absPackageDir, serviceDir.getName(), stackId));
+ }
+ parseMetaInfoFile();
+ }
+
+ /**
+ * Unmarshal the metainfo file into its object representation.
+ *
+ * @throws AmbariException if the metainfo file doesn't exist or
+ * can't be unmarshalled
+ */
+ private void parseMetaInfoFile() throws AmbariException {
+ File f = new File(getAbsolutePath() + File.separator + SERVICE_METAINFO_FILE_NAME);
+ if (! f.exists()) {
+ throw new AmbariException(String.format("Stack Definition Service at '%s' doesn't contain a metainfo.xml file",
+ f.getAbsolutePath()));
+ }
+
+ try {
+ metaInfoXml = unmarshaller.unmarshal(ServiceMetainfoXml.class, f);
+ } catch (JAXBException e) {
+ throw new AmbariException(String.format("Unable to parse service metainfo.xml file '%s' ", f.getAbsolutePath()), e);
+ }
+ }
+}
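
A short sketch, reusing the test service added earlier in this commit (path relative to the repository root): construct a ServiceDirectory and inspect what parsePath() resolved.

package org.apache.ambari.server.stack;

public class ServiceDirectoryExample {
  public static void main(String[] args) throws Exception {
    ServiceDirectory dir = new ServiceDirectory(
        "ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS");

    // Relative package folder, e.g. OTHER/1.0/services/HDFS/package
    System.out.println(dir.getPackageDir());

    System.out.println(dir.getMetricsFile()); // null unless the service ships a metrics file
    System.out.println(dir.getAlertsFile());  // null unless the service ships an alerts file
  }
}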
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
new file mode 100644
index 0000000..279361b
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
@@ -0,0 +1,289 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.CustomCommandDefinition;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.ServiceInfo;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Service module which provides all functionality related to parsing and fully
+ * resolving services from the stack definition.
+ */
+public class ServiceModule extends BaseModule<ServiceModule, ServiceInfo> {
+ /**
+ * Corresponding service info
+ */
+ private ServiceInfo serviceInfo;
+
+ /**
+ * Context which provides modules access to external functionality
+ */
+ private StackContext stackContext;
+
+ /**
+ * Map of child configuration modules keyed by configuration type
+ */
+ private Map<String, ConfigurationModule> configurationModules =
+ new HashMap<String, ConfigurationModule>();
+
+ /**
+ * Map of child component modules keyed by component name
+ */
+ private Map<String, ComponentModule> componentModules =
+ new HashMap<String, ComponentModule>();
+
+ /**
+ * Encapsulates IO operations on service directory
+ */
+ private ServiceDirectory serviceDirectory;
+
+
+ /**
+ * Constructor.
+ *
+ * @param stackContext stack context which provides module access to external functionality
+ * @param serviceInfo associated service info
+ * @param serviceDirectory used for all IO interaction with service directory in stack definition
+ */
+ public ServiceModule(StackContext stackContext, ServiceInfo serviceInfo, ServiceDirectory serviceDirectory) {
+ this.serviceInfo = serviceInfo;
+ this.stackContext = stackContext;
+ this.serviceDirectory = serviceDirectory;
+
+ serviceInfo.setMetricsFile(serviceDirectory.getMetricsFile());
+ serviceInfo.setAlertsFile(serviceDirectory.getAlertsFile());
+ serviceInfo.setSchemaVersion(AmbariMetaInfo.SCHEMA_VERSION_2);
+ serviceInfo.setServicePackageFolder(serviceDirectory.getPackageDir());
+
+ populateComponentModules();
+ populateConfigurationModules();
+ }
+
+ @Override
+ public ServiceInfo getModuleInfo() {
+ return serviceInfo;
+ }
+
+ @Override
+ public void resolve(ServiceModule parentModule, Map<String, StackModule> allStacks) throws AmbariException {
+ ServiceInfo parent = parentModule.getModuleInfo();
+
+ if (serviceInfo.getComment() == null) {
+ serviceInfo.setComment(parent.getComment());
+ }
+ if (serviceInfo.getDisplayName() == null) {
+ serviceInfo.setDisplayName(parent.getDisplayName());
+ }
+
+ if (serviceInfo.getRequiredServices() == null) {
+ serviceInfo.setRequiredServices(parent.getRequiredServices() != null ?
+ parent.getRequiredServices() :
+ Collections.<String>emptyList());
+ }
+
+ if (serviceInfo.isRestartRequiredAfterChange() == null) {
+ serviceInfo.setRestartRequiredAfterChange(parent.isRestartRequiredAfterChange());
+ }
+ if (serviceInfo.isMonitoringService() == null) {
+ serviceInfo.setMonitoringService(parent.isMonitoringService());
+ }
+ if (serviceInfo.getOsSpecifics().isEmpty() ) {
+ serviceInfo.setOsSpecifics(parent.getOsSpecifics());
+ }
+ if (serviceInfo.getCommandScript() == null) {
+ serviceInfo.setCommandScript(parent.getCommandScript());
+ }
+ if (serviceInfo.getServicePackageFolder() == null) {
+ serviceInfo.setServicePackageFolder(parent.getServicePackageFolder());
+ }
+ if (serviceInfo.getMetricsFile() == null) {
+ serviceInfo.setMetricsFile(parent.getMetricsFile());
+ }
+ if (serviceInfo.getAlertsFile() == null) {
+ serviceInfo.setAlertsFile(parent.getAlertsFile());
+ }
+
+ mergeCustomCommands(parent.getCustomCommands(), serviceInfo.getCustomCommands());
+ mergeConfigDependencies(parent);
+ mergeComponents(parentModule, allStacks);
+ mergeConfigurations(parentModule, allStacks);
+ }
+
+ @Override
+ public boolean isDeleted() {
+ return serviceInfo.isDeleted();
+ }
+
+ @Override
+ public String getId() {
+ return serviceInfo.getName();
+ }
+
+ @Override
+ public void finalizeModule() {
+ finalizeChildModules(configurationModules.values());
+ finalizeChildModules(componentModules.values());
+ finalizeConfiguration();
+ if(serviceInfo.getCommandScript() != null && ! isDeleted()) {
+ stackContext.registerServiceCheck(getId());
+ }
+ }
+
+ /**
+ * Parse and populate child component modules.
+ */
+ private void populateComponentModules() {
+ for (ComponentInfo component : serviceInfo.getComponents()) {
+ componentModules.put(component.getName(), new ComponentModule(component));
+ }
+ }
+
+ /**
+ * Parse and populate child configuration modules.
+ */
+ private void populateConfigurationModules() {
+ ConfigurationDirectory configDirectory = serviceDirectory.getConfigurationDirectory(
+ serviceInfo.getConfigDir());
+
+ if (configDirectory != null) {
+ for (ConfigurationModule config : configDirectory.getConfigurationModules()) {
+ if (! serviceInfo.getExcludedConfigTypes().contains(config.getConfigType())) {
+ ConfigurationInfo info = config.getModuleInfo();
+ serviceInfo.getProperties().addAll(info.getProperties());
+ serviceInfo.setTypeAttributes(config.getConfigType(), info.getAttributes());
+ configurationModules.put(config.getConfigType(), config);
+ }
+ }
+
+ for (String excludedType : serviceInfo.getExcludedConfigTypes()) {
+ if (! configurationModules.containsKey(excludedType)) {
+ ConfigurationInfo configInfo = new ConfigurationInfo(
+ Collections.<PropertyInfo>emptyList(), Collections.<String, String>emptyMap());
+ ConfigurationModule config = new ConfigurationModule(excludedType, configInfo);
+
+ config.setDeleted(true);
+ configurationModules.put(excludedType, config);
+ }
+ }
+ }
+ }
+
+ /**
+ * Merge configuration dependencies with parent. Child values override parent values.
+ *
+ * @param parent parent service module
+ */
+ private void mergeConfigDependencies(ServiceInfo parent) {
+ //currently there is no way to remove an inherited config dependency
+ List<String> configDependencies = serviceInfo.getConfigDependencies();
+ List<String> parentConfigDependencies = parent.getConfigDependencies() != null ?
+ parent.getConfigDependencies() : Collections.<String>emptyList();
+
+ if (configDependencies == null) {
+ serviceInfo.setConfigDependencies(parentConfigDependencies);
+ } else {
+ for (String parentDependency : parentConfigDependencies) {
+ if (! configDependencies.contains(parentDependency)) {
+ configDependencies.add(parentDependency);
+ }
+ }
+ }
+ }
+
+ /**
+ * Merge configurations with the parent configurations.
+ * This will update the child configuration module set as well as the underlying info instances.
+ *
+ * @param parent parent service module
+ * @param stacks all stack modules
+ */
+ private void mergeConfigurations(ServiceModule parent, Map<String, StackModule> stacks) throws AmbariException {
+ serviceInfo.getProperties().clear();
+ serviceInfo.setAllConfigAttributes(new HashMap<String, Map<String, Map<String, String>>>());
+
+ Collection<ConfigurationModule> mergedModules = mergeChildModules(
+ stacks, configurationModules, parent.configurationModules);
+
+ for (ConfigurationModule module : mergedModules) {
+ configurationModules.put(module.getId(), module);
+ serviceInfo.getProperties().addAll(module.getModuleInfo().getProperties());
+ serviceInfo.setTypeAttributes(module.getConfigType(), module.getModuleInfo().getAttributes());
+ }
+ }
+
+ /**
+ * Merge components with the parent components.
+ * This will update the child component module set as well as the underlying info instances.
+ */
+ private void mergeComponents(ServiceModule parent, Map<String, StackModule> stacks) throws AmbariException {
+ serviceInfo.getComponents().clear();
+ Collection<ComponentModule> mergedModules = mergeChildModules(
+ stacks, componentModules, parent.componentModules);
+
+ for (ComponentModule module : mergedModules) {
+ componentModules.put(module.getId(), module);
+ serviceInfo.getComponents().add(module.getModuleInfo());
+ }
+ }
+
+ /**
+ * Merge custom commands with the parent custom commands.
+ *
+ * @param parentCmds parent custom command collection
+ * @param childCmds child custom command collection
+ */
+ //todo: duplicated in Component Module. Can we use mergeChildModules?
+ private void mergeCustomCommands(Collection<CustomCommandDefinition> parentCmds,
+ Collection<CustomCommandDefinition> childCmds) {
+
+ Collection<String> existingNames = new HashSet<String>();
+
+ for (CustomCommandDefinition childCmd : childCmds) {
+ existingNames.add(childCmd.getName());
+ }
+ for (CustomCommandDefinition parentCmd : parentCmds) {
+ if (! existingNames.contains(parentCmd.getName())) {
+ childCmds.add(parentCmd);
+ }
+ }
+ }
+
+ /**
+ * Finalize service configurations.
+ * Ensure that all default type attributes are set.
+ */
+ private void finalizeConfiguration() {
+ for (ConfigurationModule config : configurationModules.values()) {
+ ConfigurationInfo configInfo = config.getModuleInfo();
+ configInfo.ensureDefaultAttributes();
+ serviceInfo.setTypeAttributes(config.getConfigType(), configInfo.getAttributes());
+ }
+ }
+}
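
The resolve() method above applies the same rule field by field: keep the child's value when the child metainfo declares one, otherwise fall back to the parent's. Distilled into a standalone illustration (not Ambari code; the strings are made up):

public final class ChildWinsMerge {

  // Mirrors the null-checks in ServiceModule.resolve(): the child's value wins,
  // the parent's value is used only when the child left the field unset.
  static String merge(String child, String parent) {
    return child != null ? child : parent;
  }

  public static void main(String[] args) {
    System.out.println(merge("Overridden comment", "Parent comment")); // Overridden comment
    System.out.println(merge(null, "Hadoop Distributed File System")); // Hadoop Distributed File System
  }
}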
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java
new file mode 100644
index 0000000..5d1d098
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.metadata.ActionMetadata;
+import org.apache.ambari.server.orm.dao.MetainfoDAO;
+import org.apache.ambari.server.orm.entities.MetainfoEntity;
+import org.apache.ambari.server.state.stack.LatestRepoCallable;
+import org.apache.ambari.server.state.stack.OsFamily;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.ThreadFactory;
+
+/**
+ * Provides external functionality to the Stack framework.
+ */
+public class StackContext {
+ /**
+ * Metainfo data access object
+ */
+ private MetainfoDAO metaInfoDAO;
+
+ /**
+ * Action meta data functionality
+ */
+ private ActionMetadata actionMetaData;
+
+ /**
+ * Operating System families
+ */
+ private OsFamily osFamily;
+
+ /**
+ * Executor used to get the latest repo URLs
+ */
+ private LatestRepoQueryExecutor repoUpdateExecutor = new LatestRepoQueryExecutor();
+
+ /**
+ * Repository XML base url property name
+ */
+ private static final String REPOSITORY_XML_PROPERTY_BASEURL = "baseurl";
+
+
+ /**
+ * Constructor.
+ *
+ * @param metaInfoDAO metainfo data access object
+ * @param actionMetaData action meta data
+ * @param osFamily OS family information
+ */
+ public StackContext(MetainfoDAO metaInfoDAO, ActionMetadata actionMetaData, OsFamily osFamily) {
+ this.metaInfoDAO = metaInfoDAO;
+ this.actionMetaData = actionMetaData;
+ this.osFamily = osFamily;
+ }
+
+ /**
+ * Register a service check.
+ *
+ * @param serviceName name of the service
+ */
+ public void registerServiceCheck(String serviceName) {
+ actionMetaData.addServiceCheckAction(serviceName);
+ }
+
+ /**
+ * Obtain an updated url for the repo.
+ * This will check the database for a user update of the repo url.
+ *
+ * @param repoName repository name
+ * @param stackVersion stack version
+ * @param osType OS type
+ * @param repoId repo id
+ *
+ * @return an updated url or null if the url has not been updated
+ */
+ public String getUpdatedRepoUrl(String repoName, String stackVersion, String osType, String repoId) {
+ StringBuilder sb = new StringBuilder("repo:/");
+ sb.append(repoName).append('/');
+ sb.append(stackVersion).append('/');
+ sb.append(osType).append('/');
+ sb.append(repoId);
+ sb.append(':').append(REPOSITORY_XML_PROPERTY_BASEURL);
+
+ MetainfoEntity entity = metaInfoDAO.findByKey(sb.toString());
+ return entity != null ? entity.getMetainfoValue() : null;
+ }
+
+ /**
+ * Register a task to obtain the latest repo url from an external location.
+ *
+ * @param url external repo information URL
+ * @param stack stack module
+ */
+ public void registerRepoUpdateTask(String url, StackModule stack) {
+ repoUpdateExecutor.addTask(new LatestRepoCallable(url,
+ new File(stack.getStackDirectory().getRepoDir()), stack.getModuleInfo(), osFamily));
+ }
+
+ /**
+ * Execute the registered repo update tasks.
+ */
+ public void executeRepoTasks() {
+ repoUpdateExecutor.execute();
+ }
+
+ /**
+ * Determine if all registered repo update tasks have completed.
+ *
+ * @return true if all tasks have completed; false otherwise
+ */
+ public boolean haveAllRepoTasksCompleted() {
+ return repoUpdateExecutor.hasCompleted();
+ }
+
+
+ /**
+ * Executor used to execute repository update tasks.
+ * Tasks will be executed in a single executor thread.
+ */
+ public static class LatestRepoQueryExecutor {
+ /**
+ * Registered tasks
+ */
+ private Collection<LatestRepoCallable> tasks = new ArrayList<LatestRepoCallable>();
+
+ /**
+ * Task futures
+ */
+ Collection<Future<Void>> futures = new ArrayList<Future<Void>>();
+ /**
+ * Underlying executor
+ */
+ private ExecutorService executor = Executors.newSingleThreadExecutor(new ThreadFactory() {
+ @Override
+ public Thread newThread(Runnable r) {
+ return new Thread(r, "Stack Version Loading Thread");
+ }
+ });
+
+
+ /**
+ * Add a task.
+ *
+ * @param task task to be added
+ */
+ public void addTask(LatestRepoCallable task) {
+ tasks.add(task);
+ }
+
+ /**
+ * Execute all tasks.
+ */
+ public void execute() {
+ for (LatestRepoCallable task : tasks) {
+ futures.add(executor.submit(task));
+ }
+ executor.shutdown();
+ }
+
+ /**
+ * Determine whether all tasks have completed.
+ *
+ * @return true if all tasks have completed; false otherwise
+ */
+ public boolean hasCompleted() {
+ for (Future<Void> f : futures) {
+ if (! f.isDone()) {
+ return false;
+ }
+ }
+ return true;
+ }
+ }
+}
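
getUpdatedRepoUrl() builds a metainfo key of the form repo:/<name>/<stack version>/<os>/<repo id>:baseurl and looks it up in the database. A tiny illustration of the key it would construct, with hypothetical argument values:

public class RepoKeyExample {
  public static void main(String[] args) {
    // Same concatenation as getUpdatedRepoUrl(), with made-up arguments.
    String key = "repo:/" + "HDP" + '/' + "2.0.6" + '/' + "redhat6" + '/'
        + "HDP-2.0.6" + ':' + "baseurl";
    System.out.println(key); // repo:/HDP/2.0.6/redhat6/HDP-2.0.6:baseurl
  }
}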
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDefinitionDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDefinitionDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDefinitionDirectory.java
new file mode 100644
index 0000000..8f81b5a
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDefinitionDirectory.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+
+import java.io.File;
+import java.io.FilenameFilter;
+
+/**
+ * Base stack definition directory.
+ * Contains functionality common across directory types.
+ */
+public abstract class StackDefinitionDirectory {
+ /**
+ * xml filename filter
+ */
+ protected static final FilenameFilter XML_FILENAME_FILTER = new FilenameFilter() {
+ @Override
+ public boolean accept(File folder, String fileName) {
+ return fileName.toLowerCase().endsWith(".xml");
+ }
+ };
+
+ /**
+ * underlying directory
+ */
+ protected File directory;
+
+
+ /**
+ * Constructor.
+ *
+ * @param directory underlying directory
+ */
+ public StackDefinitionDirectory(String directory) {
+ //todo: handle non-existent dir
+ this.directory = new File(directory);
+ }
+
+ /**
+ * Obtain the configuration sub-directory instance for the specified path.
+ *
+ * @param directoryName name of the configuration directory
+ * @return ConfigurationDirectory instance for the specified configuration directory name
+ */
+ public ConfigurationDirectory getConfigurationDirectory(String directoryName) {
+ ConfigurationDirectory configDirectory = null;
+ File configDirFile = new File(directory.getAbsolutePath() + File.separator + directoryName);
+ if (configDirFile.exists() && configDirFile.isDirectory()) {
+ configDirectory = new ConfigurationDirectory(configDirFile.getAbsolutePath());
+ }
+ return configDirectory;
+ }
+
+ /**
+ * Obtain the path for this directory instance.
+ *
+ * @return the path represented by this directory
+ */
+ public String getPath() {
+ return directory.getPath();
+ }
+
+ /**
+ * Obtain the absolute path for this directory instance.
+ *
+ * @return the absolute path represented by this directory
+ */
+ public String getAbsolutePath() {
+ return directory.getAbsolutePath();
+ }
+
+ /**
+ * Obtain the name of the directory.
+ *
+ * @return name of the directory
+ */
+ public String getName() {
+ return directory.getName();
+ }
+
+ /**
+ * Obtain the underlying directory.
+ *
+ * @return the underlying directory file
+ */
+ protected File getDirectory() {
+ return directory;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDefinitionModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDefinitionModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDefinitionModule.java
new file mode 100644
index 0000000..cc088e3
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDefinitionModule.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+
+import org.apache.ambari.server.AmbariException;
+
+import java.util.Map;
+
+/**
+ * Stack Definition Module.
+ * Represents a module within a stack definition tree. For each stack version specified in
+ * a stack definition a tree will exist with a stack module being the root node. Each module
+ * may have a parent as well as child modules. Each module has an associated "info" object
+ * which contains the underlying state that is being wrapped by the module.
+ */
+public interface StackDefinitionModule <T, I> {
+ /**
+ * Resolve the module state with the specified parent.
+ *
+ * @param parent the parent that this module will be merged with
+ * @param allStacks collection of all stack modules in the tree
+ *
+ * @throws AmbariException if resolution fails
+ */
+ public void resolve(T parent, Map<String, StackModule> allStacks) throws AmbariException;
+
+ /**
+ * Obtain the associated module information.
+ *
+ * @return associated module information
+ */
+ public I getModuleInfo();
+
+ /**
+ * Determine whether the module has been marked for deletion.
+ *
+ * @return true if the module is marked for deletion; otherwise false
+ */
+ public boolean isDeleted();
+
+ /**
+ * Obtain the id of the module.
+ *
+ * @return module id
+ */
+ public String getId();
+
+ /**
+ * Lifecycle event which is called when the associated stack has been fully resolved.
+ */
+ public void finalizeModule();
+}
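
A deliberately tiny implementation of the interface (illustrative only, not part of this commit) to show the lifecycle contract: resolve() fills missing state from the parent, and finalizeModule() runs once the whole tree has been resolved.

package org.apache.ambari.server.stack;

import java.util.Map;

import org.apache.ambari.server.AmbariException;

public class EchoModule implements StackDefinitionModule<EchoModule, String> {
  private final String id;
  private String info;

  public EchoModule(String id, String info) {
    this.id = id;
    this.info = info;
  }

  @Override
  public void resolve(EchoModule parent, Map<String, StackModule> allStacks) throws AmbariException {
    if (info == null && parent != null) {
      info = parent.info; // child wins; fall back to the parent only when unset
    }
  }

  @Override
  public String getModuleInfo() {
    return info;
  }

  @Override
  public boolean isDeleted() {
    return false;
  }

  @Override
  public String getId() {
    return id;
  }

  @Override
  public void finalizeModule() {
    // nothing to finalize for this toy module
  }
}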
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java
new file mode 100644
index 0000000..2a30e40
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java
@@ -0,0 +1,365 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.state.stack.RepositoryXml;
+import org.apache.ambari.server.state.stack.StackMetainfoXml;
+import org.apache.ambari.server.state.stack.UpgradePack;
+import org.apache.commons.io.FilenameUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.bind.JAXBException;
+import java.io.File;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+
+/**
+ * Encapsulates IO operations on a stack definition stack directory.
+ */
+//todo: Normalize all path return values.
+//todo: Currently some are relative and some are absolute.
+//todo: Current values were dictated by the StackInfo expectations.
+public class StackDirectory extends StackDefinitionDirectory {
+ /**
+ * hooks directory path
+ */
+ private String hooksDir;
+
+ /**
+ * upgrades directory path
+ */
+ private String upgradesDir;
+
+ /**
+ * rco file path
+ */
+ private String rcoFilePath;
+
+ /**
+ * repository file
+ */
+ private RepositoryXml repoFile;
+
+ /**
+ * repository directory
+ */
+ private String repoDir;
+
+ /**
+ * collection of service directories
+ */
+ private Collection<ServiceDirectory> serviceDirectories;
+
+ /**
+ * map of upgrade pack name to upgrade pack
+ */
+ //todo: should be a collection but upgrade pack doesn't have a name attribute
+ private Map<String, UpgradePack> upgradePacks;
+
+ /**
+ * metainfo file representation
+ */
+ private StackMetainfoXml metaInfoXml;
+
+ /**
+ * file unmarshaller
+ */
+ ModuleFileUnmarshaller unmarshaller = new ModuleFileUnmarshaller();
+
+ /**
+ * name of the hooks directory
+ */
+ public static final String HOOKS_FOLDER_NAME = "hooks";
+
+ /**
+ * repository directory name
+ */
+ private final static String REPOSITORY_FOLDER_NAME = "repos";
+
+ /**
+ * repository file name
+ */
+ private final static String REPOSITORY_FILE_NAME = "repoinfo.xml";
+
+ /**
+ * metainfo file name
+ */
+ private static final String STACK_METAINFO_FILE_NAME = "metainfo.xml";
+
+ /**
+ * upgrades directory name
+ */
+ private static final String UPGRADE_PACK_FOLDER_NAME = "upgrades";
+
+ /**
+ * logger instance
+ */
+ private final static Logger LOG = LoggerFactory.getLogger(StackDirectory.class);
+
+
+ /**
+ * Constructor.
+ *
+ * @param directory stack directory
+ * @throws AmbariException if unable to parse the stack directory
+ */
+ public StackDirectory(String directory) throws AmbariException {
+ super(directory);
+ parsePath();
+ }
+
+ /**
+ * Obtain the stack directory name.
+ *
+ * @return stack directory name
+ */
+ public String getStackDirName() {
+ return getDirectory().getParentFile().getName();
+ }
+
+ /**
+ * Obtain the hooks directory path.
+ *
+ * @return hooks directory path
+ */
+ public String getHooksDir() {
+ return hooksDir;
+ }
+
+ /**
+ * Obtain the upgrades directory path.
+ *
+ * @return upgrades directory path
+ */
+ public String getUpgradesDir() {
+ return upgradesDir;
+ }
+
+ /**
+ * Obtain the rco file path.
+ *
+ * @return rco file path
+ */
+ public String getRcoFilePath() {
+ return rcoFilePath;
+ }
+
+ /**
+ * Obtain the repository directory path.
+ *
+ * @return repository directory path
+ */
+ public String getRepoDir() {
+ return repoDir;
+ }
+
+ /**
+ * Obtain the repository file object representation.
+ *
+ * @return repository file object representation
+ */
+ public RepositoryXml getRepoFile() {
+ return repoFile;
+ }
+
+ /**
+ * Obtain the object representation of the stack metainfo.xml file.
+ *
+ * @return object representation of the stack metainfo.xml file
+ */
+ public StackMetainfoXml getMetaInfoFile() {
+ return metaInfoXml;
+ }
+
+ /**
+ * Obtain a collection of all service directories.
+ *
+ * @return collection of all service directories
+ */
+ public Collection<ServiceDirectory> getServiceDirectories() {
+ return serviceDirectories;
+ }
+
+ /**
+ * Obtain a map of all upgrade packs.
+ *
+ * @return map of upgrade pack name to upgrade pack or null if no packs available
+ */
+ public Map<String, UpgradePack> getUpgradePacks() {
+ return upgradePacks;
+ }
+
+ /**
+ * Parse the stack directory.
+ *
+ * @throws AmbariException if unable to parse the directory
+ */
+ private void parsePath() throws AmbariException {
+ Collection<String> subDirs = Arrays.asList(directory.list());
+ if (subDirs.contains(HOOKS_FOLDER_NAME)) {
+ // hooksDir is expected to be relative to stack root
+ hooksDir = getStackDirName() + File.separator + getName() +
+ File.separator + HOOKS_FOLDER_NAME;
+ } else {
+ LOG.debug("Hooks folder " + getAbsolutePath() + File.separator +
+ HOOKS_FOLDER_NAME + " does not exist");
+ }
+
+ if (subDirs.contains(AmbariMetaInfo.RCO_FILE_NAME)) {
+ // rcoFile is expected to be absolute
+ rcoFilePath = getAbsolutePath() + File.separator + AmbariMetaInfo.RCO_FILE_NAME;
+ }
+
+ parseUpgradePacks(subDirs);
+ parseServiceDirectories(subDirs);
+ parseRepoFile(subDirs);
+ parseMetaInfoFile();
+ }
+
+ /**
+ * Parse the repository file.
+ *
+ * @param subDirs stack directory sub directories
+ * @throws AmbariException if unable to parse the repository file
+ */
+ private void parseRepoFile(Collection<String> subDirs) throws AmbariException {
+ File repositoryFile;
+
+ if (subDirs.contains(REPOSITORY_FOLDER_NAME)) {
+ repoDir = getAbsolutePath() + File.separator + REPOSITORY_FOLDER_NAME;
+ repositoryFile = new File(getPath() + File.separator +
+ REPOSITORY_FOLDER_NAME + File.separator + REPOSITORY_FILE_NAME);
+
+ if (repositoryFile.exists()) {
+ try {
+ repoFile = unmarshaller.unmarshal(RepositoryXml.class, repositoryFile);
+ } catch (JAXBException e) {
+ throw new AmbariException("Unable to parse repo file at location: " +
+ repositoryFile.getAbsolutePath(), e);
+ }
+ }
+ }
+
+ if (repoFile == null) {
+ LOG.warn("No repository information defined for "
+ + ", stackName=" + getStackDirName()
+ + ", stackVersion=" + getPath()
+ + ", repoFolder=" + getPath() + File.separator + REPOSITORY_FOLDER_NAME);
+ }
+ }
+
+ /**
+ * Parse the stack metainfo file.
+ *
+ * @throws AmbariException if unable to parse the stack metainfo file
+ */
+ private void parseMetaInfoFile() throws AmbariException {
+ File stackMetaInfoFile = new File(getAbsolutePath()
+ + File.separator + STACK_METAINFO_FILE_NAME);
+
+ //todo: is it ok for this file not to exist?
+ if (stackMetaInfoFile.exists()) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Reading stack version metainfo from file " + stackMetaInfoFile.getAbsolutePath());
+ }
+
+ try {
+ metaInfoXml = unmarshaller.unmarshal(StackMetainfoXml.class, stackMetaInfoFile);
+ } catch (JAXBException e) {
+ throw new AmbariException("Unable to parse stack metainfo.xml file at location: " +
+ stackMetaInfoFile.getAbsolutePath(), e);
+ }
+ }
+ }
+
+ /**
+ * Parse the stacks service directories.
+ *
+ * @param subDirs stack sub directories
+ * @throws AmbariException if unable to parse the service directories
+ */
+ private void parseServiceDirectories(Collection<String> subDirs) throws AmbariException {
+ Collection<ServiceDirectory> dirs = new HashSet<ServiceDirectory>();
+
+ if (subDirs.contains(ServiceDirectory.SERVICES_FOLDER_NAME)) {
+ String servicesDir = getAbsolutePath() + File.separator + ServiceDirectory.SERVICES_FOLDER_NAME;
+ File baseServiceDir = new File(servicesDir);
+ File[] serviceFolders = baseServiceDir.listFiles(AmbariMetaInfo.FILENAME_FILTER);
+ if (serviceFolders != null) {
+ for (File d : serviceFolders) {
+ if (d.isDirectory()) {
+ try {
+ dirs.add(new ServiceDirectory(d.getAbsolutePath()));
+ } catch (AmbariException e) {
+ //todo: this seems as though we should propagate this exception
+ //todo: eating it now to keep backwards compatibility
+ LOG.warn(String.format("Unable to parse stack definition service at '%s'. Ignoring service. : %s",
+ d.getAbsolutePath(), e.toString()));
+ }
+ }
+ }
+ }
+ }
+
+ if (dirs.isEmpty()) {
+ //todo: what does it mean for a stack to have no services?
+ LOG.info("The stack defined at '" + getAbsolutePath() + "' contains no services");
+ }
+ serviceDirectories = dirs;
+ }
+
+ /**
+ * Parse all stack upgrade files for the stack.
+ *
+ * @param subDirs stack sub directories
+ * @throws AmbariException if unable to parse stack upgrade file
+ */
+ private void parseUpgradePacks(Collection<String> subDirs) throws AmbariException {
+ Map<String, UpgradePack> upgradeMap = new HashMap<String, UpgradePack>();
+ if (subDirs.contains(UPGRADE_PACK_FOLDER_NAME)) {
+ File f = new File(getAbsolutePath() + File.separator + UPGRADE_PACK_FOLDER_NAME);
+ if (f.isDirectory()) {
+ upgradesDir = f.getAbsolutePath();
+ for (File upgradeFile : f.listFiles(XML_FILENAME_FILTER)) {
+ try {
+ upgradeMap.put(FilenameUtils.removeExtension(upgradeFile.getName()),
+ unmarshaller.unmarshal(UpgradePack.class, upgradeFile));
+ } catch (JAXBException e) {
+ throw new AmbariException("Unable to parse stack upgrade file at location: " +
+ upgradeFile.getAbsolutePath(), e);
+ }
+ }
+ }
+ }
+
+ if (upgradesDir == null) {
+ LOG.info("Stack '{}' doesn't contain an upgrade directory ", getPath());
+ }
+
+ if (! upgradeMap.isEmpty()) {
+ upgradePacks = upgradeMap;
+ }
+ }
+}
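
For orientation, a minimal usage sketch of the new StackDirectory class follows. The
stack root path and the HDP/2.0.6 layout are assumptions for illustration only; the
accessors are the ones defined above.

    // Illustrative only: the path and the HDP/2.0.6 layout are assumptions.
    StackDirectory stackDir =
        new StackDirectory("/var/lib/ambari-server/resources/stacks/HDP/2.0.6");

    // The stack name is derived from the parent folder of the version directory.
    String stackName = stackDir.getStackDirName();   // "HDP" in this layout

    // Optional artifacts come back as null when the file or folder is missing,
    // so callers have to guard against that.
    if (stackDir.getRepoFile() != null) {
      // repos/repoinfo.xml was found and unmarshalled into a RepositoryXml
    }
    if (stackDir.getMetaInfoFile() != null) {
      // metainfo.xml was found at the stack version root
    }

    // Each directory under services/ that parsed cleanly becomes a ServiceDirectory.
    for (ServiceDirectory serviceDir : stackDir.getServiceDirectories()) {
      // one entry per service definition, e.g. services/HDFS
    }
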
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java
new file mode 100644
index 0000000..2a14f51
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.state.StackInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+
+
+/**
+ * Manages all stack related behavior including parsing of stacks and providing access to
+ * stack information.
+ */
+public class StackManager {
+ /**
+ * Provides access to non-stack server functionality
+ */
+ private StackContext stackContext;
+
+ /**
+ * Logger
+ */
+ private final static Logger LOG = LoggerFactory.getLogger(StackManager.class);
+
+ /**
+ * Map of stack name to stack info
+ */
+ private Map<String, StackInfo> stackMap = new HashMap<String, StackInfo>();
+
+
+ /**
+ * Constructor.
+ *
+ * @param stackRoot stack root directory
+ * @param stackContext context which provides external functionality
+ *
+ * @throws AmbariException if an exception occurs while processing the stacks
+ */
+ public StackManager(File stackRoot, StackContext stackContext) throws AmbariException {
+ validateStackDirectory(stackRoot);
+
+ this.stackContext = stackContext;
+ Map<String, StackModule> stackModules = new HashMap<String, StackModule>();
+ File[] stackFiles = stackRoot.listFiles(AmbariMetaInfo.FILENAME_FILTER);
+ for (File stack : stackFiles) {
+ if (stack.isFile()) {
+ continue;
+ }
+ for (File stackFolder : stack.listFiles(AmbariMetaInfo.FILENAME_FILTER)) {
+ if (stackFolder.isFile()) {
+ continue;
+ }
+ String stackName = stackFolder.getParentFile().getName();
+ String stackVersion = stackFolder.getName();
+
+ StackModule stackModule = new StackModule(new StackDirectory(stackFolder.getPath()), stackContext);
+ stackModules.put(stackName + stackVersion, stackModule);
+ stackMap.put(stackName + stackVersion, stackModule.getModuleInfo());
+ }
+ }
+
+ if (stackMap.isEmpty()) {
+ throw new AmbariException("Unable to find stack definitions under " +
+ "stackRoot = " + stackRoot.getAbsolutePath());
+ }
+
+ fullyResolveStacks(stackModules);
+ }
+
+ /**
+ * Obtain the stack info specified by name and version.
+ *
+ * @param name name of the stack
+ * @param version version of the stack
+ * @return The stack corresponding to the specified name and version.
+ * If no matching stack exists, null is returned.
+ */
+ public StackInfo getStack(String name, String version) {
+ return stackMap.get(name + version);
+ }
+
+ /**
+ * Obtain all stacks for the given name.
+ *
+ * @param name stack name
+ * @return A collection of all stacks with the given name.
+ * If no stacks match the specified name, an empty collection is returned.
+ */
+ public Collection<StackInfo> getStacks(String name) {
+ Collection<StackInfo> stacks = new HashSet<StackInfo>();
+ for (StackInfo stack: stackMap.values()) {
+ if (stack.getName().equals(name)) {
+ stacks.add(stack);
+ }
+ }
+ return stacks;
+ }
+
+ /**
+ * Obtain all stacks.
+ *
+ * @return collection of all stacks
+ */
+ public Collection<StackInfo> getStacks() {
+ return stackMap.values();
+ }
+
+ /**
+ * Determine if all tasks which update stack repo urls have completed.
+ *
+ * @return true if all of the repo update tasks have completed; false otherwise
+ */
+ public boolean haveAllRepoUrlsBeenResolved() {
+ return stackContext.haveAllRepoTasksCompleted();
+ }
+
+ /**
+ * Fully resolve all stacks.
+ *
+ * @param stackModules map of stack id which contains name and version to stack module.
+ * @throws AmbariException if unable to resolve all stacks
+ */
+ private void fullyResolveStacks(Map<String, StackModule> stackModules) throws AmbariException {
+ for (StackModule stack : stackModules.values()) {
+ if (stack.getResolutionState() == StackModule.State.INIT) {
+ stack.resolve(null, stackModules);
+ }
+ }
+ // execute all of the repo tasks in a single thread executor
+ stackContext.executeRepoTasks();
+ }
+
+ /**
+ * Validate that the specified stack root is a valid directory.
+ * @param stackRoot the stack root directory to validate
+ * @throws AmbariException if the specified stack root directory is invalid
+ */
+ private void validateStackDirectory(File stackRoot) throws AmbariException {
+ String stackRootAbsPath = stackRoot.getAbsolutePath();
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Loading stack information"
+ + ", stackRoot = " + stackRootAbsPath);
+ }
+
+ if (!stackRoot.exists() || !stackRoot.isDirectory()) {
+ throw new AmbariException(Configuration.METADETA_DIR_PATH
+ + " should be a directory with stack definitions, stackRoot = " + stackRootAbsPath);
+ }
+ }
+}
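
A lookup sketch for the new StackManager; how a StackContext instance is obtained is
outside this file, so it is treated here as an already-constructed collaborator, and
the stack root path is an assumption.

    File stackRoot = new File("/var/lib/ambari-server/resources/stacks");
    StackManager stackManager = new StackManager(stackRoot, stackContext);

    // Lookups are keyed internally by the concatenation of name and version
    // ("HDP" + "2.0.6"), so an exact version string is required; a miss returns null.
    StackInfo hdp = stackManager.getStack("HDP", "2.0.6");

    // Every parsed version of one stack, or every stack under the root.
    Collection<StackInfo> allHdpVersions = stackManager.getStacks("HDP");
    Collection<StackInfo> allStacks = stackManager.getStacks();
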
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
new file mode 100644
index 0000000..20dba84
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
@@ -0,0 +1,498 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.state.RepositoryInfo;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.stack.RepositoryXml;
+import org.apache.ambari.server.state.stack.ServiceMetainfoXml;
+import org.apache.ambari.server.state.stack.StackMetainfoXml;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Stack module which provides all functionality related to parsing and fully
+ * resolving stacks from the stack definition.
+ *
+ * <p>
+ * Each stack node is identified by name and version, contains service and configuration
+ * child nodes and may extend a single parent stack.
+ * </p>
+ *
+ * <p>
+ * Resolution of a stack is a depth first traversal up the inheritance chain where each stack node
+ * calls resolve on its parent before resolving itself. After the parent resolve call returns, all
+ * ancestors in the inheritance tree are fully resolved. The act of resolving the stack includes
+ * resolution of the configuration and services children of the stack as well as merging of other stack
+ * state with the fully resolved parent.
+ * </p>
+ *
+ * <p>
+ * Configuration child node resolution involves merging configuration types, properties and attributes
+ * with the fully resolved parent.
+ * </p>
+ *
+ * <p>
+ * Because a service may explicitly extend another service in a stack outside of the inheritance tree,
+ * service child node resolution involves a depth first resolution of the stack associated with the
+ * service's explicit parent, if any. This follows the same steps defined above for stack node
+ * resolution. After the service's explicit parent is fully resolved, the service's state is merged
+ * with its parent.
+ * </p>
+ *
+ * <p>
+ * If a cycle in a stack definition is detected, an exception is thrown from the resolve call.
+ * </p>
+ *
+ */
+public class StackModule extends BaseModule<StackModule, StackInfo> {
+ /**
+ * Visitation state enum used for cycle detection
+ */
+ public enum State { INIT, VISITED, RESOLVED }
+
+ /**
+ * Visitation state of the stack
+ */
+ private State resolutionState = State.INIT;
+
+ /**
+ * Context which provides access to external functionality
+ */
+ private StackContext stackContext;
+
+ /**
+ * Map of child configuration modules keyed by configuration type
+ */
+ private Map<String, ConfigurationModule> configurationModules = new HashMap<String, ConfigurationModule>();
+
+ /**
+ * Map of child service modules keyed by service name
+ */
+ private Map<String, ServiceModule> serviceModules = new HashMap<String, ServiceModule>();
+
+ /**
+ * Corresponding StackInfo instance
+ */
+ private StackInfo stackInfo;
+
+ /**
+ * Encapsulates IO operations on stack directory
+ */
+ private StackDirectory stackDirectory;
+
+ /**
+ * Stack id which is in the form stackName:stackVersion
+ */
+ private String id;
+
+ /**
+ * Logger
+ */
+ private final static Logger LOG = LoggerFactory.getLogger(StackModule.class);
+
+ /**
+ * Constructor.
+ * @param stackDirectory represents stack directory
+ * @param stackContext general stack context
+ */
+ public StackModule(StackDirectory stackDirectory, StackContext stackContext) {
+ this.stackDirectory = stackDirectory;
+ this.stackContext = stackContext;
+ this.stackInfo = new StackInfo();
+ populateStackInfo();
+ }
+
+ /**
+ * Fully resolve the stack. See stack resolution description in the class documentation.
+ * If a parent stack is specified, this stack is merged against its fully resolved
+ * parent. Merging applies to all stack state including child service and
+ * configuration modules. Services may extend a service in another version in the
+ * same stack hierarchy or may explicitly extend a service in a stack in a different
+ * hierarchy.
+ *
+ * @param parentModule not used. Each stack determines its own parent since stacks don't
+ * have containing modules
+ * @param allStacks all stacks modules contained in the stack definition
+ *
+ * @throws AmbariException if an exception occurs during stack resolution
+ */
+ @Override
+ public void resolve(StackModule parentModule, Map<String, StackModule> allStacks) throws AmbariException {
+ resolutionState = State.VISITED;
+ String parentVersion = stackInfo.getParentStackVersion();
+ // merge with parent version of same stack definition
+ if (parentVersion != null) {
+ mergeStackWithParent(allStacks, parentVersion);
+ }
+ mergeServicesWithExplicitParent(allStacks);
+ processRepositories();
+ resolutionState = State.RESOLVED;
+
+ finalizeModule();
+ }
+
+ @Override
+ public StackInfo getModuleInfo() {
+ return stackInfo;
+ }
+
+ @Override
+ public boolean isDeleted() {
+ return false;
+ }
+
+ @Override
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public void finalizeModule() {
+ finalizeChildModules(serviceModules.values());
+ finalizeChildModules(configurationModules.values());
+ }
+
+ /**
+ * Get the associated stack directory.
+ *
+ * @return associated stack directory
+ */
+ public StackDirectory getStackDirectory() {
+ return stackDirectory;
+ }
+
+ /**
+ * Stack resolution state.
+ * Initial state is INIT.
+ * When resolve is called state is set to VISITED.
+ * When resolve completes, state is set to RESOLVED.
+ *
+ * @return the stack's resolution state
+ */
+ public State getResolutionState() {
+ return resolutionState;
+ }
+
+ /**
+ * Merge the stack with its parent.
+ *
+ * @param allStacks all stacks in stack definition
+ * @param parentVersion version of the stack's parent
+ *
+ * @throws AmbariException if an exception occurs merging with the parent
+ */
+ private void mergeStackWithParent(Map<String, StackModule> allStacks, String parentVersion) throws AmbariException {
+ String parentStackKey = stackInfo.getName() + parentVersion;
+ StackModule parentStack = allStacks.get(parentStackKey);
+
+ if (parentStack == null) {
+ throw new AmbariException("Stack '" + stackInfo.getName() + ":" + stackInfo.getVersion() +
+ "' specifies a parent that doesn't exist");
+ }
+
+ resolveStack(parentStack, allStacks);
+ mergeConfigurations(parentStack, allStacks);
+
+ if (stackInfo.getStackHooksFolder() == null) {
+ stackInfo.setStackHooksFolder(parentStack.getModuleInfo().getStackHooksFolder());
+ }
+ mergeServicesWithParent(allStacks, parentStack);
+ }
+
+ /**
+ * Merge child services with parent stack.
+ *
+ * @param stacks all stacks in stack definition
+ * @param parentStack parent stack module
+ *
+ * @throws AmbariException if an exception occurs merging the child services with the parent stack
+ */
+ private void mergeServicesWithParent(Map<String, StackModule> stacks, StackModule parentStack) throws AmbariException {
+ stackInfo.getServices().clear();
+ Collection<ServiceModule> mergedModules = mergeChildModules(stacks, serviceModules, parentStack.serviceModules);
+ for (ServiceModule module : mergedModules) {
+ serviceModules.put(module.getId(), module);
+ stackInfo.getServices().add(module.getModuleInfo());
+ }
+ }
+
+ /**
+ * Merge services with their explicitly specified parent if one has been specified.
+ *
+ * @param stacks all stacks specified in the stack definition
+ *
+ * @throws AmbariException if an exception occurs while merging child services with their explicit parents
+ */
+ private void mergeServicesWithExplicitParent(Map<String, StackModule> stacks) throws AmbariException {
+ for (ServiceModule service : serviceModules.values()) {
+ ServiceInfo serviceInfo = service.getModuleInfo();
+ String parent = serviceInfo.getParent();
+ if (parent != null) {
+ mergeServiceWithExplicitParent(stacks, service, parent);
+ }
+ }
+ }
+
+ /**
+ * Merge a service with its explicitly specified parent.
+ * @param stacks all stacks specified in the stack definition
+ * @param service the service to merge
+ * @param parent the explicitly specified parent service
+ *
+ * @throws AmbariException if an exception occurs merging a service with its explicit parent
+ */
+ private void mergeServiceWithExplicitParent(Map<String, StackModule> stacks, ServiceModule service, String parent)
+ throws AmbariException {
+
+ ServiceInfo serviceInfo = service.getModuleInfo();
+ String[] parentToks = parent.split("/");
+ String baseStackKey = parentToks[0] + parentToks[1];
+ StackModule baseStack = stacks.get(baseStackKey);
+ if (baseStack == null) {
+ throw new AmbariException("The service '" + serviceInfo.getName() + "' in stack '" + stackInfo.getName() + ":"
+ + stackInfo.getVersion() + "' extends a service in a non-existent stack: '" + baseStackKey + "'");
+ }
+
+ resolveStack(baseStack, stacks);
+
+ ServiceModule baseService = baseStack.serviceModules.get(parentToks[2]);
+ if (baseService == null) {
+ throw new AmbariException("The service '" + serviceInfo.getName() + "' in stack '" + stackInfo.getName() + ":"
+ + stackInfo.getVersion() + "' extends a non-existent service: '" + parent + "'");
+ }
+ service.resolve(baseService, stacks);
+ }
+
+
+ /**
+ * Populate the stack module and info from the stack definition.
+ */
+ private void populateStackInfo() {
+ stackInfo.setName(stackDirectory.getStackDirName());
+ stackInfo.setVersion(stackDirectory.getName());
+
+ id = String.format("%s:%s", stackInfo.getName(), stackInfo.getVersion());
+
+ LOG.debug("Adding new stack to known stacks"
+ + ", stackName = " + stackInfo.getName()
+ + ", stackVersion = " + stackInfo.getVersion());
+
+
+ //todo: give additional thought to handling a missing metainfo.xml
+ StackMetainfoXml smx = stackDirectory.getMetaInfoFile();
+ if (smx != null) {
+ stackInfo.setMinUpgradeVersion(smx.getVersion().getUpgrade());
+ stackInfo.setActive(smx.getVersion().isActive());
+ stackInfo.setParentStackVersion(smx.getExtends());
+ stackInfo.setStackHooksFolder(stackDirectory.getHooksDir());
+ stackInfo.setRcoFileLocation(stackDirectory.getRcoFilePath());
+ stackInfo.setUpgradesFolder(stackDirectory.getUpgradesDir());
+ stackInfo.setUpgradePacks(stackDirectory.getUpgradePacks());
+ populateConfigurationModules();
+ }
+
+ try {
+ // Read the service and available configs for this stack
+ populateServices();
+ //todo: shouldn't blindly catch Exception, re-evaluate this.
+ } catch (Exception e) {
+ LOG.error("Exception caught while populating services for stack: " +
+ stackInfo.getName() + "-" + stackInfo.getVersion(), e);
+ }
+ }
+
+ /**
+ * Populate the child services.
+ */
+ private void populateServices() throws AmbariException {
+ for (ServiceDirectory serviceDir : stackDirectory.getServiceDirectories()) {
+ populateService(serviceDir);
+ }
+ }
+
+ /**
+ * Populate a child service.
+ *
+ * @param serviceDirectory the child service directory
+ */
+ private void populateService(ServiceDirectory serviceDirectory) {
+ Collection<ServiceModule> serviceModules = new ArrayList<ServiceModule>();
+ // unfortunately, we allow multiple services to be specified in the same metainfo.xml,
+ // so we can't move the unmarshal logic into ServiceModule
+ ServiceMetainfoXml metaInfoXml = serviceDirectory.getMetaInfoFile();
+ List<ServiceInfo> serviceInfos = metaInfoXml.getServices();
+
+ for (ServiceInfo serviceInfo : serviceInfos) {
+ serviceModules.add(new ServiceModule(stackContext, serviceInfo, serviceDirectory));
+ }
+ addServices(serviceModules);
+ }
+
+ /**
+ * Populate the child configurations.
+ */
+ private void populateConfigurationModules() {
+ //todo: can't exclude types in stack config
+ ConfigurationDirectory configDirectory = stackDirectory.getConfigurationDirectory(
+ AmbariMetaInfo.SERVICE_CONFIG_FOLDER_NAME);
+
+ if (configDirectory != null) {
+ for (ConfigurationModule config : configDirectory.getConfigurationModules()) {
+ stackInfo.getProperties().addAll(config.getModuleInfo().getProperties());
+ stackInfo.setConfigTypeAttributes(config.getConfigType(), config.getModuleInfo().getAttributes());
+ configurationModules.put(config.getConfigType(), config);
+ }
+ }
+ }
+
+ /**
+ * Merge configurations with the parent configurations.
+ *
+ * @param parent parent stack module
+ * @param stacks all stack modules
+ */
+ private void mergeConfigurations(StackModule parent, Map<String, StackModule> stacks) throws AmbariException {
+ stackInfo.getProperties().clear();
+ stackInfo.setAllConfigAttributes(new HashMap<String, Map<String, Map<String, String>>>());
+
+ Collection<ConfigurationModule> mergedModules = mergeChildModules(
+ stacks, configurationModules, parent.configurationModules);
+ for (ConfigurationModule module : mergedModules) {
+ configurationModules.put(module.getId(), module);
+ stackInfo.getProperties().addAll(module.getModuleInfo().getProperties());
+ stackInfo.setConfigTypeAttributes(module.getConfigType(), module.getModuleInfo().getAttributes());
+ }
+ }
+
+ /**
+ * Resolve another stack module.
+ *
+ * @param stackToBeResolved stack module to be resolved
+ * @param stacks all stack modules in stack definition
+ * @throws AmbariException if unable to resolve the stack
+ */
+ private void resolveStack(StackModule stackToBeResolved, Map<String, StackModule> stacks) throws AmbariException {
+ if (stackToBeResolved.getResolutionState() == State.INIT) {
+ stackToBeResolved.resolve(null, stacks);
+ } else if (stackToBeResolved.getResolutionState() == State.VISITED) {
+ //todo: provide more information to user about cycle
+ throw new AmbariException("Cycle detected while parsing stack definition");
+ }
+ }
+
+ /**
+ * Add a child service module to the stack.
+ *
+ * @param service service module to add
+ */
+ private void addService(ServiceModule service) {
+ ServiceInfo serviceInfo = service.getModuleInfo();
+ Object previousValue = serviceModules.put(service.getId(), service);
+ if (previousValue == null) {
+ stackInfo.getServices().add(serviceInfo);
+ }
+ }
+
+ /**
+ * Add child service modules to the stack.
+ *
+ * @param services collection of service modules to add
+ */
+ private void addServices(Collection<ServiceModule> services) {
+ for (ServiceModule service : services) {
+ addService(service);
+ }
+ }
+
+ /**
+ * Process repositories associated with the stack.
+ * @throws AmbariException if unable to fully process the stack repositories
+ */
+ private void processRepositories() throws AmbariException {
+ RepositoryXml rxml = stackDirectory.getRepoFile();
+ if (rxml == null) {
+ return;
+ }
+
+ LOG.debug("Adding repositories to stack" +
+ ", stackName=" + stackInfo.getName() +
+ ", stackVersion=" + stackInfo.getVersion() +
+ ", repoFolder=" + stackDirectory.getRepoDir());
+
+ List<RepositoryInfo> repos = new ArrayList<RepositoryInfo>();
+
+ for (RepositoryXml.Os o : rxml.getOses()) {
+ String osFamily = o.getFamily();
+ for (String os : osFamily.split(",")) {
+ for (RepositoryXml.Repo r : o.getRepos()) {
+ repos.add(processRepository(osFamily, os, r));
+ }
+ }
+ }
+
+ stackInfo.getRepositories().addAll(repos);
+
+ if (null != rxml.getLatestURI() && repos.size() > 0) {
+ stackContext.registerRepoUpdateTask(rxml.getLatestURI(), this);
+ }
+ }
+
+ /**
+ * Process a repository associated with the stack.
+ *
+ * @param osFamily OS family
+ * @param osType OS type
+ * @param r repo
+ */
+ private RepositoryInfo processRepository(String osFamily, String osType, RepositoryXml.Repo r) {
+ RepositoryInfo ri = new RepositoryInfo();
+ ri.setBaseUrl(r.getBaseUrl());
+ ri.setDefaultBaseUrl(r.getBaseUrl());
+ ri.setMirrorsList(r.getMirrorsList());
+ ri.setOsType(osType.trim());
+ ri.setRepoId(r.getRepoId());
+ ri.setRepoName(r.getRepoName());
+ ri.setLatestBaseUrl(r.getBaseUrl());
+
+ LOG.debug("Checking for override for base_url");
+ String updatedUrl = stackContext.getUpdatedRepoUrl(r.getRepoName(), stackInfo.getVersion(),
+ osFamily, r.getRepoId());
+
+ if (null != updatedUrl) {
+ ri.setBaseUrl(updatedUrl);
+ }
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Adding repo to stack"
+ + ", repoInfo=" + ri.toString());
+ }
+ return ri;
+ }
+}
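
The resolve contract described in the class comment amounts to a three-state,
depth-first walk over the inheritance graph. The stand-alone sketch below shows just
that cycle-detection mechanism; the Node type and its single parent field are invented
for illustration, whereas the real StackModule also merges services, configurations
and repositories as part of resolution.

    // Sketch of the INIT/VISITED/RESOLVED cycle check, reduced to one parent edge.
    final class Node {
      enum State { INIT, VISITED, RESOLVED }

      State state = State.INIT;
      Node parent;                        // null when nothing is extended

      void resolve() {
        state = State.VISITED;            // mark "in progress"
        if (parent != null) {
          if (parent.state == State.INIT) {
            parent.resolve();             // depth first: ancestors finish first
          } else if (parent.state == State.VISITED) {
            // the parent is still being resolved further up the call stack,
            // so following the edge again would recurse forever
            throw new IllegalStateException("Cycle detected while parsing stack definition");
          }
          // State.RESOLVED: the parent is already complete, nothing to do
        }
        state = State.RESOLVED;           // StackModule merges with the parent here
      }
    }
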
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/state/AutoDeployInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/AutoDeployInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/AutoDeployInfo.java
index 9e97d5f..56f1dbc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/AutoDeployInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/AutoDeployInfo.java
@@ -72,4 +72,24 @@ public class AutoDeployInfo {
public String getCoLocate() {
return m_coLocate;
}
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ AutoDeployInfo that = (AutoDeployInfo) o;
+
+ if (m_enabled != that.m_enabled) return false;
+ if (m_coLocate != null ? !m_coLocate.equals(that.m_coLocate) : that.m_coLocate != null) return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = (m_enabled ? 1 : 0);
+ result = 31 * result + (m_coLocate != null ? m_coLocate.hashCode() : 0);
+ return result;
+ }
}
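
A quick check of the value semantics added to AutoDeployInfo. The setEnabled and
setCoLocate setters are assumed to exist alongside the getters visible in this hunk,
and the co-locate value is made up.

    // Two logically identical definitions now compare equal and collapse in
    // hash-based collections, which helps when comparing inherited definitions.
    AutoDeployInfo a = new AutoDeployInfo();
    a.setEnabled(true);
    a.setCoLocate("HDFS/NAMENODE");

    AutoDeployInfo b = new AutoDeployInfo();
    b.setEnabled(true);
    b.setCoLocate("HDFS/NAMENODE");

    assert a.equals(b) && a.hashCode() == b.hashCode();

    Set<AutoDeployInfo> set = new HashSet<AutoDeployInfo>();
    set.add(a);
    set.add(b);
    assert set.size() == 1;   // the duplicate is dropped
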
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
deleted file mode 100644
index e014627..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
+++ /dev/null
@@ -1,975 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.server.api.util;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.ListIterator;
-import java.util.Map;
-import java.util.Set;
-
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.JAXBException;
-import javax.xml.bind.Unmarshaller;
-import javax.xml.namespace.QName;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathExpression;
-import javax.xml.xpath.XPathExpressionException;
-import javax.xml.xpath.XPathFactory;
-
-import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.api.services.AmbariMetaInfo;
-import org.apache.ambari.server.metadata.ActionMetadata;
-import org.apache.ambari.server.state.ClientConfigFileDefinition;
-import org.apache.ambari.server.state.CommandScriptDefinition;
-import org.apache.ambari.server.state.ComponentInfo;
-import org.apache.ambari.server.state.ConfigHelper;
-import org.apache.ambari.server.state.CustomCommandDefinition;
-import org.apache.ambari.server.state.DependencyInfo;
-import org.apache.ambari.server.state.PropertyInfo;
-import org.apache.ambari.server.state.ServiceInfo;
-import org.apache.ambari.server.state.ServiceOsSpecific;
-import org.apache.ambari.server.state.StackInfo;
-import org.apache.ambari.server.state.stack.ConfigurationXml;
-import org.apache.ambari.server.state.stack.RepositoryXml;
-import org.apache.ambari.server.state.stack.ServiceMetainfoXml;
-import org.apache.ambari.server.state.stack.StackMetainfoXml;
-import org.apache.ambari.server.state.stack.UpgradePack;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3c.dom.Document;
-import org.xml.sax.SAXException;
-
-import com.google.inject.Injector;
-
-/**
- * Helper methods for providing stack extension behavior -
- * Apache Jira: AMBARI-2819
- *
- * Stack extension processing is done in two steps. In the first step, we parse
- * all information for every stack from the stack files. In the second step, we
- * go through the parents and perform inheritance where needed. In both steps,
- * stacks are processed in random order, which is why the extension implementation
- * for any new stack/service/component property should also consist of two
- * separate steps (otherwise a child may happen to be processed before its
- * parent's properties are populated).
- */
-public class StackExtensionHelper {
- private ActionMetadata actionMetadata;
-
- private File stackRoot;
- private final static Logger LOG = LoggerFactory.getLogger(StackExtensionHelper.class);
- private final Map<String, StackInfo> stackVersionMap = new HashMap<String,
- StackInfo>();
- private Map<String, List<StackInfo>> stackParentsMap = null;
- public final static String HOOKS_FOLDER_NAME = "hooks";
- private static final String PACKAGE_FOLDER_NAME = "package";
-
- private static final Map<Class<?>, JAXBContext> _jaxbContexts =
- new HashMap<Class<?>, JAXBContext> ();
- static {
- try {
- JAXBContext ctx = JAXBContext.newInstance(StackMetainfoXml.class,
- RepositoryXml.class, ConfigurationXml.class, UpgradePack.class);
- _jaxbContexts.put(StackMetainfoXml.class, ctx);
- _jaxbContexts.put(RepositoryXml.class, ctx);
- _jaxbContexts.put(ConfigurationXml.class, ctx);
- _jaxbContexts.put(UpgradePack.class, ctx);
- _jaxbContexts.put(ServiceMetainfoXml.class, JAXBContext.newInstance(ServiceMetainfoXml.class));
- } catch (JAXBException e) {
- throw new RuntimeException (e);
- }
- }
-
- /**
- * Note: the constructor does not perform initialisation. After instance
- * creation, you have to call fillInfo() manually.
- */
- public StackExtensionHelper(Injector injector, File stackRoot) {
- this.stackRoot = stackRoot;
- this.actionMetadata = injector.getInstance(ActionMetadata.class);
- }
-
-
- /**
- * Must be manually called after creation of StackExtensionHelper instance
- */
- public void fillInfo() throws Exception {
- if (stackParentsMap != null) {
- throw new AmbariException("fillInfo() method has already been called");
- }
- File[] stackFiles = stackRoot.listFiles(AmbariMetaInfo.FILENAME_FILTER);
- for (File stack : stackFiles) {
- if (stack.isFile()) {
- continue;
- }
- for (File stackFolder : stack.listFiles(AmbariMetaInfo.FILENAME_FILTER)) {
- if (stackFolder.isFile()) {
- continue;
- }
- String stackName = stackFolder.getParentFile().getName();
- String stackVersion = stackFolder.getName();
- stackVersionMap.put(stackName + stackVersion, getStackInfo(stackFolder));
- }
- }
- stackParentsMap = getParentStacksInOrder(stackVersionMap.values());
- }
-
- void mergeStacks(StackInfo parentStack,
- StackInfo resultStack) {
- if(parentStack.getConfigTypes() != null) {
- resultStack.getConfigTypes().putAll(parentStack.getConfigTypes());
- }
- List<PropertyInfo> mergedProperties = new ArrayList<PropertyInfo>();
- mergeProperties(resultStack.getProperties(), parentStack.getProperties(), mergedProperties, resultStack.getConfigTypes());
- resultStack.setProperties(mergedProperties);
- }
-
- ServiceInfo mergeServices(ServiceInfo parentService,
- ServiceInfo childService) {
- ServiceInfo mergedServiceInfo = new ServiceInfo();
- mergedServiceInfo.setSchemaVersion(childService.getSchemaVersion());
- mergedServiceInfo.setName(childService.getName());
- mergedServiceInfo.setComment(childService.getComment() != null ?
- childService.getComment() :
- parentService.getComment());
- mergedServiceInfo.setVersion(childService.getVersion());
- mergedServiceInfo.setDisplayName(
- childService.getDisplayName() != null ?
- childService.getDisplayName() :
- parentService.getDisplayName());
- mergedServiceInfo.setConfigDependencies(
- childService.getConfigDependencies() != null ?
- childService.getConfigDependencies() :
- parentService.getConfigDependencies() != null ?
- parentService.getConfigDependencies() :
- Collections.<String>emptyList());
- mergedServiceInfo.setExcludedConfigTypes(
- childService.getExcludedConfigTypes() != null ?
- childService.getExcludedConfigTypes() :
- parentService.getExcludedConfigTypes() != null ?
- parentService.getExcludedConfigTypes() :
- Collections.<String>emptySet()
- );
- mergedServiceInfo.setRequiredServices(
- childService.getRequiredServices() != null ?
- childService.getRequiredServices() :
- parentService.getRequiredServices() != null ?
- parentService.getRequiredServices() :
- Collections.<String>emptyList()
- );
-
- mergedServiceInfo.setRestartRequiredAfterChange(
- (childService.isRestartRequiredAfterChange() != null)
- ? childService.isRestartRequiredAfterChange()
- : parentService.isRestartRequiredAfterChange());
- mergedServiceInfo.setMonitoringService(
- (childService.isMonitoringService() != null)
- ? childService.isMonitoringService()
- : parentService.isMonitoringService());
-
- Map<String, ServiceOsSpecific> osSpecific = childService.getOsSpecifics();
- if (! osSpecific.isEmpty()) {
- mergedServiceInfo.setOsSpecifics(childService.getOsSpecifics());
- } else {
- mergedServiceInfo.setOsSpecifics(parentService.getOsSpecifics());
- }
-
- mergedServiceInfo.setConfigTypes(new HashMap<String, Map<String, Map<String, String>>>());
- if(childService.getConfigTypes() != null) {
- mergedServiceInfo.getConfigTypes().putAll(childService.getConfigTypes());
- }
- if(parentService.getConfigTypes() != null) {
- mergedServiceInfo.getConfigTypes().putAll(parentService.getConfigTypes());
- }
-
- CommandScriptDefinition commandScript = childService.getCommandScript();
- if (commandScript != null) {
- mergedServiceInfo.setCommandScript(childService.getCommandScript());
- } else {
- mergedServiceInfo.setCommandScript(parentService.getCommandScript());
- }
-
- String servicePackageFolder = childService.getServicePackageFolder();
- if (servicePackageFolder != null) {
- mergedServiceInfo.setServicePackageFolder(servicePackageFolder);
- } else {
- mergedServiceInfo.setServicePackageFolder(
- parentService.getServicePackageFolder());
- }
-
- // Merge custom command definitions for service
- List<CustomCommandDefinition> mergedCustomCommands =
- mergeCustomCommandLists(parentService.getCustomCommands(),
- childService.getCustomCommands());
- mergedServiceInfo.setCustomCommands(mergedCustomCommands);
-
- // metrics
- if (null != childService.getMetricsFile())
- mergedServiceInfo.setMetricsFile(childService.getMetricsFile());
- else if (null != parentService.getMetricsFile())
- mergedServiceInfo.setMetricsFile(parentService.getMetricsFile());
-
- // alerts
- if (null != childService.getAlertsFile())
- mergedServiceInfo.setAlertsFile(childService.getAlertsFile());
- else if (null != parentService.getAlertsFile())
- mergedServiceInfo.setAlertsFile(parentService.getAlertsFile());
-
- populateComponents(mergedServiceInfo, parentService, childService);
- mergeProperties(childService.getProperties(), parentService.getProperties(),
- mergedServiceInfo.getProperties(), childService.getConfigTypes());
- // Add all parent config dependencies
- if (parentService.getConfigDependencies() != null && !parentService
- .getConfigDependencies().isEmpty()) {
- for (String configDep : parentService.getConfigDependencies()) {
- if (!mergedServiceInfo.getConfigDependencies().contains(configDep)) {
- mergedServiceInfo.getConfigDependencies().add(configDep);
- }
- }
- }
- return mergedServiceInfo;
- }
-
- public void mergeProperties(List<PropertyInfo> childProperties,
- List<PropertyInfo> parentProperties, List<PropertyInfo> mergedProperties, Map<String,
- Map<String, Map<String, String>>> childConfigTypes) {
- // Add child properties not deleted
- Map<String, Set<String>> deleteMap = new HashMap<String, Set<String>>();
- Map<String, Set<String>> appendMap = new HashMap<String, Set<String>>();
- for (PropertyInfo propertyInfo : childProperties) {
- if (!propertyInfo.isDeleted()) {
- mergedProperties.add(propertyInfo);
- if (appendMap.containsKey(propertyInfo.getName())) {
- appendMap.get(propertyInfo.getName()).add(propertyInfo.getFilename());
- } else {
- Set<String> filenames = new HashSet<String>();
- filenames.add(propertyInfo.getFilename());
- appendMap.put(propertyInfo.getName(), filenames);
- }
- } else {
- if (deleteMap.containsKey(propertyInfo.getName())) {
- deleteMap.get(propertyInfo.getName()).add(propertyInfo.getFilename());
- } else {
- Set<String> filenames = new HashSet<String>();
- filenames.add(propertyInfo.getFilename());
- deleteMap.put(propertyInfo.getName(), filenames);
- }
- }
- }
- // Add all parent properties
- for (PropertyInfo parentPropertyInfo : parentProperties) {
- String configType = ConfigHelper.fileNameToConfigType(parentPropertyInfo.getFilename());
- boolean disableInherit = false;
- if (childConfigTypes.containsKey(configType)){
- disableInherit =
- childConfigTypes.get(configType).get(Supports.KEYWORD).get(Supports.DO_NOT_EXTEND.getPropertyName()).equals("true");
- }
- if (!deleteMap.containsKey(parentPropertyInfo.getName()) && !(appendMap
- .containsKey(parentPropertyInfo.getName())
- && appendMap.get(parentPropertyInfo.getName())
- .contains(parentPropertyInfo.getFilename())) && !disableInherit) {
- mergedProperties.add(parentPropertyInfo);
- }
- }
- }
-
-
- /**
- * Merges component sets of parentService and childService and writes result
- * to mergedServiceInfo
- */
- private void populateComponents(ServiceInfo mergedServiceInfo, ServiceInfo parentService,
- ServiceInfo childService) {
- // Add all child components to service
- List<String> deleteList = new ArrayList<String>();
- List<String> appendList = new ArrayList<String>();
-
- for (ComponentInfo childComponent : childService.getComponents()) {
- if (!childComponent.isDeleted()) {
- ComponentInfo parentComponent = parentService.getComponentByName(childComponent.getName());
- if (parentComponent != null) { // If parent has similar component
- ComponentInfo mergedComponent = mergeComponents(parentComponent,
- childComponent);
- mergedServiceInfo.getComponents().add(mergedComponent);
- appendList.add(mergedComponent.getName());
- } else {
- mergedServiceInfo.getComponents().add(childComponent);
- appendList.add(childComponent.getName());
- }
- } else {
- deleteList.add(childComponent.getName());
- }
- }
- // Add remaining parent components
- for (ComponentInfo parentComponent : parentService.getComponents()) {
- if (!deleteList.contains(parentComponent.getName()) && !appendList
- .contains(parentComponent.getName())) {
- mergedServiceInfo.getComponents().add(parentComponent);
- }
- }
- }
-
- ComponentInfo mergeComponents(ComponentInfo parent, ComponentInfo child) {
- ComponentInfo result = new ComponentInfo(child); // cloning child
- CommandScriptDefinition commandScript = child.getCommandScript();
- String category = child.getCategory();
- String cardinality = child.getCardinality();
- List<String> clientsToUpdateConfigs = child.getClientsToUpdateConfigs();
-
- if (commandScript != null) {
- result.setCommandScript(child.getCommandScript());
- } else {
- result.setCommandScript(parent.getCommandScript());
- }
- //keep the same semantic as for ServiceInfo
- result.setDisplayName(
- child.getDisplayName() != null ?
- child.getDisplayName() : parent.getDisplayName());
-
- result.setConfigDependencies(
- child.getConfigDependencies() != null ?
- child.getConfigDependencies() : parent.getConfigDependencies());
-
-
- //Merge client config file definitions
- List<ClientConfigFileDefinition> clientConfigFiles = child.getClientConfigFiles();
- if (clientConfigFiles != null) {
- result.setClientConfigFiles(child.getClientConfigFiles());
- } else {
- result.setClientConfigFiles(parent.getClientConfigFiles());
- }
- if (clientsToUpdateConfigs != null) {
- result.setClientsToUpdateConfigs(child.getClientsToUpdateConfigs());
- } else {
- result.setClientsToUpdateConfigs(parent.getClientsToUpdateConfigs());
- }
- // Merge custom command definitions for service
- List<CustomCommandDefinition> mergedCustomCommands =
- mergeCustomCommandLists(parent.getCustomCommands(),
- child.getCustomCommands());
- result.setCustomCommands(mergedCustomCommands);
-
- if (category != null) {
- result.setCategory(child.getCategory());
- } else {
- result.setCategory(parent.getCategory());
- }
-
- if (cardinality != null) {
- result.setCardinality(child.getCardinality());
- } else {
- result.setCardinality(parent.getCardinality());
- }
-
- result.setDependencies(
- child.getDependencies() == null ?
- parent.getDependencies() :
- parent.getDependencies() == null ?
- child.getDependencies() :
- mergeComponentDependencies(parent.getDependencies(),
- child.getDependencies()));
-
- return result;
- }
-
- List<DependencyInfo> mergeComponentDependencies(
- List<DependencyInfo> parentList,
- List<DependencyInfo> childList) {
-
- List<DependencyInfo> mergedList =
- new ArrayList<DependencyInfo>(childList);
- List<String> existingNames = new ArrayList<String>();
-
- for (DependencyInfo childDI : childList) {
- existingNames.add(childDI.getName());
- }
- for (DependencyInfo parentsDI : parentList) {
- if (! existingNames.contains(parentsDI.getName())) {
- mergedList.add(parentsDI);
- existingNames.add(parentsDI.getName());
- }
- }
- return mergedList;
- }
-
-
- private List<CustomCommandDefinition> mergeCustomCommandLists(
- List<CustomCommandDefinition> parentList,
- List<CustomCommandDefinition> childList) {
- List<CustomCommandDefinition> mergedList =
- new ArrayList<CustomCommandDefinition>(childList);
- List<String> existingNames = new ArrayList<String>();
- for (CustomCommandDefinition childCCD : childList) {
- existingNames.add(childCCD.getName());
- }
- for (CustomCommandDefinition parentsCCD : parentList) {
- if (! existingNames.contains(parentsCCD.getName())) {
- mergedList.add(parentsCCD);
- existingNames.add(parentsCCD.getName());
- }
- }
- return mergedList;
- }
-
-
- public List<ServiceInfo> getAllApplicableServices(StackInfo stackInfo) {
- LinkedList<StackInfo> parents = (LinkedList<StackInfo>)
- stackParentsMap.get(stackInfo.getVersion());
-
- if (parents == null || parents.isEmpty()) {
- return stackInfo.getServices();
- }
- // Add child to the end of extension list
- parents.addFirst(stackInfo);
- ListIterator<StackInfo> lt = parents.listIterator(parents.size());
- // Map services with unique names
- Map<String, ServiceInfo> serviceInfoMap = new HashMap<String,
- ServiceInfo>();
- List<ServiceInfo> serviceInfoList = null;
- // Iterate with oldest parent first - all stacks are populated
- StackInfo parentStack = null;
- while(lt.hasPrevious()) {
- if (parentStack == null) {
- parentStack = lt.previous();
- serviceInfoList = parentStack.getServices();
- for (ServiceInfo service : serviceInfoList){
- if (!service.isDeleted()) {
- serviceInfoMap.put(service.getName(), service);
- }
- }
- continue;
- }
- StackInfo currentStackInfo = lt.previous();
- serviceInfoList = currentStackInfo.getServices();
-
- mergeStacks(parentStack, currentStackInfo);
-
- for (ServiceInfo service : serviceInfoList) {
- ServiceInfo existingService = serviceInfoMap.get(service.getName());
- if (service.isDeleted()) {
- serviceInfoMap.remove(service.getName());
- continue;
- }
-
- if (existingService == null && !service.isDeleted()) {
- serviceInfoMap.put(service.getName(), service);
- } else {
- // Redefined service - merge with parent
- ServiceInfo newServiceInfo = mergeServices(existingService, service);
- serviceInfoMap.put(service.getName(), newServiceInfo);
- }
-
- // remove 'excluded-config-types' from configTypes
- ServiceInfo serviceInfo = serviceInfoMap.get(service.getName());
- if(serviceInfo.getExcludedConfigTypes() != null) {
- Iterator<Map.Entry<String,Map<String,Map<String,String>>>> configTypesItetator = serviceInfo.getConfigTypes().entrySet().iterator();
-
- while(configTypesItetator.hasNext()) {
- Map.Entry<String,Map<String,Map<String,String>>> configTypeMap = configTypesItetator.next();
-
- if(serviceInfo.getExcludedConfigTypes().contains(configTypeMap.getKey())) {
- configTypesItetator.remove();
- }
- }
- }
-
- }
- parentStack = currentStackInfo;
- }
- return new ArrayList<ServiceInfo>(serviceInfoMap.values());
- }
-
-
- /**
- * Determines exact hooks folder (subpath from stackRoot to hooks directory)
- * to use for a given stack. If the given stack
- * has no hooks folder, the inheritance hierarchy is queried.
- * @param stackInfo stack to work with
- */
- public String resolveHooksFolder(StackInfo stackInfo) throws AmbariException {
- // Determine hooks folder for stack
- String stackId = String.format("%s-%s",
- stackInfo.getName(), stackInfo.getVersion());
- String hooksFolder = stackInfo.getStackHooksFolder();
- if (hooksFolder == null) {
- // Try to get parent's
- List<StackInfo> parents = getParents(stackInfo);
- for (StackInfo parent : parents) {
- hooksFolder = parent.getStackHooksFolder();
- if (hooksFolder != null) {
- break;
- }
- }
- }
- if (hooksFolder == null) {
- String message = String.format(
- "Can not determine hooks dir for stack %s",
- stackId);
- LOG.debug(message);
- }
- return hooksFolder;
- }
-
- void populateServicesForStack(StackInfo stackInfo) throws
- ParserConfigurationException, SAXException,
- XPathExpressionException, IOException, JAXBException {
- List<ServiceInfo> services = new ArrayList<ServiceInfo>();
-
- File servicesFolder = new File(stackRoot.getAbsolutePath() + File
- .separator + stackInfo.getName() + File.separator + stackInfo.getVersion()
- + File.separator + AmbariMetaInfo.SERVICES_FOLDER_NAME);
- if (!servicesFolder.exists()) {
- LOG.info("No services defined for stack: " + stackInfo.getName() +
- "-" + stackInfo.getVersion());
- } else {
- try {
- File[] servicesFolders = servicesFolder.listFiles(AmbariMetaInfo
- .FILENAME_FILTER);
- if (servicesFolders == null) {
- String message = String.format("No service folders found at %s",
- servicesFolder.getAbsolutePath());
- throw new AmbariException(message);
- }
- // Iterate over service folders
- for (File serviceFolder : servicesFolders) {
- if (!serviceFolder.isDirectory())
- continue;
- // Get metainfo schema format version
- File metainfoFile = new File(serviceFolder.getAbsolutePath()
- + File.separator + AmbariMetaInfo.SERVICE_METAINFO_FILE_NAME);
- // get metrics file, if it exists
- File metricsJson = new File(serviceFolder.getAbsolutePath()
- + File.separator + AmbariMetaInfo.SERVICE_METRIC_FILE_NAME);
-
- File alertsJson = new File(serviceFolder.getAbsolutePath() +
- File.separator + AmbariMetaInfo.SERVICE_ALERT_FILE_NAME);
-
- if (!metainfoFile.exists()) {
- LOG.warn("Service folder " + serviceFolder.getAbsolutePath() + " doesn't contain metainfo file. Ignoring it.");
- continue;
- }
-
- //Reading v2 service metainfo (may contain multiple services)
- // Get services from metadata
- try {
- ServiceMetainfoXml smiv2x =
- unmarshal(ServiceMetainfoXml.class, metainfoFile);
- List<ServiceInfo> serviceInfos = smiv2x.getServices();
- for (ServiceInfo serviceInfo : serviceInfos) {
- serviceInfo.setSchemaVersion(AmbariMetaInfo.SCHEMA_VERSION_2);
-
- // Find service package folder
- String servicePackageDir = resolveServicePackageFolder(
- stackRoot.getAbsolutePath(), stackInfo,
- serviceFolder.getName(), serviceInfo.getName());
- serviceInfo.setServicePackageFolder(servicePackageDir);
-
- // process metrics.json
- if (metricsJson.exists())
- serviceInfo.setMetricsFile(metricsJson);
- if (alertsJson.exists())
- serviceInfo.setAlertsFile(alertsJson);
-
- // Get all properties from all "configs/*-site.xml" files
- setPropertiesFromConfigs(serviceFolder, serviceInfo);
-
- // Add now to be removed while iterating extension graph
- services.add(serviceInfo);
- }
- } catch (JAXBException e) {
- LOG.warn("Error while parsing metainfo.xml for a service: " + serviceFolder.getAbsolutePath(), e);
- }
- }
- } catch (Exception e) {
- LOG.error("Error while parsing metainfo.xml for a service", e);
- }
- }
-
- stackInfo.getServices().addAll(services);
-
- // add service check actions from the target stack
- for(ServiceInfo serviceInfo : stackInfo.getServices()) {
- if(serviceInfo.getCommandScript() != null) {
- actionMetadata.addServiceCheckAction(serviceInfo.getName());
- }
- }
-
- }
-
-
- /**
- * Determines exact service directory that contains scripts and templates
- * for a service. If the given stack does not have this folder, the
- * inheritance hierarchy is queried.
- */
- String resolveServicePackageFolder(String stackRoot,
- StackInfo stackInfo, String serviceFolderName,
- String serviceName) throws AmbariException {
- String stackId = String.format("%s-%s",
- stackInfo.getName(), stackInfo.getVersion());
- String expectedSubPath = stackInfo.getName() + File.separator +
- stackInfo.getVersion() + File.separator +
- AmbariMetaInfo.SERVICES_FOLDER_NAME +
- File.separator + serviceFolderName + File.separator +
- PACKAGE_FOLDER_NAME;
- File packageDir = new File(stackRoot + File.separator + expectedSubPath);
- String servicePackageFolder = null;
- if (packageDir.isDirectory()) {
- servicePackageFolder = expectedSubPath;
- String message = String.format(
- "Service package folder for service %s" +
- " for stack %s has been resolved to %s",
- serviceName, stackId, servicePackageFolder);
- LOG.debug(message);
- } else {
- String message = String.format(
- "Service package folder %s for service %s " +
- " for stack %s does not exist.",
- packageDir.getAbsolutePath(), serviceName, stackId);
- LOG.debug(message);
- }
- return servicePackageFolder;
- }
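A small standalone sketch (not part of the commit) of the path check performed above; the stack root and the HDP/2.0.6/HDFS names are illustrative only:

import java.io.File;

public class PackageFolderDemo {
    public static void main(String[] args) {
        // Hypothetical stack root and layout, mirroring the sub-path built above:
        // <name>/<version>/services/<serviceFolder>/package
        String stackRoot = "/var/lib/ambari-server/resources/stacks";
        String expectedSubPath = "HDP" + File.separator + "2.0.6" + File.separator
                + "services" + File.separator + "HDFS" + File.separator + "package";

        File packageDir = new File(stackRoot + File.separator + expectedSubPath);
        // As above, the relative sub-path is kept only when the directory exists;
        // otherwise null is returned (per the javadoc, the inheritance hierarchy
        // is then consulted by the caller).
        String servicePackageFolder = packageDir.isDirectory() ? expectedSubPath : null;
        System.out.println(servicePackageFolder);
    }
}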
-
-
- public List<StackInfo> getAllAvailableStacks() {
- return new ArrayList<StackInfo>(stackVersionMap.values());
- }
-
- public List<StackInfo> getParents(StackInfo stackInfo) {
- return stackParentsMap.get(stackInfo.getVersion());
- }
-
- private Map<String, List<StackInfo>> getParentStacksInOrder(
- Collection<StackInfo> stacks) {
- Map<String, List<StackInfo>> parentStacksMap = new HashMap<String,
- List<StackInfo>>();
-
- for (StackInfo child : stacks) {
- List<StackInfo> parentStacks = new LinkedList<StackInfo>();
- parentStacksMap.put(child.getVersion(), parentStacks);
- while (child.getParentStackVersion() != null && !child
- .getParentStackVersion().isEmpty() && !child.getVersion().equals
- (child.getParentStackVersion())) {
- String key = child.getName() + child.getParentStackVersion();
- if (stackVersionMap.containsKey(key)) {
- StackInfo parent = stackVersionMap.get(key);
- parentStacks.add(parent);
- child = parent;
- } else {
- LOG.info("Unknown parent stack version: " + child
- .getParentStackVersion() + ", for stack: " + child.getName() + " " +
- child.getVersion());
- break;
- }
- }
- }
- return parentStacksMap;
- }
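A minimal standalone sketch (not from the commit) of the parent-chain walk in getParentStacksInOrder(); StackRef and the sample HDP versions are hypothetical stand-ins for StackInfo and the stackVersionMap entries:

import java.util.*;

public class ParentChainDemo {
    // Hypothetical stand-in for StackInfo: just a name, version and parent version.
    static class StackRef {
        final String name, version, parentVersion;
        StackRef(String name, String version, String parentVersion) {
            this.name = name; this.version = version; this.parentVersion = parentVersion;
        }
    }

    public static void main(String[] args) {
        Map<String, StackRef> byKey = new HashMap<String, StackRef>();
        StackRef v1 = new StackRef("HDP", "1.0", null);
        StackRef v2 = new StackRef("HDP", "2.0", "1.0");
        StackRef v21 = new StackRef("HDP", "2.1", "2.0");
        byKey.put("HDP1.0", v1);
        byKey.put("HDP2.0", v2);
        byKey.put("HDP2.1", v21);

        // Walk the parent chain of HDP-2.1 with the same guards as the code above:
        // stop on a missing parent version, a self-reference, or an unknown key.
        List<StackRef> parents = new LinkedList<StackRef>();
        StackRef child = v21;
        while (child.parentVersion != null
                && !child.parentVersion.isEmpty()
                && !child.version.equals(child.parentVersion)) {
            StackRef parent = byKey.get(child.name + child.parentVersion);
            if (parent == null) {
                break;              // unknown parent: the chain ends here
            }
            parents.add(parent);
            child = parent;
        }
        System.out.println(parents.size()); // prints 2 (HDP-2.0, then HDP-1.0)
    }
}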
-
-
- /**
- * Determines schema version of a given metainfo file
- * @param stackMetainfoFile xml file
- */
- String getSchemaVersion(File stackMetainfoFile) throws IOException,
- ParserConfigurationException, SAXException, XPathExpressionException {
- // Using XPath to get a single value from a metainfo file
- DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
- DocumentBuilder builder = factory.newDocumentBuilder();
- Document doc = builder.parse(stackMetainfoFile);
- XPathFactory xPathfactory = XPathFactory.newInstance();
- XPath xpath = xPathfactory.newXPath();
- XPathExpression schemaPath = xpath.compile("/metainfo/schemaVersion[1]");
-
- String value = schemaPath.evaluate(doc).trim();
- if ( "".equals(value) || // If schemaVersion is not defined
- AmbariMetaInfo.SCHEMA_VERSION_2.equals(value)) {
- return AmbariMetaInfo.SCHEMA_VERSION_2;
- } else {
- String message = String.format("Unknown schema version %s at file " +
- "%s", value, stackMetainfoFile.getAbsolutePath());
- throw new AmbariException(message);
- }
-
- }
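The method above pulls /metainfo/schemaVersion with XPath and falls back to version 2.0 when the element is empty or absent. A self-contained sketch of the same lookup against an in-memory document (the XML string is illustrative):

import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import java.io.ByteArrayInputStream;

public class SchemaVersionDemo {
    public static void main(String[] args) throws Exception {
        String xml = "<metainfo><schemaVersion>2.0</schemaVersion></metainfo>";
        Document doc = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new ByteArrayInputStream(xml.getBytes("UTF-8")));

        XPath xpath = XPathFactory.newInstance().newXPath();
        // Same expression as above: the first schemaVersion element under metainfo.
        String value = xpath.compile("/metainfo/schemaVersion[1]").evaluate(doc).trim();

        // An empty value is treated as the default (2.0), as in the code above.
        String schemaVersion = value.isEmpty() ? "2.0" : value;
        System.out.println(schemaVersion); // prints 2.0
    }
}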
-
- /**
- * Get populated stackInfo for the stack definition at the provided path.
- * @param stackVersionFolder Path to stack definition.
- * @return StackInfo StackInfo object
- * @throws JAXBException
- */
- private StackInfo getStackInfo(File stackVersionFolder) throws JAXBException {
- StackInfo stackInfo = new StackInfo();
-
- stackInfo.setName(stackVersionFolder.getParentFile().getName());
- stackInfo.setVersion(stackVersionFolder.getName());
-
- // Get metainfo from file
- File stackMetainfoFile = new File(stackVersionFolder.getAbsolutePath()
- + File.separator + AmbariMetaInfo.STACK_METAINFO_FILE_NAME);
-
- if (stackMetainfoFile.exists()) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Reading stack version metainfo from file "
- + stackMetainfoFile.getAbsolutePath());
- }
-
- StackMetainfoXml smx = unmarshal(StackMetainfoXml.class, stackMetainfoFile);
-
- stackInfo.setMinUpgradeVersion(smx.getVersion().getUpgrade());
- stackInfo.setActive(smx.getVersion().isActive());
- stackInfo.setParentStackVersion(smx.getExtends());
-
- // Populating hooks dir for stack
- String hooksSubPath = stackInfo.getName() + File.separator +
- stackInfo.getVersion() + File.separator + HOOKS_FOLDER_NAME;
- String hooksAbsPath = stackVersionFolder.getAbsolutePath() +
- File.separator + HOOKS_FOLDER_NAME;
- if (new File(hooksAbsPath).exists()) {
- stackInfo.setStackHooksFolder(hooksSubPath);
- } else {
- String message = String.format("Hooks folder %s does not exist",
- hooksAbsPath);
- LOG.debug(message);
- }
-
- String rcoFileLocation = stackVersionFolder.getAbsolutePath() +
- File.separator + AmbariMetaInfo.RCO_FILE_NAME;
- if (new File(rcoFileLocation).exists())
- stackInfo.setRcoFileLocation(rcoFileLocation);
-
- setStackPropertiesFromConfigs(stackInfo);
- }
-
- try {
- // Read the service and available configs for this stack
- populateServicesForStack(stackInfo);
- } catch (Exception e) {
- LOG.error("Exception caught while populating services for stack: " +
- stackInfo.getName() + "-" + stackInfo.getVersion());
- e.printStackTrace();
- }
- return stackInfo;
- }
-
- private void populateStackProperties(StackInfo stackInfo, File configFile) throws JAXBException {
- ConfigurationXml configuration = unmarshal(ConfigurationXml.class, configFile);
- String fileName = configFile.getName();
- stackInfo.getProperties().addAll(getProperties(configuration, fileName));
- String configType = ConfigHelper.fileNameToConfigType(fileName);
-
- addConfigType(stackInfo.getConfigTypes(), configType);
- setConfigTypeAttributes(stackInfo.getConfigTypes(), configuration, configType);
- }
-
- /**
- * Get all properties from all "configs/*.xml" files. See {@see AmbariMetaInfo#SERVICE_CONFIG_FILE_NAME_POSTFIX}
- */
- void setStackPropertiesFromConfigs(StackInfo stackInfo) {
- File configsFolder = new File(stackRoot.getAbsolutePath() + File
- .separator + stackInfo.getName() + File.separator + stackInfo.getVersion()
- + File.separator + AmbariMetaInfo.SERVICE_CONFIG_FOLDER_NAME);
-
- if (!configsFolder.exists() || !configsFolder.isDirectory())
- return;
-
- File[] configFiles = configsFolder.listFiles(AmbariMetaInfo.FILENAME_FILTER);
- if (configFiles != null) {
- for (File configFile : configFiles) {
- if (configFile.getName().endsWith(AmbariMetaInfo.SERVICE_CONFIG_FILE_NAME_POSTFIX)) {
- try {
- populateStackProperties(stackInfo, configFile);
- } catch (Exception e) {
- LOG.error("Could not load configuration for " + configFile, e);
- }
- }
- }
- }
- }
-
- private List<PropertyInfo> getProperties(ConfigurationXml configuration, String fileName) {
- List<PropertyInfo> list = new ArrayList<PropertyInfo>();
- for (PropertyInfo pi : configuration.getProperties()) {
- pi.setFilename(fileName);
- list.add(pi);
- }
- return list;
- }
-
- /**
- * Add properties and config type's properties from configuration file
- */
- void populateServiceProperties(File configFile, ServiceInfo serviceInfo) throws JAXBException {
- ConfigurationXml configuration = unmarshal(ConfigurationXml.class, configFile);
- String fileName = configFile.getName();
- serviceInfo.getProperties().addAll(getProperties(configuration, fileName));
- String configType = ConfigHelper.fileNameToConfigType(fileName);
-
- addConfigType(serviceInfo.getConfigTypes(), configType);
- setConfigTypeAttributes(serviceInfo.getConfigTypes(), configuration, configType);
- }
-
- void setConfigTypeAttributes(Map<String, Map<String, Map<String, String>>> configTypes, ConfigurationXml configuration, String configType) {
- for (Map.Entry<QName, String> attribute : configuration.getAttributes().entrySet()) {
- for (Supports supportsProperty : Supports.values()) {
- String attributeName = attribute.getKey().getLocalPart();
- String attributeValue = attribute.getValue();
- if (attributeName.equals(supportsProperty.getXmlAttributeName())) {
- addConfigTypeProperty(configTypes, configType, Supports.KEYWORD,
- supportsProperty.getPropertyName(), Boolean.valueOf(attributeValue).toString());
- }
- }
- }
- }
-
- void addConfigType(Map<String, Map<String, Map<String, String>>> configTypes, String configType) {
- configTypes.put(configType, new HashMap<String, Map<String, String>>());
-
- Map<String, Map<String, String>> properties = configTypes.get(configType);
- Map<String, String> supportsProperties = new HashMap<String, String>();
- for (Supports supportsProperty : Supports.values()) {
- supportsProperties.put(supportsProperty.getPropertyName(), supportsProperty.getDefaultValue());
- }
- properties.put(Supports.KEYWORD, supportsProperties);
- }
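addConfigType() seeds each config type with a "supports" group whose entries all start at their default value of "false". A small sketch (not from the commit) of building and updating that nested map shape; the hdfs-site name is just an example:

import java.util.HashMap;
import java.util.Map;

public class ConfigTypeMapDemo {
    public static void main(String[] args) {
        // configType -> properties group ("supports") -> property -> value
        Map<String, Map<String, Map<String, String>>> configTypes =
                new HashMap<String, Map<String, Map<String, String>>>();

        // Seed "hdfs-site" the way addConfigType() does: every ability starts
        // out as "false" under the "supports" group.
        Map<String, String> supports = new HashMap<String, String>();
        supports.put("final", "false");
        supports.put("adding_forbidden", "false");
        supports.put("do_not_extend", "false");

        Map<String, Map<String, String>> groups = new HashMap<String, Map<String, String>>();
        groups.put("supports", supports);
        configTypes.put("hdfs-site", groups);

        // A later attribute such as supports_final="true" simply overwrites the entry.
        configTypes.get("hdfs-site").get("supports").put("final", "true");

        System.out.println(configTypes);
    }
}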
-
- /**
- * Populate ServiceInfo#configTypes with default entries based on ServiceInfo#configDependencies property
- */
- void populateConfigTypesFromDependencies(ServiceInfo serviceInfo) {
- List<PropertyInfo> configurations = serviceInfo.getProperties();
- if (configurations != null) {
- Map<String, Map<String, Map<String, String>>> configTypes = new HashMap<String, Map<String, Map<String, String>>>();
- for (PropertyInfo configuration : configurations) {
- String configType = ConfigHelper.fileNameToConfigType(configuration.getFilename());
-
- if (!configTypes.containsKey(configType)) {
- Map<String, Map<String, String>> properties = new HashMap<String, Map<String, String>>();
- Map<String, String> supportsProperties = new HashMap<String, String>();
- for (Supports supportsProperty : Supports.values()) {
- supportsProperties.put(supportsProperty.getPropertyName(), supportsProperty.getDefaultValue());
- }
- properties.put(Supports.KEYWORD, supportsProperties);
- configTypes.put(configType, properties);
- }
- }
- serviceInfo.setConfigTypes(configTypes);
- }
- }
-
- /**
- * Put new property entry to ServiceInfo#configTypes collection for specified configType
- */
- void addConfigTypeProperty(Map<String, Map<String, Map<String, String>>> configTypes, String configType,
- String propertiesGroupName, String key, String value) {
- if (configTypes != null && configTypes.containsKey(configType)) {
- Map<String, Map<String, String>> configDependencyProperties = configTypes.get(configType);
- if (!configDependencyProperties.containsKey(propertiesGroupName)) {
- configDependencyProperties.put(propertiesGroupName, new HashMap<String, String>());
- }
- Map<String, String> propertiesGroup = configDependencyProperties.get(propertiesGroupName);
- propertiesGroup.put(key, value);
- }
- }
-
- /**
- * Get all properties from all "configs/*.xml" files. See {@see AmbariMetaInfo#SERVICE_CONFIG_FILE_NAME_POSTFIX}
- */
- void setPropertiesFromConfigs(File serviceFolder, ServiceInfo serviceInfo) {
-
- File serviceConfigFolder = new File(serviceFolder.getAbsolutePath()
- + File.separator + serviceInfo.getConfigDir());
-
- if (!serviceConfigFolder.exists() || !serviceConfigFolder.isDirectory())
- return;
-
- File[] configFiles = serviceConfigFolder.listFiles(AmbariMetaInfo.FILENAME_FILTER);
- if (configFiles != null) {
- for (File configFile : configFiles) {
- if (configFile.getName().endsWith(AmbariMetaInfo.SERVICE_CONFIG_FILE_NAME_POSTFIX)) {
- try {
- populateServiceProperties(configFile, serviceInfo);
- } catch (Exception e) {
- LOG.error("Could not load configuration for " + configFile, e);
- }
- }
- }
- }
- }
-
- public static <T> T unmarshal(Class<T> clz, File file) throws JAXBException {
- Unmarshaller u = _jaxbContexts.get(clz).createUnmarshaller();
-
- return clz.cast(u.unmarshal(file));
- }
-
- /**
- * Service configuration-types can support different abilities. This
- * enumerates the various abilities that configuration-types can support.
- *
- * For example, Hadoop configuration types like 'core-site' and 'hdfs-site'
- * can support the ability to define certain configs as 'final'.
- */
- protected enum Supports {
-
- FINAL("supports_final"),
- ADDING_FORBIDDEN("supports_adding_forbidden"),
- DO_NOT_EXTEND("supports_do_not_extend");
-
- public static final String KEYWORD = "supports";
-
- private String defaultValue;
- private String xmlAttributeName;
-
- private Supports(String xmlAttributeName) {
- this(xmlAttributeName, Boolean.FALSE.toString());
- }
-
- private Supports(String xmlAttributeName, String defaultValue) {
- this.defaultValue = defaultValue;
- this.xmlAttributeName = xmlAttributeName;
- }
-
- public String getDefaultValue() {
- return defaultValue;
- }
-
- public String getXmlAttributeName() {
- return xmlAttributeName;
- }
-
- public String getPropertyName() {
- return name().toLowerCase();
- }
- }
-
-}
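The removed class relied on JAXB unmarshalling with one cached JAXBContext per class (the unmarshal() helper above). A hedged, self-contained sketch of that approach; MetainfoXml is a hypothetical minimal type, whereas the commit's real types are StackMetainfoXml and ServiceMetainfoXml:

import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.StringReader;

public class UnmarshalDemo {
    // Hypothetical minimal metainfo type; the real classes carry many more fields.
    @XmlRootElement(name = "metainfo")
    static class MetainfoXml {
        public String schemaVersion;
    }

    public static void main(String[] args) throws Exception {
        // The removed helper cached one JAXBContext per class: contexts are
        // costly to build and thread-safe, while unmarshallers are neither.
        JAXBContext ctx = JAXBContext.newInstance(MetainfoXml.class);
        Unmarshaller u = ctx.createUnmarshaller();

        String xml = "<metainfo><schemaVersion>2.0</schemaVersion></metainfo>";
        MetainfoXml info = (MetainfoXml) u.unmarshal(new StringReader(xml));
        System.out.println(info.schemaVersion); // prints 2.0
    }
}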
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
index 4808a77..1899dde 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
@@ -30,6 +30,7 @@ import java.util.Set;
import java.util.TreeMap;
import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ObjectNotFoundException;
import org.apache.ambari.server.Role;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.StackAccessException;
@@ -242,8 +243,12 @@ public class AmbariActionExecutionHelper {
cluster.getService(serviceName)
.getServiceComponent(componentName).getServiceComponentHosts();
candidateHosts.addAll(componentHosts.keySet());
- componentInfo = ambariMetaInfo.getComponentCategory(stackId.getStackName(),
- stackId.getStackVersion(), serviceName, componentName);
+ try {
+ componentInfo = ambariMetaInfo.getComponent(stackId.getStackName(),
+ stackId.getStackVersion(), serviceName, componentName);
+ } catch (ObjectNotFoundException e) {
+ // do nothing, componentId is checked for null later
+ }
} else {
for (String component : cluster.getService(serviceName).getServiceComponents().keySet()) {
Map<String, ServiceComponentHost> componentHosts =
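The hunk above swaps the old getComponentCategory() lookup for getComponent(), which reports an unknown component by throwing ObjectNotFoundException instead of handing back a null that callers had to check. A minimal sketch of the resulting pattern; the types here are hypothetical stand-ins, not the Ambari classes:

public class LookupPatternDemo {
    // Hypothetical stand-ins for the metainfo lookup used in the hunk above.
    static class ObjectNotFoundException extends Exception {
        ObjectNotFoundException(String m) { super(m); }
    }
    static class ComponentInfo {
        final String name;
        ComponentInfo(String name) { this.name = name; }
    }

    static ComponentInfo getComponent(String name) throws ObjectNotFoundException {
        if (!"NAMENODE".equals(name)) {
            throw new ObjectNotFoundException("No such component: " + name);
        }
        return new ComponentInfo(name);
    }

    public static void main(String[] args) {
        ComponentInfo componentInfo = null;
        try {
            componentInfo = getComponent("UNKNOWN");
        } catch (ObjectNotFoundException e) {
            // do nothing: as in the hunk above, componentInfo stays null and
            // is checked further down by the caller.
        }
        System.out.println(componentInfo == null ? "not found" : componentInfo.name);
    }
}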
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index 8ffad0d..da7a9da 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -244,10 +244,10 @@ public class AmbariCustomCommandExecutionHelper {
StackId stackId = cluster.getDesiredStackVersion();
AmbariMetaInfo ambariMetaInfo = managementController.getAmbariMetaInfo();
- ServiceInfo serviceInfo = ambariMetaInfo.getServiceInfo
- (stackId.getStackName(), stackId.getStackVersion(), serviceName);
- StackInfo stackInfo = ambariMetaInfo.getStackInfo
- (stackId.getStackName(), stackId.getStackVersion());
+ ServiceInfo serviceInfo = ambariMetaInfo.getService(
+ stackId.getStackName(), stackId.getStackVersion(), serviceName);
+ StackInfo stackInfo = ambariMetaInfo.getStack
+ (stackId.getStackName(), stackId.getStackVersion());
CustomCommandDefinition customCommandDefinition = null;
ComponentInfo ci = serviceInfo.getComponentByName(componentName);
@@ -473,10 +473,10 @@ public class AmbariCustomCommandExecutionHelper {
StackId stackId = cluster.getDesiredStackVersion();
AmbariMetaInfo ambariMetaInfo = managementController.getAmbariMetaInfo();
ServiceInfo serviceInfo =
- ambariMetaInfo.getServiceInfo(stackId.getStackName(),
+ ambariMetaInfo.getService(stackId.getStackName(),
stackId.getStackVersion(), serviceName);
- StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
- stackId.getStackVersion());
+ StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
+ stackId.getStackVersion());
stage.addHostRoleExecutionCommand(hostname,
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 196f3b0..7e95ac8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -326,7 +326,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
+ " provided when creating a cluster");
}
StackId stackId = new StackId(request.getStackVersion());
- StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+ StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
stackId.getStackVersion());
if (stackInfo == null) {
throw new StackAccessException("stackName=" + stackId.getStackName() + ", stackVersion=" + stackId.getStackVersion());
@@ -566,7 +566,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
Cluster cluster = clusters.getCluster(request.getClusterName());
StackId stackId = cluster.getCurrentStackVersion();
- List<String> monitoringServices = ambariMetaInfo.getMonitoringServiceNames(
+ Collection<String> monitoringServices = ambariMetaInfo.getMonitoringServiceNames(
stackId.getStackName(), stackId.getStackVersion());
for (String serviceName : monitoringServices) {
@@ -1414,7 +1414,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
ServiceComponent sc = cluster.getService(serviceName).
getServiceComponent(componentName);
StackId stackId = sc.getDesiredStackVersion();
- ComponentInfo compInfo = ambariMetaInfo.getComponentCategory(
+ ComponentInfo compInfo = ambariMetaInfo.getComponent(
stackId.getStackName(), stackId.getStackVersion(), serviceName,
componentName);
if (runSmokeTest && compInfo.isMaster() &&
@@ -1546,13 +1546,13 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
String hostname = scHost.getHostName();
String osFamily = clusters.getHost(hostname).getOsFamily();
StackId stackId = cluster.getDesiredStackVersion();
- ServiceInfo serviceInfo = ambariMetaInfo.getServiceInfo(stackId.getStackName(),
- stackId.getStackVersion(), serviceName);
+ ServiceInfo serviceInfo = ambariMetaInfo.getService(stackId.getStackName(),
+ stackId.getStackVersion(), serviceName);
ComponentInfo componentInfo = ambariMetaInfo.getComponent(
stackId.getStackName(), stackId.getStackVersion(),
serviceName, componentName);
- StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
- stackId.getStackVersion());
+ StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
+ stackId.getStackVersion());
ExecutionCommand execCmd = stage.getExecutionCommandWrapper(scHost.getHostName(),
scHost.getServiceComponentName()).getExecutionCommand();
@@ -1861,7 +1861,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
break;
case STARTED:
StackId stackId = scHost.getDesiredStackVersion();
- ComponentInfo compInfo = ambariMetaInfo.getComponentCategory(
+ ComponentInfo compInfo = ambariMetaInfo.getComponent(
stackId.getStackName(), stackId.getStackVersion(), scHost.getServiceName(),
scHost.getServiceComponentName());
@@ -2939,7 +2939,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
StackId stackId = service.getDesiredStackVersion();
ComponentInfo compInfo =
- ambariMetaInfo.getServiceInfo(stackId.getStackName(),
+ ambariMetaInfo.getService(stackId.getStackName(),
stackId.getStackVersion(), service.getName()).getClientComponent();
if (compInfo != null) {
try {
@@ -3128,13 +3128,14 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
String stackName = request.getStackName();
if (stackName != null) {
- org.apache.ambari.server.state.Stack stack = ambariMetaInfo.getStack(stackName);
- response = Collections.singleton(stack.convertToResponse());
+ // this will throw an exception if the stack doesn't exist
+ ambariMetaInfo.getStacks(stackName);
+ response = Collections.singleton(new StackResponse(stackName));
} else {
- Set<org.apache.ambari.server.state.Stack> supportedStackNames = ambariMetaInfo.getStackNames();
+ Collection<StackInfo> supportedStacks = ambariMetaInfo.getStacks();
response = new HashSet<StackResponse>();
- for (org.apache.ambari.server.state.Stack stack: supportedStackNames) {
- response.add(stack.convertToResponse());
+ for (StackInfo stack: supportedStacks) {
+ response.add(new StackResponse(stack.getName()));
}
}
return response;
@@ -3145,9 +3146,11 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
try {
ambariMetaInfo.init();
+ } catch (AmbariException e) {
+ throw e;
} catch (Exception e) {
throw new AmbariException(
- "Ambari metainormation can't be read from the stack root directory");
+ "Ambari Meta Information can't be read from the stack root directory");
}
return null;
@@ -3312,13 +3315,17 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
String stackVersion = request.getStackVersion();
if (stackVersion != null) {
- StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackName, stackVersion);
+ StackInfo stackInfo = ambariMetaInfo.getStack(stackName, stackVersion);
response = Collections.singleton(stackInfo.convertToResponse());
} else {
- Set<StackInfo> stackInfos = ambariMetaInfo.getStackInfos(stackName);
- response = new HashSet<StackVersionResponse>();
- for (StackInfo stackInfo: stackInfos) {
- response.add(stackInfo.convertToResponse());
+ try {
+ Collection<StackInfo> stackInfos = ambariMetaInfo.getStacks(stackName);
+ response = new HashSet<StackVersionResponse>();
+ for (StackInfo stackInfo: stackInfos) {
+ response.add(stackInfo.convertToResponse());
+ }
+ } catch (StackAccessException e) {
+ response = Collections.emptySet();
}
}
@@ -3458,7 +3465,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
if (propertyName != null) {
properties = ambariMetaInfo.getPropertiesByName(stackName, stackVersion, serviceName, propertyName);
} else {
- properties = ambariMetaInfo.getProperties(stackName, stackVersion, serviceName);
+ properties = ambariMetaInfo.getServiceProperties(stackName, stackVersion, serviceName);
}
for (PropertyInfo property: properties) {
response.add(property.convertToResponse());
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
index a8e3451..2794c18 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/StackServiceResponse.java
@@ -25,7 +25,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
-import java.util.Set;
public class StackServiceResponse {
@@ -40,8 +39,6 @@ public class StackServiceResponse {
private List<String> customCommands;
private Map<String, Map<String, Map<String, String>>> configTypes;
- private Set<String> excludedConfigTypes;
-
private List<String> requiredServices;
/**
@@ -56,8 +53,7 @@ public class StackServiceResponse {
userName = null;
comments = service.getComment();
serviceVersion = service.getVersion();
- configTypes = service.getConfigTypes();
- excludedConfigTypes = service.getExcludedConfigTypes();
+ configTypes = service.getConfigTypeAttributes();
requiredServices = service.getRequiredServices();
serviceCheckSupported = null != service.getCommandScript();
@@ -132,10 +128,6 @@ public class StackServiceResponse {
public Map<String, Map<String, Map<String, String>>> getConfigTypes() {
return configTypes;
}
-
- public Set<String> getExcludedConfigTypes() {
- return excludedConfigTypes;
- }
public List<String> getRequiredServices() {
return requiredServices;
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractResourceProvider.java
index 2c25623..2c9179d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractResourceProvider.java
@@ -300,11 +300,11 @@ public abstract class AbstractResourceProvider extends BaseProvider implements R
throws SystemException, NoSuchResourceException, NoSuchParentResourceException {
try {
return command.invoke();
- } catch (ObjectNotFoundException e) {
- throw new NoSuchResourceException("The requested resource doesn't exist: " + e.getMessage(), e);
} catch (ParentObjectNotFoundException e) {
throw new NoSuchParentResourceException(e.getMessage(), e);
- } catch (AmbariException e) {
+ } catch (ObjectNotFoundException e) {
+ throw new NoSuchResourceException("The requested resource doesn't exist: " + e.getMessage(), e);
+ } catch (AmbariException e) {
if (LOG.isErrorEnabled()) {
LOG.error("Caught AmbariException when getting a resource", e);
}
@@ -329,11 +329,11 @@ public abstract class AbstractResourceProvider extends BaseProvider implements R
throws SystemException, NoSuchResourceException, NoSuchParentResourceException {
try {
return command.invoke();
- } catch (ObjectNotFoundException e) {
- throw new NoSuchResourceException("The specified resource doesn't exist: " + e.getMessage(), e);
} catch (ParentObjectNotFoundException e) {
throw new NoSuchParentResourceException(e.getMessage(), e);
- } catch (AmbariException e) {
+ } catch (ObjectNotFoundException e) {
+ throw new NoSuchResourceException("The specified resource doesn't exist: " + e.getMessage(), e);
+ } catch (AmbariException e) {
if (LOG.isErrorEnabled()) {
LOG.error("Caught AmbariException when modifying a resource", e);
}
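The reorder above only matters if ParentObjectNotFoundException is a subtype of ObjectNotFoundException, which the new ordering suggests: the more specific exception must be caught first, or its handler becomes unreachable. A small sketch with hypothetical exception types:

public class CatchOrderDemo {
    // Hypothetical hierarchy: ParentNotFound extends NotFound, mirroring a
    // parent-not-found exception that specializes a generic not-found exception.
    static class NotFound extends Exception {
        NotFound(String m) { super(m); }
    }
    static class ParentNotFound extends NotFound {
        ParentNotFound(String m) { super(m); }
    }

    public static void main(String[] args) {
        try {
            throw new ParentNotFound("parent cluster is missing");
        } catch (ParentNotFound e) {
            // The subtype must be caught first; otherwise this block would be
            // unreachable and the code would not compile.
            System.out.println("parent missing: " + e.getMessage());
        } catch (NotFound e) {
            System.out.println("resource missing: " + e.getMessage());
        }
    }
}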
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
index c49b677..4a1f596 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java
@@ -44,7 +44,6 @@ import org.apache.ambari.server.orm.entities.HostGroupConfigEntity;
import org.apache.ambari.server.orm.entities.HostGroupEntity;
import org.apache.ambari.server.state.ComponentInfo;
import org.apache.ambari.server.state.ServiceInfo;
-import org.apache.ambari.server.state.PropertyInfo;
import java.util.ArrayList;
import java.util.Arrays;
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
index 5aca65b..d719805 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClientConfigResourceProvider.java
@@ -140,7 +140,7 @@ public class ClientConfigResourceProvider extends AbstractControllerResourceProv
componentInfo = managementController.getAmbariMetaInfo().
getComponent(stackId.getStackName(), stackId.getStackVersion(), serviceName, componentName);
packageFolder = managementController.getAmbariMetaInfo().
- getServiceInfo(stackId.getStackName(), stackId.getStackVersion(), serviceName).getServicePackageFolder();
+ getService(stackId.getStackName(), stackId.getStackVersion(), serviceName).getServicePackageFolder();
String commandScript = componentInfo.getCommandScript().getScript();
List<ClientConfigFileDefinition> clientConfigFiles = componentInfo.getClientConfigFiles();
@@ -216,7 +216,7 @@ public class ClientConfigResourceProvider extends AbstractControllerResourceProv
ServiceInfo serviceInfo = null;
String osFamily = null;
clusterHostInfo = StageUtils.getClusterHostInfo(managementController.getClusters().getHostsForCluster(cluster.getClusterName()), cluster);
- serviceInfo = managementController.getAmbariMetaInfo().getServiceInfo(stackId.getStackName(),
+ serviceInfo = managementController.getAmbariMetaInfo().getService(stackId.getStackName(),
stackId.getStackVersion(), serviceName);
clusterHostInfo = substituteHostIndexes(clusterHostInfo);
osFamily = clusters.getHost(hostName).getOsFamily();
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
index 96b4c8c..bb6a39c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
@@ -702,22 +702,13 @@ public class ClusterResourceProvider extends BaseBlueprintProcessor {
BlueprintServiceConfigRequest blueprintConfigRequest =
new BlueprintServiceConfigRequest(service);
- Set<String> excludedConfigTypes = stack.getExcludedConfigurationTypes(service);
- if (excludedConfigTypes == null) {
- excludedConfigTypes = Collections.emptySet();
- }
-
for (String serviceConfigType : stack.getConfigurationTypes(service)) {
- // skip config types that are considered excluded,
- // which means that they typically belong to another service
- if (!excludedConfigTypes.contains(serviceConfigType)) {
- // skip handling of cluster-env here
- if (!serviceConfigType.equals("cluster-env")) {
- if (mapClusterConfigurations.containsKey(serviceConfigType)) {
- blueprintConfigRequest.addConfigElement(serviceConfigType,
- mapClusterConfigurations.get(serviceConfigType),
- mapClusterAttributes.get(serviceConfigType));
- }
+ // skip handling of cluster-env here
+ if (!serviceConfigType.equals("cluster-env")) {
+ if (mapClusterConfigurations.containsKey(serviceConfigType)) {
+ blueprintConfigRequest.addConfigElement(serviceConfigType,
+ mapClusterConfigurations.get(serviceConfigType),
+ mapClusterAttributes.get(serviceConfigType));
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
index 9a2be41..20bd60d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
@@ -32,6 +32,7 @@ import org.apache.ambari.server.DuplicateResourceException;
import org.apache.ambari.server.ObjectNotFoundException;
import org.apache.ambari.server.ParentObjectNotFoundException;
import org.apache.ambari.server.ServiceNotFoundException;
+import org.apache.ambari.server.StackAccessException;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.MaintenanceStateHelper;
@@ -427,8 +428,8 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
// Get the components for the given requests.
protected Set<ServiceComponentResponse> getComponents(
Set<ServiceComponentRequest> requests) throws AmbariException {
- Set<ServiceComponentResponse> response =
- new HashSet<ServiceComponentResponse>();
+
+ Set<ServiceComponentResponse> response = new HashSet<ServiceComponentResponse>();
for (ServiceComponentRequest request : requests) {
try {
response.addAll(getComponents(request));
@@ -438,12 +439,7 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
// there will be > 1 request in case of OR predicate
throw e;
}
- } catch (ParentObjectNotFoundException ee) {
- if (requests.size() == 1) {
- throw ee;
- }
}
-
}
return response;
}
@@ -503,13 +499,15 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
ServiceComponent sc = s.getServiceComponent(request.getComponentName());
ServiceComponentResponse serviceComponentResponse = sc.convertToResponse();
- ComponentInfo componentInfo = ambariMetaInfo.getComponentCategory(stackId.getStackName(),
- stackId.getStackVersion(), s.getName(), request.getComponentName());
- if (componentInfo != null) {
+ try {
+ ComponentInfo componentInfo = ambariMetaInfo.getComponent(stackId.getStackName(),
+ stackId.getStackVersion(), s.getName(), request.getComponentName());
category = componentInfo.getCategory();
if (category != null) {
serviceComponentResponse.setCategory(category);
}
+ } catch (ObjectNotFoundException e) {
+ // nothing to do, component doesn't exist
}
response.add(serviceComponentResponse);
@@ -549,17 +547,18 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
continue;
}
- ComponentInfo componentInfo = ambariMetaInfo.getComponentCategory(stackId.getStackName(),
- stackId.getStackVersion(), s.getName(), sc.getName());
ServiceComponentResponse serviceComponentResponse = sc.convertToResponse();
-
- String requestedCategory = request.getComponentCategory();
- if (componentInfo != null) {
+ try {
+ ComponentInfo componentInfo = ambariMetaInfo.getComponent(stackId.getStackName(),
+ stackId.getStackVersion(), s.getName(), sc.getName());
category = componentInfo.getCategory();
if (category != null) {
serviceComponentResponse.setCategory(category);
}
+ } catch (ObjectNotFoundException e) {
+ // component doesn't exist, nothing to do
}
+ String requestedCategory = request.getComponentCategory();
if (requestedCategory != null && !requestedCategory.isEmpty() &&
category != null && !requestedCategory.equalsIgnoreCase(category)) {
continue;
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
index 31c6c37..eb6237d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
@@ -848,7 +848,7 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
+ ", newDesiredState=" + newState;
StackId sid = cluster.getDesiredStackVersion();
- if ( ambariMetaInfo.getComponentCategory(
+ if ( ambariMetaInfo.getComponent(
sid.getStackName(), sid.getStackVersion(), sc.getServiceName(),
sch.getServiceComponentName()).isMaster()) {
throw new AmbariException(error);
@@ -990,16 +990,16 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
boolean hasMM = false;
for (ServiceComponentHostResponse hostComponentResponse : hostComponentResponses ) {
- ComponentInfo componentInfo = ambariMetaInfo.getComponentCategory(stackId.getStackName(),
- stackId.getStackVersion(), hostComponentResponse.getServiceName(),
- hostComponentResponse.getComponentName());
+ try {
+ ComponentInfo componentInfo = ambariMetaInfo.getComponent(stackId.getStackName(),
+ stackId.getStackVersion(), hostComponentResponse.getServiceName(),
+ hostComponentResponse.getComponentName());
- if (componentInfo != null) {
State state = getHostComponentState(hostComponentResponse);
// Components in MM should not affect service status,
// so we tend to ignore them
boolean isInMaintenance = ! MaintenanceState.OFF.toString().
- equals(hostComponentResponse.getMaintenanceState());
+ equals(hostComponentResponse.getMaintenanceState());
if (state.equals(State.DISABLED)) {
hasDisabled = true;
@@ -1040,6 +1040,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
otherState = state;
}
}
+ } catch (ObjectNotFoundException e) {
+ // component doesn't exist, nothing to do
}
}
@@ -1086,13 +1088,12 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
State nonStartedState = null;
for (ServiceComponentHostResponse hostComponentResponse : hostComponentResponses ) {
- ComponentInfo componentInfo = ambariMetaInfo.getComponentCategory(stackId.getStackName(),
- stackId.getStackVersion(), hostComponentResponse.getServiceName(),
- hostComponentResponse.getComponentName());
+ try {
+ ComponentInfo componentInfo = ambariMetaInfo.getComponent(stackId.getStackName(),
+ stackId.getStackVersion(), hostComponentResponse.getServiceName(),
+ hostComponentResponse.getComponentName());
- if (componentInfo != null) {
if (componentInfo.isMaster()) {
-
String componentName = hostComponentResponse.getComponentName();
boolean isNameNode = false;
@@ -1118,6 +1119,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
nonStartedState = state;
}
}
+ } catch (ObjectNotFoundException e) {
+ // component doesn't exist, nothing to do
}
}
@@ -1162,13 +1165,12 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
State nonStartedState = null;
for (ServiceComponentHostResponse hostComponentResponse : hostComponentResponses ) {
- ComponentInfo componentInfo = ambariMetaInfo.getComponentCategory(stackId.getStackName(),
- stackId.getStackVersion(), hostComponentResponse.getServiceName(),
- hostComponentResponse.getComponentName());
+ try {
+ ComponentInfo componentInfo = ambariMetaInfo.getComponent(stackId.getStackName(),
+ stackId.getStackVersion(), hostComponentResponse.getServiceName(),
+ hostComponentResponse.getComponentName());
- if (componentInfo != null) {
if (componentInfo.isMaster()) {
-
State state = getHostComponentState(hostComponentResponse);
switch (state) {
@@ -1183,6 +1185,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
nonStartedState = state;
}
}
+ } catch (ObjectNotFoundException e) {
+ // component doesn't exist, nothing to do
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java
index d6cfbd0..cd22340 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java
@@ -100,13 +100,6 @@ class Stack {
new HashMap<String, Map<String, Map<String, ConfigProperty>>>();
/**
- * Map of service to set of excluded config types
- */
- private Map<String, Set<String>> excludedConfigurationTypes =
- new HashMap<String, Set<String>>();
-
-
- /**
* Ambari Management Controller, used to obtain Stack definitions
*/
private final AmbariManagementController ambariManagementController;
@@ -161,7 +154,6 @@ class Stack {
for (StackServiceResponse stackService : stackServices) {
String serviceName = stackService.getServiceName();
parseComponents(serviceName);
- parseExcludedConfigurations(stackService);
parseConfigurations(serviceName);
registerConditionalDependencies();
}
@@ -222,18 +214,6 @@ class Stack {
}
/**
- * Get the set of excluded configuration types
- * for this service
- *
- * @param service service name
- *
- * @return Set of names of excluded config types
- */
- public Set<String> getExcludedConfigurationTypes(String service) {
- return excludedConfigurationTypes.get(service);
- }
-
- /**
* Get config properties for the specified service and configuration type.
*
* @param service service name
@@ -412,15 +392,6 @@ class Stack {
}
/**
- * Obtain the excluded configuration types from the StackServiceResponse
- *
- * @param stackServiceResponse the response object associated with this stack service
- */
- private void parseExcludedConfigurations(StackServiceResponse stackServiceResponse) {
- excludedConfigurationTypes.put(stackServiceResponse.getServiceName(), stackServiceResponse.getExcludedConfigTypes());
- }
-
- /**
* Parse configurations for the specified service from the stack definition.
*
* @param service service name
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java b/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java
index 3427f9a..32668fa 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java
@@ -141,7 +141,7 @@ public class RoleCommandOrder {
StackInfo stackInfo;
String rcoFileLocation = null;
try {
- stackInfo = ambariMetaInfo.getStackInfo(stackName, stackVersion);
+ stackInfo = ambariMetaInfo.getStack(stackName, stackVersion);
rcoFileLocation = stackInfo.getRcoFileLocation();
} catch (AmbariException e) {
LOG.warn("Error getting stack info for :" + stackName + "-" + stackVersion);
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
index 20ac03f..516dd2f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintEntity.java
@@ -22,6 +22,7 @@ import com.google.gson.Gson;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.ServiceInfo;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
@@ -165,8 +166,8 @@ public class BlueprintEntity {
* host group. An operational configuration is achieved by overlaying host group configuration
* on top of cluster configuration which overlays the default stack configurations.
*
- * @param stackInfo stack information
- * @param type type of required property to check (PASSWORD|DEFAULT)
+ * @param stackInfo stack information
+ * @param validatePasswords whether password properties should be validated
* @return map of required properties which are missing. Empty map if none are missing.
*
* @throws IllegalArgumentException if blueprint contains invalid information
@@ -191,17 +192,17 @@ public class BlueprintEntity {
for (HostGroupComponentEntity component : hostGroup.getComponents()) {
//for now, AMBARI is not recognized as a service in Stacks
if (! component.getName().equals("AMBARI_SERVER")) {
- String service;
+ ServiceInfo service;
+ String serviceName;
try {
- service = stackInfo.getComponentToService(stackName, stackVersion, component.getName());
+ serviceName = stackInfo.getComponentToService(stackName, stackVersion, component.getName());
+ service = stackInfo.getService(stackName, stackVersion, serviceName);
} catch (AmbariException e) {
throw new IllegalArgumentException("Unable to determine the service associated with the" +
" component: " + component.getName());
}
- if (processedServices.add(service)) {
- Map<String, PropertyInfo> serviceRequirements = stackInfo.getRequiredProperties(
- stackName, stackVersion, service);
-
+ if (processedServices.add(serviceName)) {
+ Map<String, PropertyInfo> serviceRequirements = service.getRequiredProperties();
for (PropertyInfo propertyInfo : serviceRequirements.values()) {
if (! (validatePasswords ^ propertyInfo.getPropertyTypes().contains(PropertyInfo.PropertyType.PASSWORD))) {
String configCategory = propertyInfo.getFilename();
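The kept/skipped decision above hinges on !(validatePasswords ^ isPassword): password properties are selected on the password pass and non-password properties otherwise. A tiny sketch (not from the commit) printing the truth table:

public class XorFilterDemo {
    public static void main(String[] args) {
        boolean[] validatePasswordsValues = { true, false };
        boolean[] isPasswordValues = { true, false };

        // The hunk above keeps a property when !(validatePasswords ^ isPassword):
        // password properties during the password pass, everything else otherwise.
        for (boolean validatePasswords : validatePasswordsValues) {
            for (boolean isPassword : isPasswordValues) {
                boolean kept = !(validatePasswords ^ isPassword);
                System.out.println("validatePasswords=" + validatePasswords
                        + " isPassword=" + isPassword + " -> kept=" + kept);
            }
        }
    }
}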
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/BaseModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/BaseModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/BaseModule.java
new file mode 100644
index 0000000..b27adea
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/BaseModule.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.AmbariException;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Abstract base service definition module.
+ * Provides functionality that is common across multiple modules.
+ */
+public abstract class BaseModule<T, I> implements StackDefinitionModule<T, I> {
+ /**
+ * Merges child modules with the corresponding parent modules.
+ *
+ * @param allStacks collection of all stack modules in the stack definition
+ * @param modules child modules of this module that are to be merged
+ * @param parentModules parent modules which the modules are to be merged with
+ *
+ * @return collection of the merged modules
+ */
+ protected <T extends StackDefinitionModule<T, ?>> Collection<T> mergeChildModules(
+ Map<String, StackModule> allStacks, Map<String, T> modules, Map<String, T> parentModules)
+ throws AmbariException {
+
+ Set<String> addedModules = new HashSet<String>();
+ Collection<T> mergedModules = new HashSet<T>();
+
+ for (T module : modules.values()) {
+ String id = module.getId();
+ addedModules.add(id);
+ if (! module.isDeleted()) {
+ if (parentModules.containsKey(id)) {
+ module.resolve(parentModules.get(id), allStacks);
+ }
+ mergedModules.add(module);
+ }
+ }
+
+ // add non-overlapping parent modules
+ for (T parentModule : parentModules.values()) {
+ String id = parentModule.getId();
+ if (! addedModules.contains(id)) {
+ mergedModules.add(parentModule);
+ }
+ }
+ return mergedModules;
+ }
+
+ /**
+ * Finalize a module's child components.
+ * Any child module marked as deleted will be removed from this module after finalizing
+ * the child.
+ *
+ * @param modules child modules to finalize
+ */
+ protected void finalizeChildModules(Collection<? extends StackDefinitionModule> modules) {
+ Iterator<? extends StackDefinitionModule> iter = modules.iterator();
+ while (iter.hasNext()) {
+ StackDefinitionModule module = iter.next();
+ module.finalizeModule();
+ if (module.isDeleted()) {
+ iter.remove();
+ }
+ }
+ }
+
+ @Override
+ public void finalizeModule() {
+ // do nothing by default
+ }
+}
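mergeChildModules() keeps every non-deleted child module (resolving it against a same-id parent when one exists) and then inherits any parent module the child never mentions; a child marked deleted also suppresses the inherited parent of the same id. A standalone sketch of those rules using plain maps of strings as stand-ins for modules:

import java.util.*;

public class MergeChildModulesDemo {
    public static void main(String[] args) {
        // Hypothetical module sets keyed by id; the values stand in for module content.
        Map<String, String> parent = new LinkedHashMap<String, String>();
        parent.put("NAMENODE", "parent NAMENODE");
        parent.put("DATANODE", "parent DATANODE");

        Map<String, String> child = new LinkedHashMap<String, String>();
        child.put("NAMENODE", "child NAMENODE");        // resolves against the parent
        child.put("DATANODE", "child DATANODE");        // marked deleted below
        child.put("JOURNALNODE", "child JOURNALNODE");  // new in the child

        Set<String> deletedInChild = Collections.singleton("DATANODE");

        // Same rules as mergeChildModules(): keep non-deleted children, then add
        // parents that have no child counterpart at all. A deleted child also
        // removes the parent entry of the same id from the result.
        Map<String, String> merged = new LinkedHashMap<String, String>();
        Set<String> seen = new HashSet<String>();
        for (Map.Entry<String, String> e : child.entrySet()) {
            seen.add(e.getKey());
            if (!deletedInChild.contains(e.getKey())) {
                merged.put(e.getKey(), e.getValue());
            }
        }
        for (Map.Entry<String, String> e : parent.entrySet()) {
            if (!seen.contains(e.getKey())) {
                merged.put(e.getKey(), e.getValue());
            }
        }
        System.out.println(merged.keySet()); // [NAMENODE, JOURNALNODE]
    }
}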
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/stack/ComponentModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ComponentModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ComponentModule.java
new file mode 100644
index 0000000..0f2a691
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ComponentModule.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.stack;
+
+import org.apache.ambari.server.state.ComponentInfo;
+import org.apache.ambari.server.state.CustomCommandDefinition;
+import org.apache.ambari.server.state.DependencyInfo;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Component module which provides all functionality related to parsing and fully
+ * resolving service components from the stack definition.
+ */
+public class ComponentModule extends BaseModule<ComponentModule, ComponentInfo> {
+ /**
+ * Corresponding component info
+ */
+ private ComponentInfo componentInfo;
+
+ /**
+ * Constructor.
+ *
+ * @param componentInfo associated component info
+ */
+ public ComponentModule(ComponentInfo componentInfo) {
+ this.componentInfo = componentInfo;
+ }
+
+ @Override
+ public void resolve(ComponentModule parent, Map<String, StackModule> allStacks) {
+ ComponentInfo parentInfo = parent.getModuleInfo();
+
+ if (componentInfo.getCommandScript() == null) {
+ componentInfo.setCommandScript(parentInfo.getCommandScript());
+ }
+ if (componentInfo.getDisplayName() == null) {
+ componentInfo.setDisplayName(parentInfo.getDisplayName());
+ }
+ if (componentInfo.getConfigDependencies() == null) {
+ componentInfo.setConfigDependencies(parentInfo.getConfigDependencies());
+ }
+ if (componentInfo.getClientConfigFiles() == null) {
+ componentInfo.setClientConfigFiles(parentInfo.getClientConfigFiles());
+ }
+ if (componentInfo.getClientsToUpdateConfigs() == null) {
+ componentInfo.setClientsToUpdateConfigs(parentInfo.getClientsToUpdateConfigs());
+ }
+ if (componentInfo.getCategory() == null) {
+ componentInfo.setCategory(parentInfo.getCategory());
+ }
+ if (componentInfo.getCardinality() == null) {
+ componentInfo.setCardinality(parentInfo.getCardinality());
+ }
+ if (componentInfo.getAutoDeploy() == null) {
+ componentInfo.setAutoDeploy(parentInfo.getAutoDeploy());
+ }
+
+ mergeComponentDependencies(parentInfo.getDependencies(),
+ componentInfo.getDependencies());
+
+ mergeCustomCommands(parentInfo.getCustomCommands(),
+ componentInfo.getCustomCommands());
+ }
+
+ @Override
+ public ComponentInfo getModuleInfo() {
+ return componentInfo;
+ }
+
+ @Override
+ public boolean isDeleted() {
+ return componentInfo.isDeleted();
+ }
+
+ @Override
+ public String getId() {
+ return componentInfo.getName();
+ }
+
+ /**
+ * Merge component dependencies.
+ * Child dependencies override a parent dependency of the same name.
+ *
+ * @param parentDependencies parent dependencies
+ * @param childDependencies child dependencies
+ */
+ //todo: currently there is no way to remove an inherited dependency
+ private void mergeComponentDependencies(List<DependencyInfo> parentDependencies,
+ List<DependencyInfo> childDependencies) {
+
+ Collection<String> existingNames = new HashSet<String>();
+
+ for (DependencyInfo childDependency : childDependencies) {
+ existingNames.add(childDependency.getName());
+ }
+ if (parentDependencies != null) {
+ for (DependencyInfo parentDependency : parentDependencies) {
+ if (! existingNames.contains(parentDependency.getName())) {
+ childDependencies.add(parentDependency);
+ }
+ }
+ }
+ }
+
+ /**
+ * Merge custom commands.
+ * Child commands override a parent command of the same name.
+ *
+ * @param parentCommands parent commands
+ * @param childCommands child commands
+ */
+ //todo: duplicated in ServiceModule
+ //todo: currently there is no way to remove an inherited custom command
+ private void mergeCustomCommands(List<CustomCommandDefinition> parentCommands,
+ List<CustomCommandDefinition> childCommands) {
+
+ Collection<String> existingNames = new HashSet<String>();
+
+ for (CustomCommandDefinition childCmd : childCommands) {
+ existingNames.add(childCmd.getName());
+ }
+ if (parentCommands != null) {
+ for (CustomCommandDefinition parentCmd : parentCommands) {
+ if (! existingNames.contains(parentCmd.getName())) {
+ childCommands.add(parentCmd);
+ }
+ }
+ }
+ }
+}
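Both merge helpers above follow the same rule: a child entry shadows a parent entry of the same name, and any parent entry the child does not define is appended to the child list. A short sketch (names only, standing in for DependencyInfo and CustomCommandDefinition):

import java.util.*;

public class MergeByNameDemo {
    public static void main(String[] args) {
        List<String> parent = new ArrayList<String>(Arrays.asList("DECOMMISSION", "RESTART"));
        List<String> child  = new ArrayList<String>(Arrays.asList("DECOMMISSION"));

        // Same rule as mergeCustomCommands()/mergeComponentDependencies():
        // the child's DECOMMISSION shadows the parent's; RESTART, which the
        // child does not define, is appended to the child list.
        Set<String> existing = new HashSet<String>(child);
        for (String p : parent) {
            if (!existing.contains(p)) {
                child.add(p);
            }
        }
        System.out.println(child); // [DECOMMISSION, RESTART]
    }
}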
[02/11] ambari git commit: AMBARI-7175. Add explicit stack service inheritance
Posted by js...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/metrics.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/metrics.json b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/metrics.json
new file mode 100644
index 0000000..2938552
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks_with_cycle/OTHER/1.0/services/HDFS/metrics.json
@@ -0,0 +1,7840 @@
+{
+ "NAMENODE": {
+ "Component": [
+ {
+ "type": "ganglia",
+ "metrics": {
+ "metrics/dfs/FSNamesystem/TotalLoad": {
+ "metric": "dfs.FSNamesystem.TotalLoad",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/BlockCapacity": {
+ "metric": "dfs.FSNamesystem.BlockCapacity",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotal": {
+ "metric": "dfs.FSNamesystem.CapacityTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsed": {
+ "metric": "dfs.FSNamesystem.CapacityUsed",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemaining": {
+ "metric": "dfs.FSNamesystem.CapacityRemaining",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityNonDFSUsed": {
+ "metric": "dfs.FSNamesystem.CapacityUsedNonDFS",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/GetListingOps": {
+ "metric": "dfs.namenode.GetListingOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesAppended": {
+ "metric": "dfs.namenode.FilesAppended",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getProtocolVersion_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.getProtocolVersion_num_ops",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/fsync_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.FsyncAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginSuccess_avg_time": {
+ "metric": "ugi.UgiMetrics.LoginSuccessAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/load/load_one": {
+ "metric": "load_one",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/renewLease_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RenewLeaseNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getFileInfo_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetFileInfoAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memNonHeapUsedM": {
+ "metric": "jvm.JvmMetrics.MemNonHeapUsedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/complete_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.CompleteAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setPermission_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetPermissionNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotalGB": {
+ "metric": "dfs.FSNamesystem.CapacityTotalGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setOwner_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetOwnerNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getBlockLocations_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetBlockLocationsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/process/proc_run": {
+ "metric": "proc_run",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsedGB": {
+ "metric": "dfs.FSNamesystem.CapacityUsedGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/AddBlockOps": {
+ "metric": "dfs.namenode.AddBlockOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/swap_total": {
+ "metric": "swap_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesDeleted": {
+ "metric": "dfs.namenode.FilesDeleted",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Syncs_avg_time": {
+ "metric": "dfs.namenode.SyncsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsBlocked": {
+ "metric": "jvm.JvmMetrics.ThreadsBlocked",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/RpcQueueTime_num_ops": {
+ "metric": "rpc.rpc.RpcQueueTimeNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/process/proc_total": {
+ "metric": "proc_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/blockReport_avg_time": {
+ "metric": "dfs.namenode.BlockReportAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/disk/part_max_used": {
+ "metric": "part_max_used",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getFileInfo_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetFileInfoNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getEditLogSize_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetEditLogManifestAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginSuccess_num_ops": {
+ "metric": "ugi.UgiMetrics.LoginSuccessNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReceived_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReceivedAndDeletedAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_idle": {
+ "metric": "cpu_idle",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/versionRequest_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.VersionRequestAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_aidle": {
+ "metric": "cpu_aidle",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/mem_free": {
+ "metric": "mem_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/versionRequest_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.VersionRequestNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/addBlock_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.AddBlockNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesCreated": {
+ "metric": "dfs.namenode.FilesCreated",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rename_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RenameAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/network/bytes_in": {
+ "metric": "bytes_in",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setSafeMode_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetSafeModeNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/network/pkts_out": {
+ "metric": "pkts_out",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/memNonHeapCommittedM": {
+ "metric": "jvm.JvmMetrics.MemNonHeapCommittedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_cached": {
+ "metric": "mem_cached",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/disk/disk_total": {
+ "metric": "disk_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setPermission_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetPermissionAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesRenamed": {
+ "metric": "dfs.namenode.FilesRenamed",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/register_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RegisterDatanodeAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setReplication_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.setReplication_num_ops",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/JournalTransactionsBatchedInSync": {
+ "metric": "dfs.namenode.JournalTransactionsBatchedInSync",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/ugi/loginFailure_num_ops": {
+ "metric": "ugi.UgiMetrics.LoginFailureNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/GetBlockLocations": {
+ "metric": "dfs.namenode.GetBlockLocations",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/fsync_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.FsyncNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_wio": {
+ "metric": "cpu_wio",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/create_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.CreateAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/PendingReplicationBlocks": {
+ "metric": "dfs.FSNamesystem.PendingReplicationBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_speed": {
+ "metric": "cpu_speed",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/delete_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.DeleteAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FileInfoOps": {
+ "metric": "dfs.namenode.FileInfoOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/sendHeartbeat_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SendHeartbeatNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/DeleteFileOps": {
+ "metric": "dfs.namenode.DeleteFileOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/RpcProcessingTime_avg_time": {
+ "metric": "rpc.rpc.RpcProcessingTimeAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReport_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReportNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setSafeMode_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetSafeModeAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthenticationSuccesses": {
+ "metric": "rpc.rpc.RpcAuthenticationSuccesses",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/PendingDeletionBlocks": {
+ "metric": "dfs.FSNamesystem.PendingDeletionBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthenticationFailures": {
+ "metric": "rpc.rpc.RpcAuthenticationFailures",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/network/pkts_in": {
+ "metric": "pkts_in",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/mem_total": {
+ "metric": "mem_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getEditLogSize_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetEditLogManifestNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memHeapCommittedM": {
+ "metric": "jvm.JvmMetrics.MemHeapCommittedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesInGetListingOps": {
+ "metric": "dfs.namenode.FilesInGetListingOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsRunnable": {
+ "metric": "jvm.JvmMetrics.ThreadsRunnable",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/complete_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.CompleteNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsNew": {
+ "metric": "jvm.JvmMetrics.ThreadsNew",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollFsImage_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.rollFsImage_num_ops",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthorizationFailures": {
+ "metric": "rpc.rpc.RpcAuthorizationFailures",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Syncs_num_ops": {
+ "metric": "dfs.namenode.SyncsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/RpcQueueTime_avg_time": {
+ "metric": "rpc.rpc.RpcQueueTimeAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReceived_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReceivedAndDeletedNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setReplication_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.setReplication_avg_time",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollEditLog_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RollEditLogAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/SentBytes": {
+ "metric": "rpc.rpc.SentBytes",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/FilesTotal": {
+ "metric": "dfs.FSNamesystem.FilesTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logWarn": {
+ "metric": "jvm.JvmMetrics.LogWarn",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/ExcessBlocks": {
+ "metric": "dfs.FSNamesystem.ExcessBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsTimedWaiting": {
+ "metric": "jvm.JvmMetrics.ThreadsTimedWaiting",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/gcCount": {
+ "metric": "jvm.JvmMetrics.GcCount",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/ReceivedBytes": {
+ "metric": "rpc.rpc.ReceivedBytes",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_nice": {
+ "metric": "cpu_nice",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/blockReport_num_ops": {
+ "metric": "dfs.namenode.BlockReportNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/SafemodeTime": {
+ "metric": "dfs.namenode.SafemodeTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollFsImage_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.rollFsImage_avg_time",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/mkdirs_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.MkdirsAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/NumOpenConnections": {
+ "metric": "rpc.rpc.NumOpenConnections",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memHeapUsedM": {
+ "metric": "jvm.JvmMetrics.MemHeapUsedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/ScheduledReplicationBlocks": {
+ "metric": "dfs.FSNamesystem.ScheduledReplicationBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsWaiting": {
+ "metric": "jvm.JvmMetrics.ThreadsWaiting",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/disk/disk_free": {
+ "metric": "disk_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/BlocksTotal": {
+ "metric": "dfs.FSNamesystem.BlocksTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_buffers": {
+ "metric": "mem_buffers",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/gcTimeMillis": {
+ "metric": "jvm.JvmMetrics.GcTimeMillis",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getBlockLocations_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetBlockLocationsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Transactions_num_ops": {
+ "metric": "dfs.namenode.TransactionsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/create_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.CreateNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsTerminated": {
+ "metric": "jvm.JvmMetrics.ThreadsTerminated",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/network/bytes_out": {
+ "metric": "bytes_out",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_user": {
+ "metric": "cpu_user",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/swap_free": {
+ "metric": "swap_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/load/load_five": {
+ "metric": "load_five",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_system": {
+ "metric": "cpu_system",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemainingGB": {
+ "metric": "dfs.FSNamesystem.CapacityRemainingGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Transactions_avg_time": {
+ "metric": "dfs.namenode.TransactionsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/boottime": {
+ "metric": "boottime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/MissingBlocks": {
+ "metric": "dfs.FSNamesystem.MissingBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/callQueueLen": {
+ "metric": "rpc.rpc.CallQueueLength",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/delete_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.DeleteNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CorruptBlocks": {
+ "metric": "dfs.FSNamesystem.CorruptBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rename_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RenameNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReport_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReportAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/mkdirs_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.MkdirsNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/load/load_fifteen": {
+ "metric": "load_fifteen",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/logInfo": {
+ "metric": "jvm.JvmMetrics.LogInfo",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/fsImageLoadTime": {
+ "metric": "dfs.namenode.FsImageLoadTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getListing_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetListingNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blocksBeingWrittenReport_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.blocksBeingWrittenReport_num_ops",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollEditLog_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RollEditLogNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/addBlock_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.AddBlockAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blocksBeingWrittenReport_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.blocksBeingWrittenReport_avg_time",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setOwner_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetOwnerAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/RpcProcessingTime_num_ops": {
+ "metric": "rpc.rpc.RpcProcessingTimeNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_shared": {
+ "metric": "mem_shared",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/UnderReplicatedBlocks": {
+ "metric": "dfs.FSNamesystem.UnderReplicatedBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/sendHeartbeat_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SendHeartbeatAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/CreateFileOps": {
+ "metric": "dfs.namenode.CreateFileOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logError": {
+ "metric": "jvm.JvmMetrics.LogError",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginFailure_avg_time": {
+ "metric": "ugi.UgiMetrics.LoginFailureAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_num": {
+ "metric": "cpu_num",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getProtocolVersion_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.getProtocolVersion_avg_time",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/register_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RegisterDatanodeNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthorizationSuccesses": {
+ "metric": "rpc.rpc.RpcAuthorizationSuccesses",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getListing_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetListingAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logFatal": {
+ "metric": "jvm.JvmMetrics.LogFatal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/renewLease_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RenewLeaseAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ }
+ }
+ },
+ {
+ "type": "jmx",
+ "metrics": {
+ "metrics/dfs/namenode/Used": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Used",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/TotalLoad": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.TotalLoad",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memMaxM":{
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemMaxM",
+ "pointInTime" : true,
+ "temporal" : false
+ },
+ "metrics/dfs/FSNamesystem/BlockCapacity": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.BlockCapacity",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/TotalFiles": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.TotalFiles",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/HostName": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.HostName",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/GetListingOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.GetListingOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/UpgradeFinalized": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.UpgradeFinalized",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getProtocolVersion_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getProtocolVersion_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/fsync_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.fsync_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/ugi/loginSuccess_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=ugi.loginSuccess_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/Safemode": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Safemode",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/CorruptBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CorruptBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/LiveNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.LiveNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/renewLease_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.renewLease_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getFileInfo_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getFileInfo_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemaining": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/PercentRemaining": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.PercentRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memNonHeapUsedM": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemNonHeapUsedM",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/complete_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.complete_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotalGB": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityTotalGB",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getBlockLocations_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getBlockLocations_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/AddBlockOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.AddBlockOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsedGB": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityUsedGB",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Syncs_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.Syncs_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsBlocked": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsBlocked",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/RpcQueueTime_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.RpcQueueTime_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/PercentUsed": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.PercentUsed",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/DecomNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.DecomNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/blockReport_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.blockReport_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/NonDfsUsedSpace": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.NonDfsUsedSpace",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/UpgradeFinalized": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.UpgradeFinalized",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getFileInfo_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getFileInfo_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getEditLogSize_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getEditLogSize_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/ugi/loginSuccess_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=ugi.loginSuccess_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/blockReceived_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.blockReceived_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Safemode": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Safemode",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/FilesCreated": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.FilesCreated",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/addBlock_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.addBlock_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/DecomNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.DecomNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsed": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityUsed",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/NonHeapMemoryUsed": {
+ "metric": "java.lang:type=Memory.NonHeapMemoryUsage[used]",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memNonHeapCommittedM": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemNonHeapCommittedM",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/DeadNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.DeadNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/PercentUsed": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.PercentUsed",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Free": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Free",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Total": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Total",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/GetBlockLocations": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.GetBlockLocations",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/ugi/loginFailure_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=ugi.loginFailure_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/fsync_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.fsync_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/HeapMemoryMax": {
+ "metric": "java.lang:type=Memory.HeapMemoryUsage[max]",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/create_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.create_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/PendingReplicationBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.PendingReplicationBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/UnderReplicatedBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.UnderReplicatedBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/FileInfoOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.FileInfoOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/MissingBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.MissingBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/sendHeartbeat_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.sendHeartbeat_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/RpcProcessingTime_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.RpcProcessingTime_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/blockReport_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.blockReport_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/CapacityRemaining": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemState.CapacityRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/rpcAuthenticationSuccesses": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.rpcAuthenticationSuccesses",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/PendingDeletionBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.PendingDeletionBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/rpcAuthenticationFailures": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.rpcAuthenticationFailures",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getEditLogSize_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getEditLogSize_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memHeapCommittedM": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemHeapCommittedM",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/FilesInGetListingOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.FilesInGetListingOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsRunnable": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsRunnable",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/BlocksTotal": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.BlocksTotal",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotal": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityTotal",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/complete_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.complete_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/LiveNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.LiveNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsNew": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsNew",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/rollFsImage_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.rollFsImage_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/rpcAuthorizationFailures": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.rpcAuthorizationFailures",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Syncs_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.Syncs_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/StartTime": {
+ "metric": "java.lang:type=Runtime.StartTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/RpcQueueTime_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.RpcQueueTime_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/blockReceived_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.blockReceived_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/rollEditLog_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.rollEditLog_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/DeadNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.DeadNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/SentBytes": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.SentBytes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/HeapMemoryUsed": {
+ "metric": "java.lang:type=Memory.HeapMemoryUsage[used]",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/FilesTotal": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.FilesTotal",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Version": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Version",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/logWarn": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.LogWarn",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/ExcessBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.ExcessBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsTimedWaiting": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsTimedWaiting",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/gcCount": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.GcCount",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/PercentRemaining": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.PercentRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/ReceivedBytes": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.ReceivedBytes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/blockReport_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.blockReport_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/NonHeapMemoryMax": {
+ "metric": "java.lang:type=Memory.NonHeapMemoryUsage[max]",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/rollFsImage_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.rollFsImage_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/NumOpenConnections": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.NumOpenConnections",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memHeapUsedM": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemHeapUsedM",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/ScheduledReplicationBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.ScheduledReplicationBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsWaiting": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsWaiting",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/BlocksTotal": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.BlocksTotal",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/gcTimeMillis": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.GcTimeMillis",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getBlockLocations_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getBlockLocations_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Transactions_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.Transactions_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/create_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.create_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/CapacityTotal": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Total",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsTerminated": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsTerminated",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemainingGB": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityRemainingGB",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Transactions_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.Transactions_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/MissingBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.MissingBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Threads": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Threads",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/callQueueLen": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.callQueueLen",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CorruptBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CorruptBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/blockReport_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.blockReport_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/TotalFiles": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.TotalFiles",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/logInfo": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.LogInfo",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/NameDirStatuses": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.NameDirStatuses",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getListing_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getListing_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/rollEditLog_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.rollEditLog_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/addBlock_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.addBlock_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/RpcProcessingTime_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.RpcProcessingTime_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/CapacityUsed": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Used",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/UnderReplicatedBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.UnderReplicatedBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/sendHeartbeat_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.sendHeartbeat_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/CreateFileOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.CreateFileOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/logError": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.LogError",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/ugi/loginFailure_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=ugi.loginFailure_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getProtocolVersion_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getProtocolVersion_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/rpcAuthorizationSuccesses": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.rpcAuthorizationSuccesses",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/Version": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Version",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getListing_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getListing_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/logFatal": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.LogFatal",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/NonDfsUsedSpace": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.NonDfsUsedSpace",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/renewLease_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.renewLease_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/TotalBlocks": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.TotalBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityNonDFSUsed": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityNonDFSUsed",
+ "pointInTime": true,
+ "temporal": false
+ }
+ }
+ }
+ ],
+ "HostComponent": [
+ {
+ "type": "ganglia",
+ "metrics": {
+ "metrics/dfs/FSNamesystem/TotalLoad": {
+ "metric": "dfs.FSNamesystem.TotalLoad",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/BlockCapacity": {
+ "metric": "dfs.FSNamesystem.BlockCapacity",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotal": {
+ "metric": "dfs.FSNamesystem.CapacityTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsed": {
+ "metric": "dfs.FSNamesystem.CapacityUsed",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemaining": {
+ "metric": "dfs.FSNamesystem.CapacityRemaining",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityNonDFSUsed": {
+ "metric": "dfs.FSNamesystem.CapacityUsedNonDFS",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/GetListingOps": {
+ "metric": "dfs.namenode.GetListingOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesAppended": {
+ "metric": "dfs.namenode.FilesAppended",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getProtocolVersion_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.getProtocolVersion_num_ops",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/fsync_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.FsyncAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginSuccess_avg_time": {
+ "metric": "ugi.UgiMetrics.LoginSuccessAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/load/load_one": {
+ "metric": "load_one",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/renewLease_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RenewLeaseNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getFileInfo_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetFileInfoAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memNonHeapUsedM": {
+ "metric": "jvm.JvmMetrics.MemNonHeapUsedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/complete_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.CompleteAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setPermission_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetPermissionNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotalGB": {
+ "metric": "dfs.FSNamesystem.CapacityTotalGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setOwner_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetOwnerNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getBlockLocations_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetBlockLocationsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/process/proc_run": {
+ "metric": "proc_run",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsedGB": {
+ "metric": "dfs.FSNamesystem.CapacityUsedGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/AddBlockOps": {
+ "metric": "dfs.namenode.AddBlockOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/swap_total": {
+ "metric": "swap_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesDeleted": {
+ "metric": "dfs.namenode.FilesDeleted",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Syncs_avg_time": {
+ "metric": "dfs.namenode.SyncsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsBlocked": {
+ "metric": "jvm.JvmMetrics.ThreadsBlocked",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/RpcQueueTime_num_ops": {
+ "metric": "rpc.rpc.RpcQueueTimeNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/process/proc_total": {
+ "metric": "proc_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/blockReport_avg_time": {
+ "metric": "dfs.namenode.BlockReportAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/disk/part_max_used": {
+ "metric": "part_max_used",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getFileInfo_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetFileInfoNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getEditLogSize_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetEditLogManifestAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginSuccess_num_ops": {
+ "metric": "ugi.UgiMetrics.LoginSuccessNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReceived_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReceivedAndDeletedAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_idle": {
+ "metric": "cpu_idle",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/versionRequest_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.VersionRequestAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_aidle": {
+ "metric": "cpu_aidle",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/mem_free": {
+ "metric": "mem_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/versionRequest_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.VersionRequestNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/addBlock_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.AddBlockNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesCreated": {
+ "metric": "dfs.namenode.FilesCreated",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rename_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RenameAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/network/bytes_in": {
+ "metric": "bytes_in",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setSafeMode_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetSafeModeNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/network/pkts_out": {
+ "metric": "pkts_out",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/memNonHeapCommittedM": {
+ "metric": "jvm.JvmMetrics.MemNonHeapCommittedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_cached": {
+ "metric": "mem_cached",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/disk/disk_total": {
+ "metric": "disk_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setPermission_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetPermissionAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesRenamed": {
+ "metric": "dfs.namenode.FilesRenamed",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/register_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RegisterDatanodeAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setReplication_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.setReplication_num_ops",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/JournalTransactionsBatchedInSync": {
+ "metric": "dfs.namenode.JournalTransactionsBatchedInSync",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/ugi/loginFailure_num_ops": {
+ "metric": "ugi.UgiMetrics.LoginFailureNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/GetBlockLocations": {
+ "metric": "dfs.namenode.GetBlockLocations",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/fsync_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.FsyncNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_wio": {
+ "metric": "cpu_wio",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/create_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.CreateAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/PendingReplicationBlocks": {
+ "metric": "dfs.FSNamesystem.PendingReplicationBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_speed": {
+ "metric": "cpu_speed",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/delete_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.DeleteAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FileInfoOps": {
+ "metric": "dfs.namenode.FileInfoOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/sendHeartbeat_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SendHeartbeatNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/DeleteFileOps": {
+ "metric": "dfs.namenode.DeleteFileOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/RpcProcessingTime_avg_time": {
+ "metric": "rpc.rpc.RpcProcessingTimeAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReport_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReportNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setSafeMode_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetSafeModeAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthenticationSuccesses": {
+ "metric": "rpc.rpc.RpcAuthenticationSuccesses",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/PendingDeletionBlocks": {
+ "metric": "dfs.FSNamesystem.PendingDeletionBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthenticationFailures": {
+ "metric": "rpc.rpc.RpcAuthenticationFailures",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/network/pkts_in": {
+ "metric": "pkts_in",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/mem_total": {
+ "metric": "mem_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getEditLogSize_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetEditLogManifestNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memHeapCommittedM": {
+ "metric": "jvm.JvmMetrics.MemHeapCommittedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesInGetListingOps": {
+ "metric": "dfs.namenode.FilesInGetListingOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsRunnable": {
+ "metric": "jvm.JvmMetrics.ThreadsRunnable",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/complete_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.CompleteNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsNew": {
+ "metric": "jvm.JvmMetrics.ThreadsNew",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollFsImage_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.rollFsImage_num_ops",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthorizationFailures": {
+ "metric": "rpc.rpc.RpcAuthorizationFailures",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Syncs_num_ops": {
+ "metric": "dfs.namenode.SyncsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/RpcQueueTime_avg_time": {
+ "metric": "rpc.rpc.RpcQueueTimeAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReceived_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReceivedAndDeletedNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setReplication_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.setReplication_avg_time",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollEditLog_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RollEditLogAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/SentBytes": {
+ "metric": "rpc.rpc.SentBytes",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/FilesTotal": {
+ "metric": "dfs.FSNamesystem.FilesTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logWarn": {
+ "metric": "jvm.JvmMetrics.LogWarn",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/ExcessBlocks": {
+ "metric": "dfs.FSNamesystem.ExcessBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsTimedWaiting": {
+ "metric": "jvm.JvmMetrics.ThreadsTimedWaiting",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/gcCount": {
+ "metric": "jvm.JvmMetrics.GcCount",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/ReceivedBytes": {
+ "metric": "rpc.rpc.ReceivedBytes",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_nice": {
+ "metric": "cpu_nice",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/blockReport_num_ops": {
+ "metric": "dfs.namenode.BlockReportNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/SafemodeTime": {
+ "metric": "dfs.namenode.SafemodeTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollFsImage_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.rollFsImage_avg_time",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/mkdirs_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.MkdirsAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/NumOpenConnections": {
+ "metric": "rpc.rpc.NumOpenConnections",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memHeapUsedM": {
+ "metric": "jvm.JvmMetrics.MemHeapUsedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/ScheduledReplicationBlocks": {
+ "metric": "dfs.FSNamesystem.ScheduledReplicationBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsWaiting": {
+ "metric": "jvm.JvmMetrics.ThreadsWaiting",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/disk/disk_free": {
+ "metric": "disk_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/BlocksTotal": {
+ "metric": "dfs.FSNamesystem.BlocksTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_buffers": {
+ "metric": "mem_buffers",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/gcTimeMillis": {
+ "metric": "jvm.JvmMetrics.GcTimeMillis",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getBlockLocations_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetBlockLocationsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Transactions_num_ops": {
+ "metric": "dfs.namenode.TransactionsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/create_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.CreateNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsTerminated": {
+ "metric": "jvm.JvmMetrics.ThreadsTerminated",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/network/bytes_out": {
+ "metric": "bytes_out",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_user": {
+ "metric": "cpu_user",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/swap_free": {
+ "metric": "swap_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/load/load_five": {
+ "metric": "load_five",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_system": {
+ "metric": "cpu_system",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemainingGB": {
+ "metric": "dfs.FSNamesystem.CapacityRemainingGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Transactions_avg_time": {
+ "metric": "dfs.namenode.TransactionsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/boottime": {
+ "metric": "boottime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/MissingBlocks": {
+ "metric": "dfs.FSNamesystem.MissingBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/callQueueLen": {
+ "metric": "rpc.rpc.CallQueueLength",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/delete_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.DeleteNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CorruptBlocks": {
+ "metric": "dfs.FSNamesystem.CorruptBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rename_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RenameNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReport_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReportAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/mkdirs_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.MkdirsNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/load/load_fifteen": {
+ "metric": "load_fifteen",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/logInfo": {
+ "metric": "jvm.JvmMetrics.LogInfo",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/fsImageLoadTime": {
+ "metric": "dfs.namenode.FsImageLoadTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getListing_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetListingNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blocksBeingWrittenReport_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.blocksBeingWrittenReport_num_ops",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollEditLog_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RollEditLogNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/addBlock_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.AddBlockAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blocksBeingWrittenReport_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.blocksBeingWrittenReport_avg_time",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setOwner_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetOwnerAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/RpcProcessingTime_num_ops": {
+ "metric": "rpc.rpc.RpcProcessingTimeNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_shared": {
+ "metric": "mem_shared",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/UnderReplicatedBlocks": {
+ "metric": "dfs.FSNamesystem.UnderReplicatedBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/sendHeartbeat_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SendHeartbeatAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/CreateFileOps": {
+ "metric": "dfs.namenode.CreateFileOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logError": {
+ "metric": "jvm.JvmMetrics.LogError",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginFailure_avg_time": {
+ "metric": "ugi.UgiMetrics.LoginFailureAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_num": {
+ "metric": "cpu_num",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getProtocolVersion_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.getProtocolVersion_avg_time",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/register_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RegisterDatanodeNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthorizationSuccesses": {
+ "metric": "rpc.rpc.RpcAuthorizationSuccesses",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getListing_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetListingAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logFatal": {
+ "metric": "jvm.JvmMetrics.LogFatal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/renewLease_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RenewLeaseAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ }
+ }
+ },
+ {
+ "type": "jmx",
+ "metrics": {
+ "metrics/dfs/namenode/Used": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Used",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/TotalLoad": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystem.TotalLoad",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memMaxM":{
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemMaxM",
+ "pointInTime" : true,
+ "temporal" : false
+ },
+ "metrics/dfs/FSNamesystem/BlockCapacity": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystem.BlockCapacity",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/TotalFiles": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.TotalFiles",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/GetListingOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeActivity.GetListingOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/HostName": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeActivity.tag.Hostname",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/runtime/StartTime": {
+ "metric": "java.lang:type=Runtime.StartTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/UpgradeFinalized": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.UpgradeFinalized",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getProtocolVersion_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.VersionRequestNumOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/fsync_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.FsyncAvgTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/ugi/loginSuccess_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=UgiMetrics.LoginSuccessAvgTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/renewLease_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.renewLease_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemaining": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystem.CapacityRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getFileInfo_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.GetFileInfoAvgTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/PercentRemaining": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.PercentRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memNonHeapUsedM": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemNonHeapUsedM",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/complete_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.CompleteAvgTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotalGB": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystem.CapacityTotalGB",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getBlockLocations_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.GetBlockLocationsNumOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsedGB": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystem.CapacityUsedGB",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/AddBlockOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNo
<TRUNCATED>
[04/11] ambari git commit: AMBARI-7175. Add explicit stack service inheritance
Posted by js...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/metrics.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/metrics.json b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/metrics.json
new file mode 100644
index 0000000..2938552
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/services/HDFS/metrics.json
@@ -0,0 +1,7840 @@
+{
+ "NAMENODE": {
+ "Component": [
+ {
+ "type": "ganglia",
+ "metrics": {
+ "metrics/dfs/FSNamesystem/TotalLoad": {
+ "metric": "dfs.FSNamesystem.TotalLoad",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/BlockCapacity": {
+ "metric": "dfs.FSNamesystem.BlockCapacity",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotal": {
+ "metric": "dfs.FSNamesystem.CapacityTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsed": {
+ "metric": "dfs.FSNamesystem.CapacityUsed",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemaining": {
+ "metric": "dfs.FSNamesystem.CapacityRemaining",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityNonDFSUsed": {
+ "metric": "dfs.FSNamesystem.CapacityUsedNonDFS",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/GetListingOps": {
+ "metric": "dfs.namenode.GetListingOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesAppended": {
+ "metric": "dfs.namenode.FilesAppended",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getProtocolVersion_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.getProtocolVersion_num_ops",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/fsync_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.FsyncAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginSuccess_avg_time": {
+ "metric": "ugi.UgiMetrics.LoginSuccessAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/load/load_one": {
+ "metric": "load_one",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/renewLease_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RenewLeaseNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getFileInfo_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetFileInfoAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memNonHeapUsedM": {
+ "metric": "jvm.JvmMetrics.MemNonHeapUsedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/complete_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.CompleteAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setPermission_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetPermissionNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotalGB": {
+ "metric": "dfs.FSNamesystem.CapacityTotalGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setOwner_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetOwnerNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getBlockLocations_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetBlockLocationsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/process/proc_run": {
+ "metric": "proc_run",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsedGB": {
+ "metric": "dfs.FSNamesystem.CapacityUsedGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/AddBlockOps": {
+ "metric": "dfs.namenode.AddBlockOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/swap_total": {
+ "metric": "swap_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesDeleted": {
+ "metric": "dfs.namenode.FilesDeleted",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Syncs_avg_time": {
+ "metric": "dfs.namenode.SyncsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsBlocked": {
+ "metric": "jvm.JvmMetrics.ThreadsBlocked",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/RpcQueueTime_num_ops": {
+ "metric": "rpc.rpc.RpcQueueTimeNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/process/proc_total": {
+ "metric": "proc_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/blockReport_avg_time": {
+ "metric": "dfs.namenode.BlockReportAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/disk/part_max_used": {
+ "metric": "part_max_used",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getFileInfo_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetFileInfoNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getEditLogSize_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetEditLogManifestAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginSuccess_num_ops": {
+ "metric": "ugi.UgiMetrics.LoginSuccessNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReceived_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReceivedAndDeletedAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_idle": {
+ "metric": "cpu_idle",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/versionRequest_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.VersionRequestAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_aidle": {
+ "metric": "cpu_aidle",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/mem_free": {
+ "metric": "mem_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/versionRequest_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.VersionRequestNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/addBlock_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.AddBlockNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesCreated": {
+ "metric": "dfs.namenode.FilesCreated",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rename_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RenameAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/network/bytes_in": {
+ "metric": "bytes_in",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setSafeMode_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetSafeModeNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/network/pkts_out": {
+ "metric": "pkts_out",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/memNonHeapCommittedM": {
+ "metric": "jvm.JvmMetrics.MemNonHeapCommittedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_cached": {
+ "metric": "mem_cached",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/disk/disk_total": {
+ "metric": "disk_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setPermission_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetPermissionAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesRenamed": {
+ "metric": "dfs.namenode.FilesRenamed",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/register_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RegisterDatanodeAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setReplication_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.setReplication_num_ops",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/JournalTransactionsBatchedInSync": {
+ "metric": "dfs.namenode.JournalTransactionsBatchedInSync",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/ugi/loginFailure_num_ops": {
+ "metric": "ugi.UgiMetrics.LoginFailureNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/GetBlockLocations": {
+ "metric": "dfs.namenode.GetBlockLocations",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/fsync_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.FsyncNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_wio": {
+ "metric": "cpu_wio",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/create_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.CreateAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/PendingReplicationBlocks": {
+ "metric": "dfs.FSNamesystem.PendingReplicationBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_speed": {
+ "metric": "cpu_speed",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/delete_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.DeleteAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FileInfoOps": {
+ "metric": "dfs.namenode.FileInfoOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/sendHeartbeat_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SendHeartbeatNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/DeleteFileOps": {
+ "metric": "dfs.namenode.DeleteFileOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/RpcProcessingTime_avg_time": {
+ "metric": "rpc.rpc.RpcProcessingTimeAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReport_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReportNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setSafeMode_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetSafeModeAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthenticationSuccesses": {
+ "metric": "rpc.rpc.RpcAuthenticationSuccesses",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/PendingDeletionBlocks": {
+ "metric": "dfs.FSNamesystem.PendingDeletionBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthenticationFailures": {
+ "metric": "rpc.rpc.RpcAuthenticationFailures",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/network/pkts_in": {
+ "metric": "pkts_in",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/mem_total": {
+ "metric": "mem_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getEditLogSize_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetEditLogManifestNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memHeapCommittedM": {
+ "metric": "jvm.JvmMetrics.MemHeapCommittedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesInGetListingOps": {
+ "metric": "dfs.namenode.FilesInGetListingOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsRunnable": {
+ "metric": "jvm.JvmMetrics.ThreadsRunnable",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/complete_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.CompleteNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsNew": {
+ "metric": "jvm.JvmMetrics.ThreadsNew",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollFsImage_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.rollFsImage_num_ops",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthorizationFailures": {
+ "metric": "rpc.rpc.RpcAuthorizationFailures",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Syncs_num_ops": {
+ "metric": "dfs.namenode.SyncsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/RpcQueueTime_avg_time": {
+ "metric": "rpc.rpc.RpcQueueTimeAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReceived_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReceivedAndDeletedNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setReplication_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.setReplication_avg_time",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollEditLog_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RollEditLogAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/SentBytes": {
+ "metric": "rpc.rpc.SentBytes",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/FilesTotal": {
+ "metric": "dfs.FSNamesystem.FilesTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logWarn": {
+ "metric": "jvm.JvmMetrics.LogWarn",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/ExcessBlocks": {
+ "metric": "dfs.FSNamesystem.ExcessBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsTimedWaiting": {
+ "metric": "jvm.JvmMetrics.ThreadsTimedWaiting",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/gcCount": {
+ "metric": "jvm.JvmMetrics.GcCount",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/ReceivedBytes": {
+ "metric": "rpc.rpc.ReceivedBytes",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_nice": {
+ "metric": "cpu_nice",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/blockReport_num_ops": {
+ "metric": "dfs.namenode.BlockReportNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/SafemodeTime": {
+ "metric": "dfs.namenode.SafemodeTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollFsImage_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.rollFsImage_avg_time",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/mkdirs_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.MkdirsAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/NumOpenConnections": {
+ "metric": "rpc.rpc.NumOpenConnections",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memHeapUsedM": {
+ "metric": "jvm.JvmMetrics.MemHeapUsedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/ScheduledReplicationBlocks": {
+ "metric": "dfs.FSNamesystem.ScheduledReplicationBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsWaiting": {
+ "metric": "jvm.JvmMetrics.ThreadsWaiting",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/disk/disk_free": {
+ "metric": "disk_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/BlocksTotal": {
+ "metric": "dfs.FSNamesystem.BlocksTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_buffers": {
+ "metric": "mem_buffers",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/gcTimeMillis": {
+ "metric": "jvm.JvmMetrics.GcTimeMillis",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getBlockLocations_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetBlockLocationsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Transactions_num_ops": {
+ "metric": "dfs.namenode.TransactionsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/create_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.CreateNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsTerminated": {
+ "metric": "jvm.JvmMetrics.ThreadsTerminated",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/network/bytes_out": {
+ "metric": "bytes_out",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_user": {
+ "metric": "cpu_user",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/swap_free": {
+ "metric": "swap_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/load/load_five": {
+ "metric": "load_five",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_system": {
+ "metric": "cpu_system",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemainingGB": {
+ "metric": "dfs.FSNamesystem.CapacityRemainingGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Transactions_avg_time": {
+ "metric": "dfs.namenode.TransactionsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/boottime": {
+ "metric": "boottime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/MissingBlocks": {
+ "metric": "dfs.FSNamesystem.MissingBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/callQueueLen": {
+ "metric": "rpc.rpc.CallQueueLength",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/delete_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.DeleteNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CorruptBlocks": {
+ "metric": "dfs.FSNamesystem.CorruptBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rename_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RenameNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReport_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReportAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/mkdirs_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.MkdirsNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/load/load_fifteen": {
+ "metric": "load_fifteen",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/logInfo": {
+ "metric": "jvm.JvmMetrics.LogInfo",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/fsImageLoadTime": {
+ "metric": "dfs.namenode.FsImageLoadTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getListing_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetListingNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blocksBeingWrittenReport_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.blocksBeingWrittenReport_num_ops",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollEditLog_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RollEditLogNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/addBlock_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.AddBlockAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blocksBeingWrittenReport_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.blocksBeingWrittenReport_avg_time",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setOwner_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetOwnerAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/RpcProcessingTime_num_ops": {
+ "metric": "rpc.rpc.RpcProcessingTimeNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_shared": {
+ "metric": "mem_shared",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/UnderReplicatedBlocks": {
+ "metric": "dfs.FSNamesystem.UnderReplicatedBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/sendHeartbeat_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SendHeartbeatAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/CreateFileOps": {
+ "metric": "dfs.namenode.CreateFileOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logError": {
+ "metric": "jvm.JvmMetrics.LogError",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginFailure_avg_time": {
+ "metric": "ugi.UgiMetrics.LoginFailureAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_num": {
+ "metric": "cpu_num",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getProtocolVersion_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.getProtocolVersion_avg_time",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/register_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RegisterDatanodeNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthorizationSuccesses": {
+ "metric": "rpc.rpc.RpcAuthorizationSuccesses",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getListing_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetListingAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logFatal": {
+ "metric": "jvm.JvmMetrics.LogFatal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/renewLease_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RenewLeaseAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ }
+ }
+ },
+ {
+ "type": "jmx",
+ "metrics": {
+ "metrics/dfs/namenode/Used": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Used",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/TotalLoad": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.TotalLoad",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memMaxM":{
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemMaxM",
+ "pointInTime" : true,
+ "temporal" : false
+ },
+ "metrics/dfs/FSNamesystem/BlockCapacity": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.BlockCapacity",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/TotalFiles": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.TotalFiles",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/HostName": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.HostName",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/GetListingOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.GetListingOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/UpgradeFinalized": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.UpgradeFinalized",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getProtocolVersion_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getProtocolVersion_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/fsync_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.fsync_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/ugi/loginSuccess_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=ugi.loginSuccess_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/Safemode": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Safemode",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/CorruptBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CorruptBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/LiveNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.LiveNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/renewLease_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.renewLease_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getFileInfo_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getFileInfo_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemaining": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/PercentRemaining": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.PercentRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memNonHeapUsedM": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemNonHeapUsedM",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/complete_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.complete_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotalGB": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityTotalGB",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getBlockLocations_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getBlockLocations_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/AddBlockOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.AddBlockOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsedGB": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityUsedGB",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Syncs_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.Syncs_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsBlocked": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsBlocked",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/RpcQueueTime_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.RpcQueueTime_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/PercentUsed": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.PercentUsed",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/DecomNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.DecomNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/blockReport_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.blockReport_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/NonDfsUsedSpace": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.NonDfsUsedSpace",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/UpgradeFinalized": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.UpgradeFinalized",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getFileInfo_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getFileInfo_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getEditLogSize_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getEditLogSize_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/ugi/loginSuccess_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=ugi.loginSuccess_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/blockReceived_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.blockReceived_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Safemode": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Safemode",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/FilesCreated": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.FilesCreated",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/addBlock_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.addBlock_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/DecomNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.DecomNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsed": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityUsed",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/NonHeapMemoryUsed": {
+ "metric": "java.lang:type=Memory.NonHeapMemoryUsage[used]",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memNonHeapCommittedM": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemNonHeapCommittedM",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/DeadNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.DeadNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/PercentUsed": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.PercentUsed",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Free": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Free",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Total": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Total",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/GetBlockLocations": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.GetBlockLocations",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/ugi/loginFailure_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=ugi.loginFailure_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/fsync_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.fsync_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/HeapMemoryMax": {
+ "metric": "java.lang:type=Memory.HeapMemoryUsage[max]",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/create_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.create_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/PendingReplicationBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.PendingReplicationBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/UnderReplicatedBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.UnderReplicatedBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/FileInfoOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.FileInfoOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/MissingBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.MissingBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/sendHeartbeat_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.sendHeartbeat_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/RpcProcessingTime_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.RpcProcessingTime_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/blockReport_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.blockReport_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/CapacityRemaining": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemState.CapacityRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/rpcAuthenticationSuccesses": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.rpcAuthenticationSuccesses",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/PendingDeletionBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.PendingDeletionBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/rpcAuthenticationFailures": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.rpcAuthenticationFailures",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getEditLogSize_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getEditLogSize_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memHeapCommittedM": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemHeapCommittedM",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/FilesInGetListingOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.FilesInGetListingOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsRunnable": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsRunnable",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/BlocksTotal": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.BlocksTotal",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotal": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityTotal",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/complete_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.complete_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/LiveNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.LiveNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsNew": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsNew",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/rollFsImage_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.rollFsImage_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/rpcAuthorizationFailures": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.rpcAuthorizationFailures",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Syncs_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.Syncs_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/StartTime": {
+ "metric": "java.lang:type=Runtime.StartTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/RpcQueueTime_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.RpcQueueTime_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/blockReceived_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.blockReceived_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/rollEditLog_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.rollEditLog_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/DeadNodes": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.DeadNodes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/SentBytes": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.SentBytes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/HeapMemoryUsed": {
+ "metric": "java.lang:type=Memory.HeapMemoryUsage[used]",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/FilesTotal": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.FilesTotal",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Version": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Version",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/logWarn": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.LogWarn",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/ExcessBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.ExcessBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsTimedWaiting": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsTimedWaiting",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/gcCount": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.GcCount",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/PercentRemaining": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.PercentRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/ReceivedBytes": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.ReceivedBytes",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/blockReport_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.blockReport_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/NonHeapMemoryMax": {
+ "metric": "java.lang:type=Memory.NonHeapMemoryUsage[max]",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/rollFsImage_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.rollFsImage_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/NumOpenConnections": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.NumOpenConnections",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memHeapUsedM": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemHeapUsedM",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/ScheduledReplicationBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.ScheduledReplicationBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsWaiting": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsWaiting",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/BlocksTotal": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.BlocksTotal",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/gcTimeMillis": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.GcTimeMillis",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getBlockLocations_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getBlockLocations_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Transactions_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.Transactions_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/create_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.create_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/CapacityTotal": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Total",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/threadsTerminated": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.ThreadsTerminated",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemainingGB": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityRemainingGB",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Transactions_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.Transactions_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/MissingBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.MissingBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/Threads": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Threads",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/callQueueLen": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.callQueueLen",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CorruptBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CorruptBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/blockReport_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.blockReport_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/TotalFiles": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.TotalFiles",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/logInfo": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.LogInfo",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/NameDirStatuses": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.NameDirStatuses",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getListing_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getListing_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/rollEditLog_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.rollEditLog_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/addBlock_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.addBlock_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/RpcProcessingTime_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.RpcProcessingTime_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/CapacityUsed": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Used",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/UnderReplicatedBlocks": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.UnderReplicatedBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/sendHeartbeat_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.sendHeartbeat_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/CreateFileOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNode.CreateFileOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/logError": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.LogError",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/ugi/loginFailure_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=ugi.loginFailure_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getProtocolVersion_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getProtocolVersion_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpc/rpcAuthorizationSuccesses": {
+ "metric": "Hadoop:service=NameNode,name=RpcActivity.rpcAuthorizationSuccesses",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "ServiceComponentInfo/Version": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Version",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getListing_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.getListing_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/logFatal": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.LogFatal",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/NonDfsUsedSpace": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.NonDfsUsedSpace",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/renewLease_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.renewLease_avg_time",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/TotalBlocks": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.TotalBlocks",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityNonDFSUsed": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystemMetrics.CapacityNonDFSUsed",
+ "pointInTime": true,
+ "temporal": false
+ }
+ }
+ }
+ ],
+ "HostComponent": [
+ {
+ "type": "ganglia",
+ "metrics": {
+ "metrics/dfs/FSNamesystem/TotalLoad": {
+ "metric": "dfs.FSNamesystem.TotalLoad",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/BlockCapacity": {
+ "metric": "dfs.FSNamesystem.BlockCapacity",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotal": {
+ "metric": "dfs.FSNamesystem.CapacityTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsed": {
+ "metric": "dfs.FSNamesystem.CapacityUsed",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemaining": {
+ "metric": "dfs.FSNamesystem.CapacityRemaining",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityNonDFSUsed": {
+ "metric": "dfs.FSNamesystem.CapacityUsedNonDFS",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/GetListingOps": {
+ "metric": "dfs.namenode.GetListingOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesAppended": {
+ "metric": "dfs.namenode.FilesAppended",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getProtocolVersion_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.getProtocolVersion_num_ops",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/fsync_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.FsyncAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginSuccess_avg_time": {
+ "metric": "ugi.UgiMetrics.LoginSuccessAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/load/load_one": {
+ "metric": "load_one",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/renewLease_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RenewLeaseNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getFileInfo_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetFileInfoAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memNonHeapUsedM": {
+ "metric": "jvm.JvmMetrics.MemNonHeapUsedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/complete_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.CompleteAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setPermission_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetPermissionNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotalGB": {
+ "metric": "dfs.FSNamesystem.CapacityTotalGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setOwner_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetOwnerNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getBlockLocations_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetBlockLocationsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/process/proc_run": {
+ "metric": "proc_run",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsedGB": {
+ "metric": "dfs.FSNamesystem.CapacityUsedGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/AddBlockOps": {
+ "metric": "dfs.namenode.AddBlockOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/swap_total": {
+ "metric": "swap_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesDeleted": {
+ "metric": "dfs.namenode.FilesDeleted",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Syncs_avg_time": {
+ "metric": "dfs.namenode.SyncsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsBlocked": {
+ "metric": "jvm.JvmMetrics.ThreadsBlocked",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/RpcQueueTime_num_ops": {
+ "metric": "rpc.rpc.RpcQueueTimeNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/process/proc_total": {
+ "metric": "proc_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/blockReport_avg_time": {
+ "metric": "dfs.namenode.BlockReportAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/disk/part_max_used": {
+ "metric": "part_max_used",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getFileInfo_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetFileInfoNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getEditLogSize_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetEditLogManifestAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginSuccess_num_ops": {
+ "metric": "ugi.UgiMetrics.LoginSuccessNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReceived_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReceivedAndDeletedAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_idle": {
+ "metric": "cpu_idle",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/versionRequest_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.VersionRequestAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_aidle": {
+ "metric": "cpu_aidle",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/mem_free": {
+ "metric": "mem_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/versionRequest_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.VersionRequestNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/addBlock_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.AddBlockNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesCreated": {
+ "metric": "dfs.namenode.FilesCreated",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rename_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RenameAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/network/bytes_in": {
+ "metric": "bytes_in",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setSafeMode_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SetSafeModeNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/network/pkts_out": {
+ "metric": "pkts_out",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/memNonHeapCommittedM": {
+ "metric": "jvm.JvmMetrics.MemNonHeapCommittedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_cached": {
+ "metric": "mem_cached",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/disk/disk_total": {
+ "metric": "disk_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setPermission_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetPermissionAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesRenamed": {
+ "metric": "dfs.namenode.FilesRenamed",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/register_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RegisterDatanodeAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setReplication_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.setReplication_num_ops",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/JournalTransactionsBatchedInSync": {
+ "metric": "dfs.namenode.JournalTransactionsBatchedInSync",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/ugi/loginFailure_num_ops": {
+ "metric": "ugi.UgiMetrics.LoginFailureNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/GetBlockLocations": {
+ "metric": "dfs.namenode.GetBlockLocations",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/fsync_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.FsyncNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_wio": {
+ "metric": "cpu_wio",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/create_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.CreateAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/PendingReplicationBlocks": {
+ "metric": "dfs.FSNamesystem.PendingReplicationBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_speed": {
+ "metric": "cpu_speed",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/delete_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.DeleteAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FileInfoOps": {
+ "metric": "dfs.namenode.FileInfoOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/sendHeartbeat_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.SendHeartbeatNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/DeleteFileOps": {
+ "metric": "dfs.namenode.DeleteFileOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/RpcProcessingTime_avg_time": {
+ "metric": "rpc.rpc.RpcProcessingTimeAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReport_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReportNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setSafeMode_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetSafeModeAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthenticationSuccesses": {
+ "metric": "rpc.rpc.RpcAuthenticationSuccesses",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/PendingDeletionBlocks": {
+ "metric": "dfs.FSNamesystem.PendingDeletionBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthenticationFailures": {
+ "metric": "rpc.rpc.RpcAuthenticationFailures",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/network/pkts_in": {
+ "metric": "pkts_in",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/mem_total": {
+ "metric": "mem_total",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getEditLogSize_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetEditLogManifestNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memHeapCommittedM": {
+ "metric": "jvm.JvmMetrics.MemHeapCommittedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/FilesInGetListingOps": {
+ "metric": "dfs.namenode.FilesInGetListingOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsRunnable": {
+ "metric": "jvm.JvmMetrics.ThreadsRunnable",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/complete_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.CompleteNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsNew": {
+ "metric": "jvm.JvmMetrics.ThreadsNew",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollFsImage_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.rollFsImage_num_ops",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthorizationFailures": {
+ "metric": "rpc.rpc.RpcAuthorizationFailures",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Syncs_num_ops": {
+ "metric": "dfs.namenode.SyncsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/RpcQueueTime_avg_time": {
+ "metric": "rpc.rpc.RpcQueueTimeAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReceived_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReceivedAndDeletedNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setReplication_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.setReplication_avg_time",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollEditLog_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RollEditLogAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/SentBytes": {
+ "metric": "rpc.rpc.SentBytes",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/FilesTotal": {
+ "metric": "dfs.FSNamesystem.FilesTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logWarn": {
+ "metric": "jvm.JvmMetrics.LogWarn",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/ExcessBlocks": {
+ "metric": "dfs.FSNamesystem.ExcessBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsTimedWaiting": {
+ "metric": "jvm.JvmMetrics.ThreadsTimedWaiting",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/gcCount": {
+ "metric": "jvm.JvmMetrics.GcCount",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/ReceivedBytes": {
+ "metric": "rpc.rpc.ReceivedBytes",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_nice": {
+ "metric": "cpu_nice",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/blockReport_num_ops": {
+ "metric": "dfs.namenode.BlockReportNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/SafemodeTime": {
+ "metric": "dfs.namenode.SafemodeTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollFsImage_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.rollFsImage_avg_time",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/mkdirs_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.MkdirsAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/NumOpenConnections": {
+ "metric": "rpc.rpc.NumOpenConnections",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/memHeapUsedM": {
+ "metric": "jvm.JvmMetrics.MemHeapUsedM",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/ScheduledReplicationBlocks": {
+ "metric": "dfs.FSNamesystem.ScheduledReplicationBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsWaiting": {
+ "metric": "jvm.JvmMetrics.ThreadsWaiting",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/disk/disk_free": {
+ "metric": "disk_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/BlocksTotal": {
+ "metric": "dfs.FSNamesystem.BlocksTotal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_buffers": {
+ "metric": "mem_buffers",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/gcTimeMillis": {
+ "metric": "jvm.JvmMetrics.GcTimeMillis",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getBlockLocations_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetBlockLocationsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Transactions_num_ops": {
+ "metric": "dfs.namenode.TransactionsNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/create_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.CreateNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/threadsTerminated": {
+ "metric": "jvm.JvmMetrics.ThreadsTerminated",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/network/bytes_out": {
+ "metric": "bytes_out",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_user": {
+ "metric": "cpu_user",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/memory/swap_free": {
+ "metric": "swap_free",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/load/load_five": {
+ "metric": "load_five",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_system": {
+ "metric": "cpu_system",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemainingGB": {
+ "metric": "dfs.FSNamesystem.CapacityRemainingGB",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/Transactions_avg_time": {
+ "metric": "dfs.namenode.TransactionsAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/boottime": {
+ "metric": "boottime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/MissingBlocks": {
+ "metric": "dfs.FSNamesystem.MissingBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpc/callQueueLen": {
+ "metric": "rpc.rpc.CallQueueLength",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/delete_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.DeleteNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/CorruptBlocks": {
+ "metric": "dfs.FSNamesystem.CorruptBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rename_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RenameNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blockReport_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.BlockReportAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/mkdirs_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.MkdirsNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/load/load_fifteen": {
+ "metric": "load_fifteen",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/jvm/logInfo": {
+ "metric": "jvm.JvmMetrics.LogInfo",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/fsImageLoadTime": {
+ "metric": "dfs.namenode.FsImageLoadTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getListing_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.GetListingNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blocksBeingWrittenReport_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.blocksBeingWrittenReport_num_ops",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/rollEditLog_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RollEditLogNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/addBlock_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.AddBlockAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/blocksBeingWrittenReport_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.blocksBeingWrittenReport_avg_time",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/setOwner_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SetOwnerAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/RpcProcessingTime_num_ops": {
+ "metric": "rpc.rpc.RpcProcessingTimeNumOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/memory/mem_shared": {
+ "metric": "mem_shared",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/dfs/FSNamesystem/UnderReplicatedBlocks": {
+ "metric": "dfs.FSNamesystem.UnderReplicatedBlocks",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/sendHeartbeat_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.SendHeartbeatAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/dfs/namenode/CreateFileOps": {
+ "metric": "dfs.namenode.CreateFileOps",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logError": {
+ "metric": "jvm.JvmMetrics.LogError",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/ugi/loginFailure_avg_time": {
+ "metric": "ugi.UgiMetrics.LoginFailureAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/cpu/cpu_num": {
+ "metric": "cpu_num",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getProtocolVersion_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.getProtocolVersion_avg_time",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/register_num_ops": {
+ "metric": "rpcdetailed.rpcdetailed.RegisterDatanodeNumOps",
+ "pointInTime": true,
+ "temporal": true
+ },
+ "metrics/rpc/rpcAuthorizationSuccesses": {
+ "metric": "rpc.rpc.RpcAuthorizationSuccesses",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/getListing_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.GetListingAvgTime",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/jvm/logFatal": {
+ "metric": "jvm.JvmMetrics.LogFatal",
+ "pointInTime": false,
+ "temporal": true
+ },
+ "metrics/rpcdetailed/renewLease_avg_time": {
+ "metric": "rpcdetailed.rpcdetailed.RenewLeaseAvgTime",
+ "pointInTime": true,
+ "temporal": true
+ }
+ }
+ },
+ {
+ "type": "jmx",
+ "metrics": {
+ "metrics/dfs/namenode/Used": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.Used",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/TotalLoad": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystem.TotalLoad",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memMaxM":{
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemMaxM",
+ "pointInTime" : true,
+ "temporal" : false
+ },
+ "metrics/dfs/FSNamesystem/BlockCapacity": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystem.BlockCapacity",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/TotalFiles": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.TotalFiles",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/GetListingOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeActivity.GetListingOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/HostName": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeActivity.tag.Hostname",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/runtime/StartTime": {
+ "metric": "java.lang:type=Runtime.StartTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/UpgradeFinalized": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.UpgradeFinalized",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getProtocolVersion_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.VersionRequestNumOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/fsync_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.FsyncAvgTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/ugi/loginSuccess_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=UgiMetrics.LoginSuccessAvgTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/renewLease_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.renewLease_num_ops",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityRemaining": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystem.CapacityRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getFileInfo_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.GetFileInfoAvgTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/PercentRemaining": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeInfo.PercentRemaining",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/jvm/memNonHeapUsedM": {
+ "metric": "Hadoop:service=NameNode,name=JvmMetrics.MemNonHeapUsedM",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/complete_avg_time": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.CompleteAvgTime",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityTotalGB": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystem.CapacityTotalGB",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/rpcdetailed/getBlockLocations_num_ops": {
+ "metric": "Hadoop:service=NameNode,name=RpcDetailedActivity.GetBlockLocationsNumOps",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/FSNamesystem/CapacityUsedGB": {
+ "metric": "Hadoop:service=NameNode,name=FSNamesystem.CapacityUsedGB",
+ "pointInTime": true,
+ "temporal": false
+ },
+ "metrics/dfs/namenode/AddBlockOps": {
+ "metric": "Hadoop:service=NameNode,name=NameNodeActivity.AddBlockOps",
+ "point
<TRUNCATED>
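For readers skimming the archive: each entry in the metrics.json files added above maps an Ambari metric path to a source metric name plus pointInTime/temporal flags (the ganglia section is temporal, the jmx section that follows it is point-in-time only). Below is a minimal, hedged sketch of reading one such entry with Gson, which the patched server code already uses; the class and field names here are illustrative stand-ins, not Ambari's internal types, and only the JSON keys come from the file above.

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.Map;

public class MetricsJsonSketch {

  // Hypothetical POJO mirroring one metrics.json entry:
  // a source metric name plus the pointInTime/temporal flags.
  static class MetricEntry {
    String metric;
    boolean pointInTime;
    boolean temporal;
  }

  public static void main(String[] args) {
    // One entry copied from the ganglia section above.
    String json = "{\"metrics/cpu/cpu_idle\": "
        + "{\"metric\": \"cpu_idle\", \"pointInTime\": true, \"temporal\": true}}";

    Type type = new TypeToken<Map<String, MetricEntry>>() {}.getType();
    Map<String, MetricEntry> metrics = new Gson().fromJson(json, type);

    MetricEntry cpuIdle = metrics.get("metrics/cpu/cpu_idle");
    System.out.println(cpuIdle.metric + " pointInTime=" + cpuIdle.pointInTime
        + " temporal=" + cpuIdle.temporal);
  }
}

Running it prints the source metric name and flags for the cpu_idle mapping.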
[11/11] ambari git commit: AMBARI-7175. Add explicit stack service inheritance
Posted by js...@apache.org.
AMBARI-7175. Add explicit stack service inheritance
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2fc7adec
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2fc7adec
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2fc7adec
Branch: refs/heads/trunk
Commit: 2fc7adecea03ec70141f41e98f1aa2d089c3b364
Parents: b71407f
Author: John Speidel <js...@hortonworks.com>
Authored: Wed Nov 5 23:12:11 2014 -0500
Committer: John Speidel <js...@hortonworks.com>
Committed: Mon Nov 10 16:31:43 2014 -0500
----------------------------------------------------------------------
.../ambari/server/ObjectNotFoundException.java | 18 +-
.../server/ParentObjectNotFoundException.java | 2 +-
.../ambari/server/agent/HeartBeatHandler.java | 2 +-
.../ambari/server/agent/HeartbeatMonitor.java | 8 +-
.../query/render/ClusterBlueprintRenderer.java | 20 +-
.../server/api/services/AmbariMetaInfo.java | 629 +-
.../server/api/util/StackExtensionHelper.java | 975 ---
.../controller/AmbariActionExecutionHelper.java | 9 +-
.../AmbariCustomCommandExecutionHelper.java | 14 +-
.../AmbariManagementControllerImpl.java | 49 +-
.../server/controller/StackServiceResponse.java | 10 +-
.../internal/AbstractResourceProvider.java | 12 +-
.../internal/BlueprintResourceProvider.java | 1 -
.../internal/ClientConfigResourceProvider.java | 4 +-
.../internal/ClusterResourceProvider.java | 21 +-
.../internal/ComponentResourceProvider.java | 29 +-
.../internal/ServiceResourceProvider.java | 36 +-
.../server/controller/internal/Stack.java | 29 -
.../server/metadata/RoleCommandOrder.java | 2 +-
.../server/orm/entities/BlueprintEntity.java | 17 +-
.../apache/ambari/server/stack/BaseModule.java | 93 +
.../ambari/server/stack/ComponentModule.java | 150 +
.../server/stack/ConfigurationDirectory.java | 129 +
.../ambari/server/stack/ConfigurationInfo.java | 164 +
.../server/stack/ConfigurationModule.java | 170 +
.../server/stack/ModuleFileUnmarshaller.java | 77 +
.../ambari/server/stack/ServiceDirectory.java | 181 +
.../ambari/server/stack/ServiceModule.java | 289 +
.../ambari/server/stack/StackContext.java | 196 +
.../server/stack/StackDefinitionDirectory.java | 106 +
.../server/stack/StackDefinitionModule.java | 69 +
.../ambari/server/stack/StackDirectory.java | 365 +
.../ambari/server/stack/StackManager.java | 175 +
.../apache/ambari/server/stack/StackModule.java | 498 ++
.../ambari/server/state/AutoDeployInfo.java | 20 +
.../ambari/server/state/ComponentInfo.java | 48 +
.../ambari/server/state/ConfigHelper.java | 54 +-
.../ambari/server/state/DependencyInfo.java | 26 +
.../server/state/ServiceComponentImpl.java | 27 +-
.../apache/ambari/server/state/ServiceImpl.java | 4 +-
.../apache/ambari/server/state/ServiceInfo.java | 145 +-
.../ambari/server/state/ServiceOsSpecific.java | 60 +
.../org/apache/ambari/server/state/Stack.java | 66 -
.../apache/ambari/server/state/StackInfo.java | 112 +-
.../server/state/cluster/ClusterImpl.java | 8 +-
.../render/ClusterBlueprintRendererTest.java | 10 +-
.../server/api/services/AmbariMetaInfoTest.java | 405 +-
.../api/util/StackExtensionHelperTest.java | 792 --
.../AmbariManagementControllerImplTest.java | 6 +-
.../AmbariManagementControllerTest.java | 31 +-
.../internal/BlueprintResourceProviderTest.java | 33 +-
.../ClientConfigResourceProviderTest.java | 6 +-
.../internal/ClusterResourceProviderTest.java | 8 -
.../internal/ComponentResourceProviderTest.java | 75 +-
.../internal/HostResourceProviderTest.java | 8 +-
.../internal/ServiceResourceProviderTest.java | 32 +-
.../orm/entities/BlueprintEntityTest.java | 61 +-
.../server/stack/ComponentModuleTest.java | 409 +
.../ambari/server/stack/ServiceModuleTest.java | 983 +++
.../ambari/server/stack/StackManagerTest.java | 594 ++
.../ambari/server/state/PropertyInfoTest.java | 85 +-
.../bad-stacks/HDP/0.1/repos/repoinfo.xml | 37 +-
.../stacks/HDP/2.0.7/services/HIVE/metainfo.xml | 2 -
.../stacks/HDP/2.0.7/services/YARN/metainfo.xml | 4 +
.../services/HBASE/configuration/hbase-site.xml | 356 +
.../HDP/2.0.8/services/HBASE/metainfo.xml | 3 +
.../stacks/HDP/2.1.1/services/PIG/metainfo.xml | 8 +-
.../resources/stacks/OTHER/1.0/metainfo.xml | 23 +
.../resources/stacks/OTHER/1.0/repos/hdp.json | 10 +
.../stacks/OTHER/1.0/repos/repoinfo.xml | 62 +
.../stacks/OTHER/1.0/role_command_order.json | 104 +
.../services/HDFS/configuration/hdfs-site.xml | 45 +
.../stacks/OTHER/1.0/services/HDFS/metainfo.xml | 146 +
.../stacks/OTHER/1.0/services/HDFS/metrics.json | 7840 ++++++++++++++++++
.../1.0/services/HDFS/package/dummy-script.py | 20 +
.../OTHER/1.0/services/SQOOP2/metainfo.xml | 30 +
.../STORM/configuration/placeholder.txt | 17 +
.../OTHER/1.0/services/STORM/metainfo.xml | 29 +
.../OTHER/1.0/services/STORM/metrics.json | 99 +
.../1.0/services/STORM/package/placeholder.txt | 17 +
.../resources/stacks/OTHER/2.0/metainfo.xml | 24 +
.../resources/stacks/OTHER/2.0/repos/hdp.json | 10 +
.../stacks/OTHER/2.0/repos/repoinfo.xml | 62 +
.../OTHER/2.0/services/HBASE/metainfo.xml | 32 +
.../OTHER/2.0/services/SQOOP2/metainfo.xml | 28 +
.../stacks_with_cycle/OTHER/1.0/metainfo.xml | 23 +
.../stacks_with_cycle/OTHER/1.0/repos/hdp.json | 10 +
.../OTHER/1.0/repos/repoinfo.xml | 62 +
.../OTHER/1.0/role_command_order.json | 104 +
.../services/HDFS/configuration/hdfs-site.xml | 45 +
.../OTHER/1.0/services/HDFS/metainfo.xml | 146 +
.../OTHER/1.0/services/HDFS/metrics.json | 7840 ++++++++++++++++++
.../1.0/services/HDFS/package/dummy-script.py | 20 +
.../OTHER/1.0/services/SQOOP2/metainfo.xml | 30 +
.../stacks_with_cycle/OTHER/2.0/metainfo.xml | 24 +
.../stacks_with_cycle/OTHER/2.0/repos/hdp.json | 10 +
.../OTHER/2.0/repos/repoinfo.xml | 62 +
.../OTHER/2.0/services/SQOOP2/metainfo.xml | 29 +
.../stacks_with_cycle2/stack1/1.0/metainfo.xml | 23 +
.../stack1/1.0/repos/hdp.json | 10 +
.../stack1/1.0/repos/repoinfo.xml | 62 +
.../stack1/1.0/services/HDFS/metainfo.xml | 28 +
.../stacks_with_cycle2/stack2/1.0/metainfo.xml | 23 +
.../stack2/1.0/repos/hdp.json | 10 +
.../stack2/1.0/repos/repoinfo.xml | 62 +
.../stack2/1.0/services/HDFS/metainfo.xml | 28 +
.../stacks_with_cycle2/stack3/1.0/metainfo.xml | 23 +
.../stack3/1.0/repos/hdp.json | 10 +
.../stack3/1.0/repos/repoinfo.xml | 62 +
.../stack3/1.0/services/HDFS/metainfo.xml | 28 +
110 files changed, 23438 insertions(+), 3001 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/ObjectNotFoundException.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ObjectNotFoundException.java b/ambari-server/src/main/java/org/apache/ambari/server/ObjectNotFoundException.java
index 138774a..59063cb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ObjectNotFoundException.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ObjectNotFoundException.java
@@ -21,8 +21,22 @@ package org.apache.ambari.server;
@SuppressWarnings("serial")
public class ObjectNotFoundException extends AmbariException {
- public ObjectNotFoundException(String message) {
- super(message);
+ /**
+ * Constructor.
+ *
+ * @param msg message
+ * @param cause the root cause
+ */
+ public ObjectNotFoundException(String msg, ObjectNotFoundException cause) {
+ super(msg + ". " + cause.getMessage(), cause);
}
+ /**
+ * Constructor.
+ *
+ * @param msg message
+ */
+ public ObjectNotFoundException(String msg) {
+ super(msg);
+ }
}
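The hunk above adds a cause-taking constructor that folds the nested exception's message into the outer message. A small usage sketch, assuming the ambari-server classes are on the classpath; the stack name and version in the messages are made up for illustration.

import org.apache.ambari.server.ObjectNotFoundException;

public class NotFoundChaining {

  public static void main(String[] args) {
    // Inner failure, e.g. a missing stack version.
    ObjectNotFoundException cause =
        new ObjectNotFoundException("stackName=OTHER, stackVersion=1.0");

    // The two-argument constructor appends ". " + cause.getMessage()
    // to the outer message and keeps the cause for the stack trace.
    ObjectNotFoundException wrapped = new ObjectNotFoundException(
        "Parent Stack Version resource doesn't exist", cause);

    System.out.println(wrapped.getMessage());
    // -> Parent Stack Version resource doesn't exist. stackName=OTHER, stackVersion=1.0
  }
}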
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/ParentObjectNotFoundException.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/ParentObjectNotFoundException.java b/ambari-server/src/main/java/org/apache/ambari/server/ParentObjectNotFoundException.java
index 98d60d5..0674dda 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/ParentObjectNotFoundException.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/ParentObjectNotFoundException.java
@@ -21,7 +21,7 @@ package org.apache.ambari.server;
/**
* Indicates that a parent of a resource doesn't exist.
*/
-public class ParentObjectNotFoundException extends AmbariException {
+public class ParentObjectNotFoundException extends ObjectNotFoundException {
/**
* Constructor.
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
index e99e39f..82b642b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
@@ -786,7 +786,7 @@ public class HeartBeatHandler {
throw new AmbariException("Cannot provide stack components map. " +
"Stack hasn't been selected yet.");
}
- StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+ StackInfo stack = ambariMetaInfo.getStack(stackId.getStackName(),
stackId.getStackVersion());
response.setClusterName(clusterName);
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
index c39ba29..ed4d39c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
@@ -228,13 +228,13 @@ public class HeartbeatMonitor implements Runnable {
String componentName = sch.getServiceComponentName();
Service service = cluster.getService(sch.getServiceName());
StackId stackId = cluster.getDesiredStackVersion();
- ServiceInfo serviceInfo = ambariMetaInfo.getServiceInfo(stackId.getStackName(),
- stackId.getStackVersion(), serviceName);
+ ServiceInfo serviceInfo = ambariMetaInfo.getService(stackId.getStackName(),
+ stackId.getStackVersion(), serviceName);
ComponentInfo componentInfo = ambariMetaInfo.getComponent(
stackId.getStackName(), stackId.getStackVersion(),
serviceName, componentName);
- StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
- stackId.getStackVersion());
+ StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
+ stackId.getStackVersion());
Map<String, Map<String, String>> configurations = new TreeMap<String, Map<String, String>>();
Map<String, Map<String, Map<String, String>>> configurationAttributes = new TreeMap<String, Map<String, Map<String, String>>>();
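Both heartbeat classes above switch from the old getStackInfo/getServiceInfo calls to the consolidated getStack/getService lookups on AmbariMetaInfo. A hedged sketch of the same lookups in isolation, assuming ambari-server on the classpath; the OTHER/1.0/HDFS/NAMENODE names are illustrative only and not taken from a real cluster.

import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.state.ComponentInfo;
import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.StackInfo;

public class MetaInfoLookups {

  // Same metadata lookups the monitor performs, written against the renamed API.
  static void describe(AmbariMetaInfo metaInfo) throws AmbariException {
    StackInfo stack = metaInfo.getStack("OTHER", "1.0");
    ServiceInfo service = metaInfo.getService("OTHER", "1.0", "HDFS");
    ComponentInfo component = metaInfo.getComponent("OTHER", "1.0", "HDFS", "NAMENODE");

    System.out.println(stack.getName() + "-" + stack.getVersion()
        + " service=" + service.getName()
        + " component=" + component.getName());
  }
}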
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java b/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
index 847735d..9967fc6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
@@ -18,6 +18,7 @@
package org.apache.ambari.server.api.query.render;
+import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.api.query.QueryInfo;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.api.services.Request;
@@ -39,7 +40,7 @@ import org.apache.ambari.server.state.HostConfig;
import org.apache.ambari.server.state.PropertyInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
+import org.apache.ambari.server.state.ServiceInfo;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
@@ -175,9 +176,20 @@ public class ClusterBlueprintRenderer extends BaseRenderer implements Renderer {
*/
private void determinePropertiesToStrip(TreeNode<Resource> servicesNode, String stackName, String stackVersion) {
AmbariMetaInfo stackInfo = getController().getAmbariMetaInfo();
- for (TreeNode<Resource> service : servicesNode.getChildren()) {
- String name = (String) service.getObject().getPropertyValue("ServiceInfo/service_name");
- Map<String, PropertyInfo> requiredProperties = stackInfo.getRequiredProperties(stackName, stackVersion, name);
+ for (TreeNode<Resource> serviceNode : servicesNode.getChildren()) {
+ String name = (String) serviceNode.getObject().getPropertyValue("ServiceInfo/service_name");
+ ServiceInfo service;
+ try {
+ service = stackInfo.getService(stackName, stackVersion, name);
+ } catch (AmbariException e) {
+ // shouldn't ever happen.
+ // Exception indicates that service is not in the stack
+ // but we are getting the name from a running cluster.
+ throw new RuntimeException("Unexpected exception occurred while generating a blueprint. The service '" +
+ name + "' was not found in the stack: '" + stackName + ":" + stackVersion);
+ }
+
+ Map<String, PropertyInfo> requiredProperties = service.getRequiredProperties();
for (Map.Entry<String, PropertyInfo> entry : requiredProperties.entrySet()) {
String propertyName = entry.getKey();
PropertyInfo propertyInfo = entry.getValue();
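With this change the renderer no longer asks AmbariMetaInfo for required properties by stack/version/service; it resolves the ServiceInfo and reads them directly. A minimal sketch of that lookup path under the same assumptions as the earlier snippets (illustrative names, ambari-server on the classpath).

import java.util.Map;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.state.PropertyInfo;
import org.apache.ambari.server.state.ServiceInfo;

public class RequiredPropsSketch {

  // Required properties now hang off ServiceInfo rather than a per-stack
  // map inside AmbariMetaInfo.
  static void printRequired(AmbariMetaInfo metaInfo) throws AmbariException {
    ServiceInfo hdfs = metaInfo.getService("OTHER", "1.0", "HDFS");
    Map<String, PropertyInfo> required = hdfs.getRequiredProperties();
    for (Map.Entry<String, PropertyInfo> entry : required.entrySet()) {
      System.out.println(entry.getKey() + " -> " + entry.getValue().getFilename());
    }
  }
}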
http://git-wip-us.apache.org/repos/asf/ambari/blob/2fc7adec/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
index ba365f5..2a137c9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
@@ -21,9 +21,9 @@ package org.apache.ambari.server.api.services;
import java.io.File;
import java.io.FileReader;
import java.io.FilenameFilter;
-import java.io.IOException;
import java.lang.reflect.Type;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@@ -32,17 +32,12 @@ import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ThreadFactory;
import javax.xml.bind.JAXBException;
import org.apache.ambari.server.AmbariException;
-import org.apache.ambari.server.ObjectNotFoundException;
import org.apache.ambari.server.ParentObjectNotFoundException;
import org.apache.ambari.server.StackAccessException;
-import org.apache.ambari.server.api.util.StackExtensionHelper;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.customactions.ActionDefinition;
import org.apache.ambari.server.customactions.ActionDefinitionManager;
@@ -50,11 +45,15 @@ import org.apache.ambari.server.events.AlertDefinitionRegistrationEvent;
import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
import org.apache.ambari.server.metadata.AgentAlertDefinitions;
import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
+import org.apache.ambari.server.metadata.ActionMetadata;
import org.apache.ambari.server.orm.dao.MetainfoDAO;
import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
import org.apache.ambari.server.orm.entities.MetainfoEntity;
+import org.apache.ambari.server.stack.StackContext;
+import org.apache.ambari.server.stack.StackDirectory;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.stack.StackManager;
import org.apache.ambari.server.state.ComponentInfo;
import org.apache.ambari.server.state.DependencyInfo;
import org.apache.ambari.server.state.OperatingSystemInfo;
@@ -62,26 +61,19 @@ import org.apache.ambari.server.state.PropertyInfo;
import org.apache.ambari.server.state.RepositoryInfo;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceInfo;
-import org.apache.ambari.server.state.Stack;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.alert.AlertDefinition;
import org.apache.ambari.server.state.alert.AlertDefinitionFactory;
-import org.apache.ambari.server.state.stack.LatestRepoCallable;
import org.apache.ambari.server.state.stack.MetricDefinition;
import org.apache.ambari.server.state.stack.OsFamily;
-import org.apache.ambari.server.state.stack.RepositoryXml;
-import org.apache.ambari.server.state.stack.RepositoryXml.Os;
-import org.apache.ambari.server.state.stack.RepositoryXml.Repo;
import org.apache.ambari.server.state.stack.UpgradePack;
-import org.apache.commons.io.FilenameUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.google.inject.Inject;
-import com.google.inject.Injector;
import com.google.inject.Singleton;
@@ -91,9 +83,7 @@ import com.google.inject.Singleton;
@Singleton
public class AmbariMetaInfo {
- public static final String STACK_METAINFO_FILE_NAME = "metainfo.xml";
- public static final String SERVICES_FOLDER_NAME = "services";
- public static final String SERVICE_METAINFO_FILE_NAME = "metainfo.xml";
+
public static final String SERVICE_CONFIG_FOLDER_NAME = "configuration";
public static final String SERVICE_CONFIG_FILE_NAME_POSTFIX = ".xml";
public static final String RCO_FILE_NAME = "role_command_order.json";
@@ -112,19 +102,12 @@ public class AmbariMetaInfo {
public static final FilenameFilter FILENAME_FILTER = new FilenameFilter() {
@Override
public boolean accept(File dir, String s) {
- if (s.equals(".svn") || s.equals(".git") ||
- s.equals(StackExtensionHelper.HOOKS_FOLDER_NAME)) // Hooks dir is not a service
- {
- return false;
- }
- return true;
+ return !(s.equals(".svn") || s.equals(".git") ||
+ s.equals(StackDirectory.HOOKS_FOLDER_NAME));
}
};
private final static Logger LOG = LoggerFactory.getLogger(AmbariMetaInfo.class);
- private static final String REPOSITORY_FILE_NAME = "repoinfo.xml";
- private static final String REPOSITORY_FOLDER_NAME = "repos";
public static final String REPOSITORY_XML_PROPERTY_BASEURL = "baseurl";
- private static final String UPGRADE_PACK_FOLDER_NAME = "upgrades";
// all the supported OS'es
@Inject
@@ -138,28 +121,26 @@ public class AmbariMetaInfo {
private final ActionDefinitionManager adManager = new ActionDefinitionManager();
private String serverVersion = "undefined";
- private List<StackInfo> stacksResult = new ArrayList<StackInfo>();
+ private StackManager stackManager;
+
private File stackRoot;
private File serverVersionFile;
private File customActionRoot;
@Inject
- private MetainfoDAO metainfoDAO;
+ private MetainfoDAO metaInfoDAO;
- @Inject
- Injector injector;
-
- @Inject
- Configuration cfg;
/**
* Alert Definition DAO used to merge stack definitions into the database.
*/
+ @Inject
AlertDefinitionDAO alertDefinitionDao;
/**
* A factory that assists in the creation of {@link AlertDefinition} and
* {@link AlertDefinitionEntity}.
*/
+ @Inject
private AlertDefinitionFactory alertDefinitionFactory;
/**
@@ -178,9 +159,10 @@ public class AmbariMetaInfo {
@Inject
private AmbariEventPublisher eventPublisher;
- // Required properties by stack version
- private final Map<StackId, Map<String, Map<String, PropertyInfo>>> requiredProperties =
- new HashMap<StackId, Map<String, Map<String, PropertyInfo>>>();
+ //todo: only used by StackManager
+ @Inject
+ ActionMetadata actionMetadata;
+
/**
* Ambari Meta Info Object
@@ -212,43 +194,19 @@ public class AmbariMetaInfo {
@Inject
public void init() throws Exception {
// Need to be initialized before all actions
- os_family = injector.getInstance(OsFamily.class);
ALL_SUPPORTED_OS = new ArrayList<String>(os_family.os_list());
-
- stacksResult = new ArrayList<StackInfo>();
readServerVersion();
- getConfigurationInformation(stackRoot);
+ stackManager = new StackManager(stackRoot,
+ new StackContext(metaInfoDAO, actionMetadata, os_family));
getCustomActionDefinitions(customActionRoot);
-
- alertDefinitionFactory = injector.getInstance(AlertDefinitionFactory.class);
- alertDefinitionDao = injector.getInstance(AlertDefinitionDAO.class);
- eventPublisher = injector.getInstance(AmbariEventPublisher.class);
- agentAlertDefinitions = injector.getInstance(AgentAlertDefinitions.class);
}
/**
- * Get component category
- *
- * @param stackName stack name
- * @param version stack version
- * @param serviceName service name
- * @param componentName component name
- * @return component component Info
- * @throws AmbariException
+ * Obtain the underlying stack manager.
+ * @return stack manager
*/
- public ComponentInfo getComponentCategory(String stackName, String version,
- String serviceName, String componentName) throws AmbariException {
- ComponentInfo component = null;
- List<ComponentInfo> components = getComponentsByService(stackName, version, serviceName);
- if (components != null) {
- for (ComponentInfo cmp : components) {
- if (cmp.getName().equals(componentName)) {
- component = cmp;
- break;
- }
- }
- }
- return component;
+ public StackManager getStackManager() {
+ return stackManager;
}
/**
@@ -263,44 +221,28 @@ public class AmbariMetaInfo {
public List<ComponentInfo> getComponentsByService(String stackName, String version, String serviceName)
throws AmbariException {
- ServiceInfo service = getServiceInfo(stackName, version, serviceName);
- if (service == null) {
+ ServiceInfo service;
+ try {
+ service = getService(stackName, version, serviceName);
+ } catch (StackAccessException e) {
throw new ParentObjectNotFoundException("Parent Service resource doesn't exist. stackName=" +
stackName + ", stackVersion=" + version + ", serviceName=" + serviceName);
}
-
return service.getComponents();
}
-
public ComponentInfo getComponent(String stackName, String version, String serviceName,
String componentName) throws AmbariException {
- List<ComponentInfo> componentsByService = getComponentsByService(stackName, version, serviceName);
+ ComponentInfo component = getService(stackName, version, serviceName).getComponentByName(componentName);
- if (componentsByService.size() == 0) {
+ if (component == null) {
throw new StackAccessException("stackName=" + stackName
+ ", stackVersion=" + version
+ ", serviceName=" + serviceName
+ ", componentName=" + componentName);
}
-
- ComponentInfo componentResult = null;
-
- for (ComponentInfo component : componentsByService) {
- if (component.getName().equals(componentName)) {
- componentResult = component;
- }
- }
-
- if (componentResult == null) {
- throw new StackAccessException("stackName=" + stackName
- + ", stackVersion=" + version
- + ", serviceName=" + serviceName
- + ", componentName=" + componentName);
- }
-
- return componentResult;
+ return component;
}
/**
@@ -364,18 +306,16 @@ public class AmbariMetaInfo {
public Map<String, List<RepositoryInfo>> getRepository(String stackName,
String version) throws AmbariException {
- Map<String, List<RepositoryInfo>> reposResult = null;
- StackInfo stack = getStackInfo(stackName, version);
- if (stack != null) {
- List<RepositoryInfo> repository = stack.getRepositories();
- reposResult = new HashMap<String, List<RepositoryInfo>>();
- for (RepositoryInfo repo : repository) {
- if (!reposResult.containsKey(repo.getOsType())) {
- reposResult.put(repo.getOsType(),
- new ArrayList<RepositoryInfo>());
- }
- reposResult.get(repo.getOsType()).add(repo);
+ StackInfo stack = getStack(stackName, version);
+ List<RepositoryInfo> repository = stack.getRepositories();
+
+ Map<String, List<RepositoryInfo>> reposResult = new HashMap<String, List<RepositoryInfo>>();
+ for (RepositoryInfo repo : repository) {
+ if (!reposResult.containsKey(repo.getOsType())) {
+ reposResult.put(repo.getOsType(),
+ new ArrayList<RepositoryInfo>());
}
+ reposResult.get(repo.getOsType()).add(repo);
}
return reposResult;
}
@@ -383,7 +323,7 @@ public class AmbariMetaInfo {
public List<RepositoryInfo> getRepositories(String stackName,
String version, String osType) throws AmbariException {
- StackInfo stack = getStackInfo(stackName, version);
+ StackInfo stack = getStack(stackName, version);
List<RepositoryInfo> repositories = stack.getRepositories();
List<RepositoryInfo> repositoriesResult = new ArrayList<RepositoryInfo>();
@@ -425,42 +365,40 @@ public class AmbariMetaInfo {
/*
* function for given a stack name and version, is it a supported stack
*/
- public boolean isSupportedStack(String stackName, String version) throws AmbariException {
- boolean exist = false;
+ public boolean isSupportedStack(String stackName, String version) {
try {
- StackInfo stackInfo = getStackInfo(stackName, version);
- if (stackInfo != null) {
- exist = true;
- }
- } catch (ObjectNotFoundException e) {
+ // throws an exception if the stack doesn't exist
+ getStack(stackName, version);
+ return true;
+ } catch (AmbariException e) {
+ return false;
}
- return exist;
}
/*
* support isValidService(), isValidComponent for a given stack/version
*/
- public boolean isValidService(String stackName, String version,
- String serviceName) throws AmbariException {
+ public boolean isValidService(String stackName, String version, String serviceName){
- boolean exist = false;
try {
- ServiceInfo info= getServiceInfo(stackName, version, serviceName);
- if (info != null) {
- exist = true;
- }
- } catch (ObjectNotFoundException e) {
+ getService(stackName, version, serviceName);
+ return true;
+ } catch (AmbariException e) {
+ return false;
}
- return exist;
}
/*
* support isValidService(), isValidComponent for a given stack/version
*/
public boolean isValidServiceComponent(String stackName, String version,
- String serviceName, String componentName) throws AmbariException {
- ServiceInfo service = getServiceInfo(stackName, version, serviceName);
- return service != null && service.getComponentByName(componentName) != null;
+ String serviceName, String componentName) {
+ try {
+ getService(stackName, version, serviceName).getComponentByName(componentName);
+ return true;
+ } catch (AmbariException e) {
+ return false;
+ }
}
/**
@@ -482,8 +420,7 @@ public class AmbariMetaInfo {
}
Map<String, ServiceInfo> services = getServices(stackName, version);
String retService = null;
- if (services == null
- || services.isEmpty()) {
+ if (services == null || services.isEmpty()) {
return retService;
}
for (Map.Entry<String, ServiceInfo> entry : services.entrySet()) {
@@ -497,46 +434,6 @@ public class AmbariMetaInfo {
}
/**
- * Get the service configs supported for a service in a particular stack
- *
- * @param stackName the stack name
- * @param version the version of the stack
- * @param serviceName the name of the service in the stack
- * @return the config knobs supported for the service
- * @throws AmbariException
- */
- public Map<String, Map<String, String>> getSupportedConfigs(String stackName,
- String version, String serviceName) throws AmbariException {
- Map<String, Map<String, String>> propertiesResult = new HashMap<String, Map<String, String>>();
-
- ServiceInfo service = getServiceInfo(stackName, version, serviceName);
- if (service != null) {
- if (serviceName.equals(service.getName())) {
- List<PropertyInfo> properties = service.getProperties();
- if (properties != null) {
- for (PropertyInfo propertyInfo : properties) {
- Map<String, String> fileProperties = propertiesResult
- .get(propertyInfo.getFilename());
- if (fileProperties == null) {
- fileProperties = new HashMap<String, String>();
- fileProperties.put(propertyInfo.getName(),
- propertyInfo.getValue());
- propertiesResult.put(propertyInfo.getFilename(), fileProperties);
-
- } else {
- fileProperties.put(propertyInfo.getName(),
- propertyInfo.getValue());
- }
-
- }
- }
- }
- }
-
- return propertiesResult;
- }
-
- /**
* Given a stack name and version return all the services with info
*
* @param stackName the stack name
@@ -548,10 +445,10 @@ public class AmbariMetaInfo {
Map<String, ServiceInfo> servicesInfoResult = new HashMap<String, ServiceInfo>();
- List<ServiceInfo> services;
+ Collection<ServiceInfo> services;
StackInfo stack;
try {
- stack = getStackInfo(stackName, version);
+ stack = getStack(stackName, version);
} catch (StackAccessException e) {
throw new ParentObjectNotFoundException("Parent Stack Version resource doesn't exist", e);
}
@@ -566,63 +463,22 @@ public class AmbariMetaInfo {
}
public ServiceInfo getService(String stackName, String version, String serviceName) throws AmbariException {
+ ServiceInfo service = getStack(stackName, version).getService(serviceName);
- Map<String, ServiceInfo> services = getServices(stackName, version);
-
- if (services.size() == 0) {
- throw new StackAccessException("stackName=" + stackName + ", stackVersion=" + version + ", serviceName=" + serviceName);
- }
-
- ServiceInfo serviceInfo = services.get(serviceName);
-
- if (serviceInfo == null) {
- throw new StackAccessException("stackName=" + stackName + ", stackVersion=" + version + ", serviceName=" + serviceName);
- }
-
- return serviceInfo;
-
- }
-
- public ServiceInfo getServiceInfo(String stackName, String version,
- String serviceName) throws AmbariException {
- ServiceInfo serviceInfoResult = null;
- List<ServiceInfo> services;
- StackInfo stack;
- try {
- stack = getStackInfo(stackName, version);
- } catch (StackAccessException e) {
- throw new ParentObjectNotFoundException("Parent Stack Version resource doesn't exist", e);
- }
-
- services = stack.getServices();
- if (services != null) {
- for (ServiceInfo service : services) {
- if (serviceName.equals(service.getName())) {
- serviceInfoResult = service;
- break;
- }
- }
+ if (service == null) {
+ throw new StackAccessException("stackName=" + stackName + ", stackVersion=" +
+ version + ", serviceName=" + serviceName);
}
- return serviceInfoResult;
- }
- public List<ServiceInfo> getSupportedServices(String stackName, String version)
- throws AmbariException {
- List<ServiceInfo> servicesResult = null;
- StackInfo stack = getStackInfo(stackName, version);
- if (stack != null) {
- servicesResult = stack.getServices();
- }
- return servicesResult;
+ return service;
}
- public List<String> getMonitoringServiceNames(String stackName, String version)
+ public Collection<String> getMonitoringServiceNames(String stackName, String version)
throws AmbariException{
List<String> monitoringServices = new ArrayList<String>();
- for (ServiceInfo service : getSupportedServices(stackName, version)) {
- if ((service.isMonitoringService() != null) &&
- service.isMonitoringService()) {
+ for (ServiceInfo service : getServices(stackName, version).values()) {
+ if ((service.isMonitoringService() != null) && service.isMonitoringService()) {
monitoringServices.add(service.getName());
}
}
@@ -633,9 +489,7 @@ public class AmbariMetaInfo {
throws AmbariException{
HashSet<String> needRestartServices = new HashSet<String>();
-
- List<ServiceInfo> serviceInfos = getSupportedServices(stackName, version);
-
+ Collection<ServiceInfo> serviceInfos = getServices(stackName, version).values();
for (ServiceInfo service : serviceInfos) {
if (service.isRestartRequiredAfterChange() != null && service.isRestartRequiredAfterChange()) {
@@ -644,68 +498,23 @@ public class AmbariMetaInfo {
}
return needRestartServices;
}
-
- public List<StackInfo> getSupportedStacks() {
- return stacksResult;
- }
-
- public Set<Stack> getStackNames() {
-
- Set<Stack> stacks = new HashSet<Stack>();
- List<StackInfo> supportedStacks = getSupportedStacks();
-
- for (StackInfo stackInfo : supportedStacks) {
- Stack stack = new Stack(stackInfo.getName());
- stacks.add(stack);
- }
-
- return stacks;
+
+ public Collection<StackInfo> getStacks() {
+ return stackManager.getStacks();
}
- public Stack getStack(String stackName) throws AmbariException {
-
- Set<Stack> supportedStackNames = getStackNames();
-
- if (supportedStackNames.size() == 0) {
- throw new StackAccessException("stackName=" + stackName);
- }
-
- Stack stackResult = null;
-
- for (Stack stack : supportedStackNames) {
- if (stack.getStackName().equals(stackName)) {
- stackResult = stack;
- }
- }
+ public Collection<StackInfo> getStacks(String stackName) throws AmbariException {
+ Collection<StackInfo> stacks = stackManager.getStacks(stackName);
- if (stackResult == null) {
+ if (stacks.isEmpty()) {
throw new StackAccessException("stackName=" + stackName);
}
- return stackResult;
- }
-
- public Set<StackInfo> getStackInfos(String stackName) {
-
- Set<StackInfo> stackVersions = new HashSet<StackInfo>();
- for (StackInfo stackInfo : stacksResult) {
- if (stackName.equals(stackInfo.getName())) {
- stackVersions.add(stackInfo);
- }
- }
- return stackVersions;
+ return stacks;
}
- public StackInfo getStackInfo(String stackName, String version) throws AmbariException {
- StackInfo stackInfoResult = null;
-
- for (StackInfo stack : stacksResult) {
- if (stackName.equals(stack.getName())
- && version.equals(stack.getVersion())) {
- stackInfoResult = stack;
- break;
- }
- }
+ public StackInfo getStack(String stackName, String version) throws AmbariException {
+ StackInfo stackInfoResult = stackManager.getStack(stackName, version);
if (stackInfoResult == null) {
throw new StackAccessException("stackName=" + stackName
@@ -718,42 +527,33 @@ public class AmbariMetaInfo {
public List<String> getStackParentVersions(String stackName, String version) {
List<String> parents = new ArrayList<String>();
try {
- StackInfo stackInfo = getStackInfo(stackName, version);
- if (stackInfo != null) {
- String parentVersion = stackInfo.getParentStackVersion();
- if (parentVersion != null) {
- parents.add(parentVersion);
- parents.addAll(getStackParentVersions(stackName, parentVersion));
- }
+ StackInfo stackInfo = getStack(stackName, version);
+ String parentVersion = stackInfo.getParentStackVersion();
+ if (parentVersion != null) {
+ parents.add(parentVersion);
+ parents.addAll(getStackParentVersions(stackName, parentVersion));
}
} catch (AmbariException e) {
- // parent was not found. just returning empty list
- } finally {
- return parents;
+ // parent was not found.
}
+ return parents;
}
- public Set<PropertyInfo> getProperties(String stackName, String version, String serviceName)
+ public Set<PropertyInfo> getServiceProperties(String stackName, String version, String serviceName)
throws AmbariException {
- ServiceInfo serviceInfo = getServiceInfo(stackName, version, serviceName);
- List<PropertyInfo> properties = serviceInfo.getProperties();
- Set<PropertyInfo> propertiesResult = new HashSet<PropertyInfo>(properties);
- return propertiesResult;
+ return new HashSet<PropertyInfo>(getService(stackName, version, serviceName).getProperties());
}
public Set<PropertyInfo> getStackProperties(String stackName, String version)
throws AmbariException {
- StackInfo stackInfo = getStackInfo(stackName, version);
- List<PropertyInfo> properties = stackInfo.getProperties();
- Set<PropertyInfo> propertiesResult = new HashSet<PropertyInfo>(properties);
- return propertiesResult;
+ return new HashSet<PropertyInfo>(getStack(stackName, version).getProperties());
}
public Set<PropertyInfo> getPropertiesByName(String stackName, String version, String serviceName, String propertyName)
throws AmbariException {
- Set<PropertyInfo> properties = getProperties(stackName, version, serviceName);
+ Set<PropertyInfo> properties = getServiceProperties(stackName, version, serviceName);
if (properties.size() == 0) {
throw new StackAccessException("stackName=" + stackName
@@ -815,7 +615,7 @@ public class AmbariMetaInfo {
throws AmbariException {
Set<OperatingSystemInfo> operatingSystems = new HashSet<OperatingSystemInfo>();
- StackInfo stack = getStackInfo(stackName, version);
+ StackInfo stack = getStack(stackName, version);
List<RepositoryInfo> repositories = stack.getRepositories();
for (RepositoryInfo repository : repositories) {
operatingSystems.add(new OperatingSystemInfo(repository.getOsType()));
@@ -840,6 +640,7 @@ public class AmbariMetaInfo {
for (OperatingSystemInfo operatingSystem : operatingSystems) {
if (operatingSystem.getOsType().equals(osType)) {
resultOperatingSystem = operatingSystem;
+ break;
}
}
@@ -895,168 +696,10 @@ public class AmbariMetaInfo {
adManager.addActionDefinition(ad);
}
- private void getConfigurationInformation(File stackRoot) throws Exception {
- String stackRootAbsPath = stackRoot.getAbsolutePath();
- if (LOG.isDebugEnabled()) {
- LOG.debug("Loading stack information"
- + ", stackRoot = " + stackRootAbsPath);
- }
-
- if (!stackRoot.isDirectory() && !stackRoot.exists()) {
- throw new IOException("" + Configuration.METADETA_DIR_PATH
- + " should be a directory with stack"
- + ", stackRoot = " + stackRootAbsPath);
- }
-
- StackExtensionHelper stackExtensionHelper = new StackExtensionHelper(injector, stackRoot);
- stackExtensionHelper.fillInfo();
-
- List<StackInfo> stacks = stackExtensionHelper.getAllAvailableStacks();
- if (stacks.isEmpty()) {
- throw new AmbariException("Unable to find stack definitions under " +
- "stackRoot = " + stackRootAbsPath);
- }
-
- ExecutorService es = Executors.newSingleThreadExecutor(new ThreadFactory() {
- @Override
- public Thread newThread(Runnable r) {
- return new Thread(r, "Stack Version Loading Thread");
- }
- });
-
- List<LatestRepoCallable> lookupList = new ArrayList<LatestRepoCallable>();
-
- for (StackInfo stack : stacks) {
- LOG.debug("Adding new stack to known stacks"
- + ", stackName = " + stack.getName()
- + ", stackVersion = " + stack.getVersion());
-
- stacksResult.add(stack);
-
- String stackPath = stackRootAbsPath + File.separator +
- stack.getName() + File.separator + stack.getVersion();
-
- // get repository data for current stack of techs
- File repositoryFolder = new File(stackPath
- + File.separator + REPOSITORY_FOLDER_NAME + File.separator
- + REPOSITORY_FILE_NAME);
-
- if (repositoryFolder.exists()) {
- LOG.debug("Adding repositories to stack"
- + ", stackName=" + stack.getName()
- + ", stackVersion=" + stack.getVersion()
- + ", repoFolder=" + repositoryFolder.getPath());
-
- List<RepositoryInfo> repositoryInfoList = getRepository(repositoryFolder,
- stack, lookupList);
-
- stack.getRepositories().addAll(repositoryInfoList);
- } else {
- LOG.warn("No repository information defined for "
- + ", stackName=" + stack.getName()
- + ", stackVersion=" + stack.getVersion()
- + ", repoFolder=" + repositoryFolder.getPath());
- }
-
- // Populate services
- List<ServiceInfo> services = stackExtensionHelper.getAllApplicableServices(stack);
- stack.setServices(services);
-
- Map<String, Map<String, PropertyInfo>> stackRequiredProps = new HashMap<String, Map<String, PropertyInfo>>();
- requiredProperties.put(new StackId(stack.getName(), stack.getVersion()), stackRequiredProps);
- for (ServiceInfo service : services) {
- // Set required config properties
- stackRequiredProps.put(service.getName(), getAllRequiredProperties(service));
- }
-
- // Resolve hooks folder
- String stackHooksToUse = stackExtensionHelper.resolveHooksFolder(stack);
- stack.setStackHooksFolder(stackHooksToUse);
-
- File upgradesFolder = new File(stackPath + File.separator + UPGRADE_PACK_FOLDER_NAME);
- if (upgradesFolder.exists() && upgradesFolder.isDirectory()) {
- stack.setUpgradesFolder(upgradesFolder.getAbsolutePath());
- }
- }
-
- es.invokeAll(lookupList);
-
- es.shutdown();
- }
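
The loader removed above resolves the "latest" repo URLs off the main path: while walking the stacks it collects LatestRepoCallable tasks and then hands the whole batch to a single-thread executor, with a recognizable thread name, via invokeAll(). A minimal, self-contained sketch of that java.util.concurrent pattern, assuming nothing beyond the JDK (the thread name is taken from the diff; the class and the callables are illustrative, not Ambari APIs):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;

public class NamedSingleThreadLookupSketch {
  public static void main(String[] args) throws InterruptedException {
    // One background thread, named so it is easy to spot in thread dumps.
    ExecutorService es = Executors.newSingleThreadExecutor(new ThreadFactory() {
      @Override
      public Thread newThread(Runnable r) {
        return new Thread(r, "Stack Version Loading Thread");
      }
    });

    // Stand-ins for the LatestRepoCallable instances gathered per stack.
    List<Callable<String>> lookups = new ArrayList<Callable<String>>();
    for (int i = 0; i < 3; i++) {
      final int stackIndex = i;
      lookups.add(new Callable<String>() {
        @Override
        public String call() {
          return "resolved latest repo url for stack #" + stackIndex;
        }
      });
    }

    // invokeAll() blocks until every callable has completed (or failed),
    // which is why the loader can shut the executor down right afterwards.
    es.invokeAll(lookups);
    es.shutdown();
  }
}
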
-
- /**
- * Get properties with require_input attribute set to true.
- *
- * @param stackName name of the stack, e.g.: HDP
- * @param stackVersion version of the stack
- * @return Map of property name to PropertyInfo
- */
- public Map<String, PropertyInfo> getRequiredProperties(String stackName, String stackVersion, String service) {
-
- Map<String, Map<String, PropertyInfo>> requiredStackProps =
- requiredProperties.get(new StackId(stackName, stackVersion));
-
- if (requiredStackProps != null) {
- Map<String, PropertyInfo> requiredServiceProperties = requiredStackProps.get(service);
- return requiredServiceProperties == null ? Collections.<String, PropertyInfo>emptyMap() :
- requiredServiceProperties;
- }
- return Collections.emptyMap();
- }
-
public String getServerVersion() {
return serverVersion;
}
- private List<RepositoryInfo> getRepository(File repositoryFile, StackInfo stack,
- List<LatestRepoCallable> lookupList)
- throws JAXBException {
-
- RepositoryXml rxml = StackExtensionHelper.unmarshal(RepositoryXml.class, repositoryFile);
-
- List<RepositoryInfo> list = new ArrayList<RepositoryInfo>();
-
- for (Os o : rxml.getOses()) {
- for (String os : o.getFamily().split(",")) {
- for (Repo r : o.getRepos()) {
- RepositoryInfo ri = new RepositoryInfo();
- ri.setBaseUrl(r.getBaseUrl());
- ri.setDefaultBaseUrl(r.getBaseUrl());
- ri.setMirrorsList(r.getMirrorsList());
- ri.setOsType(os.trim());
- ri.setRepoId(r.getRepoId());
- ri.setRepoName(r.getRepoName());
- ri.setLatestBaseUrl(r.getBaseUrl());
-
- if (null != metainfoDAO) {
- LOG.debug("Checking for override for base_url");
- String key = generateRepoMetaKey(r.getRepoName(), stack.getVersion(),
- o.getFamily(), r.getRepoId(), REPOSITORY_XML_PROPERTY_BASEURL);
- MetainfoEntity entity = metainfoDAO.findByKey(key);
- if (null != entity) {
- ri.setBaseUrl(entity.getMetainfoValue());
- }
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Adding repo to stack"
- + ", repoInfo=" + ri.toString());
- }
-
- list.add(ri);
- }
- }
- }
-
- if (null != rxml.getLatestURI() && list.size() > 0) {
- lookupList.add(new LatestRepoCallable(rxml.getLatestURI(),
- repositoryFile.getParentFile(), stack, os_family));
- }
-
- return list;
-
- }
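
The removed getRepository() above leaves the actual XML parsing to StackExtensionHelper.unmarshal(RepositoryXml.class, repositoryFile); its throws JAXBException clause confirms JAXB sits underneath, but the helper's body is not shown in this diff. A plausible generic sketch of such a wrapper, under that assumption (the class name XmlUnmarshalSketch is hypothetical):

import java.io.File;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;

public final class XmlUnmarshalSketch {
  // Parse an XML file into the given JAXB-annotated type.
  public static <T> T unmarshal(Class<T> clz, File file) throws JAXBException {
    JAXBContext ctx = JAXBContext.newInstance(clz);
    Unmarshaller unmarshaller = ctx.createUnmarshaller();
    // The cast is safe because the context was created for exactly this type.
    return clz.cast(unmarshaller.unmarshal(file));
  }
}
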
-
public boolean isOsSupported(String osType) {
return ALL_SUPPORTED_OS.contains(osType);
}
@@ -1102,7 +745,7 @@ public class AmbariMetaInfo {
ri.setBaseUrl(newBaseUrl);
- if (null != metainfoDAO) {
+ if (null != metaInfoDAO) {
String metaKey = generateRepoMetaKey(stackName, stackVersion, osType,
repoId, REPOSITORY_XML_PROPERTY_BASEURL);
@@ -1111,9 +754,9 @@ public class AmbariMetaInfo {
entity.setMetainfoValue(newBaseUrl);
if (null != ri.getDefaultBaseUrl() && newBaseUrl.equals(ri.getDefaultBaseUrl())) {
- metainfoDAO.remove(entity);
+ metaInfoDAO.remove(entity);
} else {
- metainfoDAO.merge(entity);
+ metaInfoDAO.merge(entity);
}
}
}
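
The base-URL hunk above encodes a small persistence rule: an override row in the metainfo table is kept only while the caller's URL differs from the stack default, and setting the URL back to the default removes the row instead of storing a redundant copy. A hedged fragment showing just that rule, reusing the metaInfoDAO and MetainfoEntity names from the diff but otherwise illustrative:

// Store the override only while it differs from the stack default.
entity.setMetainfoValue(newBaseUrl);
if (ri.getDefaultBaseUrl() != null && newBaseUrl.equals(ri.getDefaultBaseUrl())) {
  metaInfoDAO.remove(entity);   // back to the default, drop the override row
} else {
  metaInfoDAO.merge(entity);    // persist the customized base URL
}
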
@@ -1169,23 +812,6 @@ public class AmbariMetaInfo {
}
/**
- * Get all required properties for the given service.
- *
- * @param service associated service
- * @return map of property name to PropertyInfo containing all required properties for service
- */
- private Map<String, PropertyInfo> getAllRequiredProperties(ServiceInfo service) {
- Map<String, PropertyInfo> requiredProperties = new HashMap<String, PropertyInfo>();
- List<PropertyInfo> properties = service.getProperties();
- for (PropertyInfo propertyInfo : properties) {
- if (propertyInfo.isRequireInput()) {
- requiredProperties.put(propertyInfo.getName(), propertyInfo);
- }
- }
- return requiredProperties;
- }
-
- /**
* Gets the alert definitions for the specified stack and service.
*
* @param stackName
@@ -1199,20 +825,15 @@ public class AmbariMetaInfo {
* @throws AmbariException
*/
public Set<AlertDefinition> getAlertDefinitions(String stackName, String stackVersion,
- String serviceName) throws AmbariException {
+ String serviceName) throws AmbariException {
ServiceInfo svc = getService(stackName, stackVersion, serviceName);
return getAlertDefinitions(svc);
}
-
/**
* Gets the alert definitions for the specified stack and service.
*
- * @param stackName
- * the stack name
- * @param stackVersion
- * the stack version
- * @param serviceName
+ * @param service
* the service name
* @return the alert definitions for a stack or an empty list if none (never
* {@code null}).
@@ -1243,7 +864,7 @@ public class AmbariMetaInfo {
* definitions that should be run on agent hosts but are not associated with a
* service.
*
- * @param clusters
+ * @param clusters all clusters
* @throws AmbariException
*/
public void reconcileAlertDefinitions(Clusters clusters)
@@ -1258,11 +879,11 @@ public class AmbariMetaInfo {
for (Cluster cluster : clusterMap.values()) {
long clusterId = cluster.getClusterId();
StackId stackId = cluster.getDesiredStackVersion();
- StackInfo stackInfo = getStackInfo(stackId.getStackName(),
+ StackInfo stackInfo = getStack(stackId.getStackName(),
stackId.getStackVersion());
// creating a mapping between service name and service for fast lookups
- List<ServiceInfo> stackServices = stackInfo.getServices();
+ Collection<ServiceInfo> stackServices = stackInfo.getServices();
Map<String, ServiceInfo> stackServiceMap = new HashMap<String, ServiceInfo>();
for (ServiceInfo stackService : stackServices) {
stackServiceMap.put(stackService.getName(), stackService);
@@ -1318,8 +939,6 @@ public class AmbariMetaInfo {
LOG.debug(
"The alert named {} has been modified from the stack definition and will not be merged",
stackDefinition.getName());
-
- continue;
}
}
@@ -1357,54 +976,22 @@ public class AmbariMetaInfo {
}
/**
- * Gets upgrade packs available for a stack.
+ * Get all upgrade packs available for a stack.
+ *
* @param stackName the stack name
* @param stackVersion the stack version
* @return a map of upgrade packs, keyed by the name of the upgrade pack
*/
public Map<String, UpgradePack> getUpgradePacks(String stackName, String stackVersion) {
- StackInfo stack = null;
try {
- stack = getStackInfo(stackName, stackVersion);
- } catch (AmbariException e) {
- LOG.debug("Cannot load upgrade packs for non-existent stack {}-{}",
- stackName, stackVersion, e);
- return Collections.emptyMap();
- }
-
- File folder = new File(stack.getUpgradesFolder());
- if (!folder.exists() || !folder.isDirectory()) {
- LOG.error("Upgrades folder {} no longer exists", stack.getUpgradesFolder());
- return Collections.emptyMap();
- }
+ StackInfo stack = getStack(stackName, stackVersion);
+ return stack.getUpgradePacks() == null ?
+ Collections.<String, UpgradePack>emptyMap() : stack.getUpgradePacks();
- String[] fileNames = folder.list(new FilenameFilter() {
- @Override
- public boolean accept(File folder, String fileName) {
- if (fileName.toLowerCase().endsWith(".xml")) {
- return true;
- } else {
- return false;
- }
- }
- });
-
-
- Map<String, UpgradePack> packs = new HashMap<String, UpgradePack>();
-
- for (String fileName : fileNames) {
- File f = new File(folder, fileName);
-
- String packName = FilenameUtils.removeExtension(fileName);
-
- try {
- UpgradePack up = StackExtensionHelper.unmarshal(UpgradePack.class, f);
- packs.put(packName, up);
- } catch (Exception e) {
- LOG.error("Could not parse {} into an upgrade pack", f.getAbsolutePath());
- }
+ } catch (AmbariException e) {
+ LOG.debug("Cannot load upgrade packs for non-existent stack {}-{}", stackName, stackVersion, e);
}
- return packs;
+ return Collections.emptyMap();
}
}
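
With the rewrite above, getUpgradePacks() never returns null: both an unknown stack and a stack without upgrade packs come back as an empty map, so callers can iterate the result without a null check. A short usage sketch under that assumption (the metaInfo variable and the HDP 2.2 coordinates are only examples):

Map<String, UpgradePack> packs = metaInfo.getUpgradePacks("HDP", "2.2");
for (Map.Entry<String, UpgradePack> pack : packs.entrySet()) {
  LOG.info("Found upgrade pack {}", pack.getKey());
}
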