You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by mr...@apache.org on 2017/11/27 23:29:18 UTC
[06/30] ambari git commit: Merge trunk with feature branch and fix
some UT compilation issues (mradhakrishnan)
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index 7c43948..4fc9b42 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -35,6 +35,7 @@
<check>org.apache.ambari.server.checks.YarnTimelineServerStatePreservingCheck</check>
<check>org.apache.ambari.server.checks.RangerSSLConfigCheck</check>
<check>org.apache.ambari.server.checks.DruidHighAvailabilityCheck</check>
+ <check>org.apache.ambari.server.checks.LZOCheck</check>
<configuration>
<!-- Configuration properties for all pre-reqs including required pre-reqs -->
@@ -182,7 +183,7 @@
<component>HBASE_MASTER</component>
</service>
</group>
-
+
<group name="SERVICE_CHECK_1" title="All Service Checks" xsi:type="service-check">
<direction>UPGRADE</direction>
<skippable>true</skippable>
@@ -198,7 +199,7 @@
<service>LOGSEARCH</service>
</exclude>
</group>
-
+
<group name="CORE_SLAVES" title="Core Slaves" xsi:type="colocated">
<service-check>false</service-check>
<skippable>true</skippable>
@@ -206,7 +207,7 @@
<component>DATANODE</component>
<component>NFS_GATEWAY</component>
</service>
-
+
<service name="HBASE">
<component>HBASE_REGIONSERVER</component>
<component>PHOENIX_QUERY_SERVER</component>
@@ -222,7 +223,7 @@
<message>The initial batch of {{components}} hosts have been {{direction.past}}. You are advised to check the hosts and perform cluster/workload-specific tests against your cluster to ensure proper operation before proceeding with {{direction.text}} of the remaining services.</message>
</batch>
</group>
-
+
<group name="SERVICE_CHECK_2" title="All Service Checks" xsi:type="service-check">
<direction>UPGRADE</direction>
<skippable>true</skippable>
@@ -457,7 +458,7 @@
<group xsi:type="cluster" name="FINALIZE_PRE_CHECK" title="Finalize {{direction.text.proper}} Pre-Check">
<direction>UPGRADE</direction>
-
+
<execute-stage title="Check Component Versions">
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.ComponentVersionCheckAction" />
</execute-stage>
@@ -466,7 +467,7 @@
<group xsi:type="cluster" name="POST_CLUSTER" title="Finalize {{direction.text.proper}}">
<skippable>true</skippable>
<supports-auto-skip-failure>false</supports-auto-skip-failure>
-
+
<execute-stage title="Confirm Finalize">
<direction>UPGRADE</direction>
<task xsi:type="manual">
@@ -490,7 +491,7 @@
<function>finalize_rolling_upgrade</function>
</task>
</execute-stage>
-
+
<execute-stage title="Save Cluster State">
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FinalizeUpgradeAction">
</task>
@@ -576,7 +577,7 @@
<function>configure_atlas_user_for_tagsync</function>
</task>
</pre-upgrade>
-
+
<pre-downgrade copy-upgrade="true" />
<upgrade>
@@ -667,7 +668,7 @@
</upgrade>
</component>
</service>
-
+
<service name="SLIDER">
<component name="SLIDER">
<upgrade>
@@ -690,7 +691,7 @@
</task>
<task xsi:type="configure" id="hdp_2_6_0_0_mapreduce_job_queuename"/>
</pre-upgrade>
-
+
<pre-downgrade copy-upgrade="true" />
<upgrade>
@@ -721,11 +722,11 @@
<task xsi:type="configure" id="yarn_site_retained_log_count" />
<task xsi:type="configure" id="hdp_2_6_0_0_service_check_queue_name"/>
<task xsi:type="configure" id="hdp_2_6_0_0_ats_scan_interval_default"/>
-
+
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixCapacitySchedulerOrderingPolicy">
<summary>Validate Root Queue Ordering Policy</summary>
</task>
-
+
</pre-upgrade>
<pre-downgrade />
<upgrade>
@@ -784,9 +785,9 @@
</task>
<task xsi:type="configure" id="hdp_2_5_0_0_tez_queue_name"/>
</pre-upgrade>
-
+
<pre-downgrade copy-upgrade="true" />
-
+
<upgrade>
<task xsi:type="restart-task" />
</upgrade>
@@ -864,9 +865,9 @@
<task xsi:type="configure" id="hdp_2_6_maint_llap_config"/>
<task xsi:type="configure" id="llap_update_tez_shuffle_ssl_enable"/>
</pre-upgrade>
-
+
<pre-downgrade />
-
+
<upgrade>
<task xsi:type="restart-task" />
</upgrade>
@@ -912,9 +913,9 @@
<task xsi:type="configure" id="hdp_2_5_0_0_rename_spark_livy_configs" />
<task xsi:type="configure" id="hdp_2_5_0_0_add_spark_conf_dir_livy_configs" />
</pre-upgrade>
-
+
<pre-downgrade />
-
+
<upgrade>
<task xsi:type="restart-task" />
</upgrade>
@@ -932,11 +933,19 @@
<service name="SPARK2">
<component name="SPARK2_JOBHISTORYSERVER">
+ <pre-upgrade>
+ <task xsi:type="configure" id="hdp_2_5_0_0_spark2_jobhistoryserver_log4j"/>
+ </pre-upgrade>
+ <pre-downgrade />
<upgrade>
<task xsi:type="restart-task" />
</upgrade>
</component>
<component name="SPARK2_THRIFTSERVER">
+ <pre-upgrade>
+ <task xsi:type="configure" id="hdp_2_5_0_0_spark2_thriftserver_log4j"/>
+ </pre-upgrade>
+ <pre-downgrade />
<upgrade>
<task xsi:type="restart-task" />
</upgrade>
@@ -944,6 +953,7 @@
<component name="SPARK2_CLIENT">
<pre-upgrade>
<task xsi:type="configure" id="hdp_2_5_0_0_spark2_yarn_queue"/>
+ <task xsi:type="configure" id="hdp_2_5_0_0_spark2_client_log4j"/>
</pre-upgrade>
<pre-downgrade/>
<upgrade>
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/stacks/HDP/2.6/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/repos/repoinfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/repos/repoinfo.xml
index ff132aa..eb0b0ef 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/repos/repoinfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/repos/repoinfo.xml
@@ -33,7 +33,7 @@
</os>
<os family="redhat-ppc7">
<repo>
- <baseurl>http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.6.0.3</baseurl>
+ <baseurl>http://public-repo-1.hortonworks.com/HDP/centos7-ppc/2.x/updates/2.6.0.3</baseurl>
<repoid>HDP-2.6</repoid>
<reponame>HDP</reponame>
<unique>true</unique>
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/spark2-log4j-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/spark2-log4j-properties.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/spark2-log4j-properties.xml
new file mode 100644
index 0000000..d7479f4
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/spark2-log4j-properties.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+ <property>
+ <name>content</name>
+ <description>Spark2-log4j-Properties</description>
+ <value>
+# Set everything to be logged to the console
+log4j.rootCategory=INFO, console
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+# Set the default spark-shell log level to WARN. When running the spark-shell, the
+# log level for this class is used to overwrite the root logger's log level, so that
+# the user can have different defaults for the shell and regular Spark apps.
+log4j.logger.org.apache.spark.repl.Main=WARN
+
+# Settings to quiet third party logs that are too verbose
+log4j.logger.org.spark_project.jetty=WARN
+log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
+log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
+log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
+log4j.logger.org.apache.parquet=ERROR
+log4j.logger.parquet=ERROR
+
+# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
+log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
+log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
+ </value>
+ <value-attributes>
+ <type>content</type>
+ <show-property-name>false</show-property-name>
+ </value-attributes>
+ <on-ambari-upgrade add="true"/>
+ </property>
+</configuration>
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json
index e0417bf..bd6798c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json
@@ -107,21 +107,11 @@
},
{
"name": "llap_task_hive",
+ "reference": "/HIVE/HIVE_SERVER/hive_server_hive",
"principal": {
- "value": "hive/_HOST@${realm}",
- "type" : "service",
"configuration": "hive-interactive-site/hive.llap.task.principal"
},
"keytab": {
- "file": "${keytab_dir}/hive.llap.task.keytab",
- "owner": {
- "name": "${yarn-env/yarn_user}",
- "access": "r"
- },
- "group": {
- "name": "${cluster-env/user_group}",
- "access": "r"
- },
"configuration": "hive-interactive-site/hive.llap.task.keytab.file"
},
"when" : {
@@ -130,21 +120,11 @@
},
{
"name": "llap_zk_hive",
+ "reference": "/HIVE/HIVE_SERVER/hive_server_hive",
"principal": {
- "value": "hive/_HOST@${realm}",
- "type" : "service",
"configuration": "hive-interactive-site/hive.llap.zk.sm.principal"
},
"keytab": {
- "file": "${keytab_dir}/hive.llap.zk.sm.keytab",
- "owner": {
- "name": "${yarn-env/yarn_user}",
- "access": "r"
- },
- "group": {
- "name": "${cluster-env/user_group}",
- "access": "r"
- },
"configuration": "hive-interactive-site/hive.llap.zk.sm.keytab.file"
},
"when" : {
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
index 63624d6..bcb13bc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/config-upgrade.xml
@@ -258,5 +258,28 @@
</component>
</service>
+ <service name="TEZ">
+ <component name="TEZ_CLIENT">
+ <changes>
+ <definition xsi:type="configure" id="hdp_2_6_tez_tarball_ld_library">
+ <type>tez-site</type>
+ <set key="tez.am.launch.env" value="LD_LIBRARY_PATH=./tezlib/lib/native:./tezlib/lib/native/Linux-{{architecture}}-64:/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-{{architecture}}-64"/>
+ <set key="tez.task.launch.env" value="LD_LIBRARY_PATH=./tezlib/lib/native:./tezlib/lib/native/Linux-{{architecture}}-64:/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-{{architecture}}-64"/>
+ </definition>
+ </changes>
+ </component>
+ </service>
+
+ <service name="MAPREDUCE2">
+ <component name="MAPREDUCE2_CLIENT">
+ <changes>
+ <definition xsi:type="configure" id="hdp_2_6_mapreduce_tarball_ld_library">
+ <type>mapred-site</type>
+ <set key="mapreduce.admin.user.env" value="LD_LIBRARY_PATH=./mr-framework/hadoop/lib/native:./mr-framework/hadoop/lib/native/Linux-{{architecture}}-64:{{hadoop_lib_home}}/native/Linux-{{architecture}}-64"/>
+ </definition>
+ </changes>
+ </component>
+ </service>
+
</services>
</upgrade-config-changes>
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
index c9e90a9..0355362 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
@@ -21,6 +21,7 @@
<target-stack>HDP-2.6</target-stack>
<type>NON_ROLLING</type>
<prerequisite-checks>
+ <check>org.apache.ambari.server.checks.LZOCheck</check>
<configuration>
<!-- Configuration properties for all pre-reqs including required pre-reqs -->
<check-properties name="org.apache.ambari.server.checks.HiveDynamicServiceDiscoveryCheck">
@@ -409,6 +410,15 @@
</task>
</execute-stage>
+ <!-- TEZ -->
+ <execute-stage service="TEZ" component="TEZ_CLIENT" title="Updating LD Library Classpath for Tarball" >
+ <task xsi:type="configure" id="hdp_2_6_tez_tarball_ld_library" supports-patch="true"/>
+ </execute-stage>
+
+ <!-- MAPREDUCE2 -->
+ <execute-stage service="MAPREDUCE2" component="MAPREDUCE2_CLIENT" title="Updating LD Library Classpath for Tarball">
+ <task xsi:type="configure" id="hdp_2_6_mapreduce_tarball_ld_library" supports-patch="true"/>
+ </execute-stage>
</group>
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
index 176143c..5aa2d20 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/upgrade-2.6.xml
@@ -34,6 +34,7 @@
<check>org.apache.ambari.server.checks.YarnRMHighAvailabilityCheck</check>
<check>org.apache.ambari.server.checks.YarnTimelineServerStatePreservingCheck</check>
<check>org.apache.ambari.server.checks.DruidHighAvailabilityCheck</check>
+ <check>org.apache.ambari.server.checks.LZOCheck</check>
<configuration>
<!-- Configuration properties for all pre-reqs including required pre-reqs -->
@@ -675,6 +676,8 @@
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">
<summary>Verifying LZO codec path for mapreduce</summary>
</task>
+
+ <task xsi:type="configure" id="hdp_2_6_mapreduce_tarball_ld_library" supports-patch="true"/>
</pre-upgrade>
<pre-downgrade copy-upgrade="true" />
@@ -757,6 +760,8 @@
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixLzoCodecPath">
<summary>Verifying LZO codec path for Tez</summary>
</task>
+
+ <task xsi:type="configure" id="hdp_2_6_tez_tarball_ld_library" supports-patch="true"/>
</pre-upgrade>
<pre-downgrade copy-upgrade="true" />
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json
index be4c718..5fb61c8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json
@@ -871,6 +871,12 @@
"current_dir": "{0}/current/atlas-client/conf"
}
],
+ "beacon": [
+ {
+ "conf_dir": "/etc/beacon/conf",
+ "current_dir": "{0}/current/beacon-client/conf"
+ }
+ ],
"druid": [
{
"conf_dir": "/etc/druid/conf",
@@ -937,6 +943,12 @@
"current_dir": "{0}/current/knox-server/conf"
}
],
+ "livy": [
+ {
+ "conf_dir": "/etc/livy/conf",
+ "current_dir": "{0}/current/livy-client/conf"
+ }
+ ],
"mahout": [
{
"conf_dir": "/etc/mahout/conf",
@@ -1051,6 +1063,100 @@
"current_dir": "{0}/current/zookeeper-client/conf"
}
]
- }
+ },
+ "conf-select-patching": {
+ "ACCUMULO": {
+ "packages": ["accumulo"]
+ },
+ "ATLAS": {
+ "packages": ["atlas"]
+ },
+ "BEACON": {
+ "packages": ["beacon"]
+ },
+ "DRUID": {
+ "packages": ["druid", "superset"]
+ },
+ "FALCON": {
+ "packages": ["falcon"]
+ },
+ "FLUME": {
+ "packages": ["flume"]
+ },
+ "HBASE": {
+ "packages": ["hbase"]
+ },
+ "HDFS": {
+ "packages": []
+ },
+ "HIVE": {
+ "packages": ["hive", "hive-hcatalog", "hive2", "tez_hive2"]
+ },
+ "KAFKA": {
+ "packages": ["kafka"]
+ },
+ "KNOX": {
+ "packages": ["knox"]
+ },
+ "MAHOUT": {
+ "packages": ["mahout"]
+ },
+ "MAPREDUCE2": {
+ "packages": []
+ },
+ "OOZIE": {
+ "packages": ["oozie"]
+ },
+ "PIG": {
+ "packages": ["pig"]
+ },
+ "R4ML": {
+ "packages": []
+ },
+ "RANGER": {
+ "packages": ["ranger-admin", "ranger-usersync", "ranger-tagsync"]
+ },
+ "RANGER_KMS": {
+ "packages": ["ranger-kms"]
+ },
+ "SLIDER": {
+ "packages": ["slider"]
+ },
+ "SPARK": {
+ "packages": ["spark", "livy"]
+ },
+ "SPARK2": {
+ "packages": ["spark2", "livy"]
+ },
+ "SQOOP": {
+ "packages": ["sqoop"]
+ },
+ "STORM": {
+ "packages": ["storm", "storm-slider-client"]
+ },
+ "SYSTEMML": {
+ "packages": []
+ },
+ "TEZ": {
+ "packages": ["tez"]
+ },
+ "TITAN": {
+ "packages": []
+ },
+ "YARN": {
+ "packages": []
+ },
+ "ZEPPELIN": {
+ "packages": ["zeppelin"]
+ },
+ "ZOOKEEPER": {
+ "packages": ["zookeeper"]
+ }
+ },
+ "upgrade-dependencies" : {
+ "HIVE": ["TEZ"],
+ "MAHOUT": ["MAPREDUCE2"],
+ "OOZIE": ["MAPREDUCE2"]
+ }
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/metainfo.xml
index 95a5f84..775508e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/metainfo.xml
@@ -50,19 +50,6 @@
<name>snappy-devel</name>
</package>
<package>
- <name>lzo</name>
- <skipUpgrade>true</skipUpgrade>
- <condition>should_install_lzo</condition>
- </package>
- <package>
- <name>hadooplzo_${stack_version}</name>
- <condition>should_install_lzo</condition>
- </package>
- <package>
- <name>hadooplzo_${stack_version}-native</name>
- <condition>should_install_lzo</condition>
- </package>
- <package>
<name>hadoop_${stack_version}-libhdfs</name>
</package>
</packages>
@@ -84,19 +71,6 @@
<name>snappy-devel</name>
</package>
<package>
- <name>liblzo2-2</name>
- <skipUpgrade>true</skipUpgrade>
- <condition>should_install_lzo</condition>
- </package>
- <package>
- <name>hadooplzo_${stack_version}</name>
- <condition>should_install_lzo</condition>
- </package>
- <package>
- <name>hadooplzo_${stack_version}-native</name>
- <condition>should_install_lzo</condition>
- </package>
- <package>
<name>hadoop_${stack_version}-libhdfs</name>
</package>
</packages>
@@ -130,10 +104,6 @@
<name>libsnappy-dev</name>
</package>
<package>
- <name>hadooplzo-${stack_version}</name>
- <condition>should_install_lzo</condition>
- </package>
- <package>
<name>libhdfs0-${stack_version}</name>
</package>
</packages>
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-site.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-site.xml
index 6ce4d72..489754f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-site.xml
@@ -31,7 +31,7 @@
<!-- These configs were inherited from HDP 2.2 -->
<property>
<name>mapreduce.admin.user.env</name>
- <value>LD_LIBRARY_PATH=/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-{{architecture}}-64</value>
+ <value>LD_LIBRARY_PATH=./mr-framework/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native:/usr/hdp/${hdp.version}/hadoop/lib/native/Linux-{{architecture}}-64</value>
<description>
Additional execution environment entries for map and reduce task processes.
This is not an additive property. You must preserve the original value if
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/stacks/PERF/1.0/services/KERBEROS/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/KERBEROS/configuration/kerberos-env.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/services/KERBEROS/configuration/kerberos-env.xml
index 66e81db..a66a7a6 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/KERBEROS/configuration/kerberos-env.xml
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/KERBEROS/configuration/kerberos-env.xml
@@ -348,8 +348,8 @@
<on-ambari-upgrade add="true"/>
</property>
<property>
- <name>group</name>
- <display-name>IPA Group</display-name>
+ <name>ipa_user_group</name>
+ <display-name>IPA User Group</display-name>
<description>
The group in IPA user principals should be member of
</description>
@@ -360,36 +360,4 @@
</value-attributes>
<on-ambari-upgrade add="true"/>
</property>
- <property>
- <name>set_password_expiry</name>
- <display-name>Set IPA principal password expiry</display-name>
- <description>
- Indicates whether Ambari should set the password expiry for the principals it creates. By default
- IPA does not allow this. It requires write permission of the admin principal to the krbPasswordExpiry
- attribute. If set IPA principal password expiry is not true it is assumed that a suitable password
- policy is in place for the IPA Group principals are added to.
- </description>
- <value>false</value>
- <value-attributes>
- <type>boolean</type>
- <overridable>false</overridable>
- <visible>false</visible>
- </value-attributes>
- <on-ambari-upgrade add="true"/>
- </property>
- <property>
- <name>password_chat_timeout</name>
- <display-name>Set IPA kinit password chat timeout</display-name>
- <description>
- Indicates the timeout in seconds that Ambari should wait for a response during a password chat. This is
- because it can take some time due to lookups before a response is there.
- </description>
- <value>5</value>
- <value-attributes>
- <visible>false</visible>
- <type>int</type>
- <overridable>false</overridable>
- </value-attributes>
- <on-ambari-upgrade add="true"/>
- </property>
</configuration>
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/upgrade-pack.xsd
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/upgrade-pack.xsd b/ambari-server/src/main/resources/upgrade-pack.xsd
index 79c50a7..9e50a08 100644
--- a/ambari-server/src/main/resources/upgrade-pack.xsd
+++ b/ambari-server/src/main/resources/upgrade-pack.xsd
@@ -87,7 +87,7 @@
</xs:extension>
</xs:complexContent>
</xs:complexType>
-
+
<xs:complexType name="security">
<xs:complexContent>
<xs:extension base="abstract-condition-type">
@@ -345,6 +345,13 @@
</xs:complexContent>
</xs:complexType>
+ <xs:complexType name="create_and_configure">
+ <xs:complexContent>
+ <xs:extension base="configure">
+ </xs:extension>
+ </xs:complexContent>
+ </xs:complexType>
+
<xs:complexType name="configure_function">
<xs:complexContent>
<xs:extension base="abstract-task-type">
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/version_definition.xsd
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/version_definition.xsd b/ambari-server/src/main/resources/version_definition.xsd
index eaed31d..db10d67 100644
--- a/ambari-server/src/main/resources/version_definition.xsd
+++ b/ambari-server/src/main/resources/version_definition.xsd
@@ -63,7 +63,13 @@
<xs:enumeration value="suse12" />
</xs:restriction>
</xs:simpleType>
-
+
+ <xs:simpleType name="tag-type">
+ <xs:restriction base="xs:NMTOKEN">
+ <xs:enumeration value="GPL" />
+ </xs:restriction>
+ </xs:simpleType>
+
<xs:complexType name="manifest-service-type">
<xs:annotation>
<xs:documentation>
@@ -113,6 +119,20 @@
</xs:element>
</xs:sequence>
</xs:complexType>
+
+ <xs:complexType name="tags-type">
+ <xs:annotation>
+ <xs:documentation>
+ Tags are a way to mark a repository for special purposes. There
+ may be logic surrounding a tag in the code, or a tag may be used by agents.
+
+ Tags are NOT meant to be random strings; they must be defined.
+ </xs:documentation>
+ </xs:annotation>
+ <xs:sequence>
+ <xs:element name="tag" type="tag-type" minOccurs="0" maxOccurs="unbounded" />
+ </xs:sequence>
+ </xs:complexType>
<xs:complexType name="repository-info-type">
<xs:sequence>
@@ -130,6 +150,7 @@
<xs:element name="components" type="xs:string" minOccurs="0" maxOccurs="1" />
<xs:element name="mirrorslist" type="xs:string" minOccurs="0" maxOccurs="1" />
<xs:element name="unique" type="xs:boolean" minOccurs="0" maxOccurs="1" />
+ <xs:element name="tags" type="tags-type" minOccurs="0" maxOccurs="1" />
</xs:sequence>
</xs:complexType>
</xs:element>
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
index eecb92e..50d4733 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
@@ -1549,7 +1549,7 @@ public class TestHeartbeatHandler {
kerberosIdentityDataFileWriter.writeRecord("c6403.ambari.apache.org", "HDFS", "DATANODE",
"dn/_HOST@_REALM", "service",
"/etc/security/keytabs/dn.service.keytab",
- "hdfs", "r", "hadoop", "", "false", "false");
+ "hdfs", "r", "hadoop", "", "false");
kerberosIdentityDataFileWriter.close();
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
index 959db15..2afbf8a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/commands/StackAdvisorCommandTest.java
@@ -301,9 +301,9 @@ public class StackAdvisorCommandTest {
"items",
list(
map(
- "AmbariConfiguration",
+ "Configuration",
map(
- "data", list(ldapConfigData)
+ "properties", ldapConfigData
)
)
)
@@ -380,7 +380,7 @@ public class StackAdvisorCommandTest {
"items",
list(
map(
- "AmbariConfiguration",
+ "Configuration",
map(
"data",
list(ldapConfigData, ldapConfigData)
@@ -417,7 +417,7 @@ public class StackAdvisorCommandTest {
.build();
Map<String, Object> ldapConfig = map(
- "AmbariConfiguration",
+ "Configuration",
map(
"data",
list(
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/checks/LZOCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/LZOCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/LZOCheckTest.java
new file mode 100644
index 0000000..13f52a5
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/LZOCheckTest.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.checks;
+
+ import java.util.HashMap;
+ import java.util.Map;
+
+ import org.apache.ambari.server.configuration.Configuration;
+ import org.apache.ambari.server.controller.PrereqCheckRequest;
+ import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+ import org.apache.ambari.server.state.Cluster;
+ import org.apache.ambari.server.state.Clusters;
+ import org.apache.ambari.server.state.Config;
+ import org.apache.ambari.server.state.DesiredConfig;
+ import org.apache.ambari.server.state.RepositoryType;
+ import org.apache.ambari.server.state.Service;
+ import org.apache.ambari.server.state.repository.ClusterVersionSummary;
+ import org.apache.ambari.server.state.repository.VersionDefinitionXml;
+ import org.apache.ambari.server.state.stack.PrereqCheckStatus;
+ import org.apache.ambari.server.state.stack.PrerequisiteCheck;
+ import org.junit.Assert;
+ import org.junit.Before;
+ import org.junit.Test;
+ import org.junit.runner.RunWith;
+ import org.mockito.Mock;
+ import org.mockito.Mockito;
+ import org.mockito.runners.MockitoJUnitRunner;
+
+ import com.google.inject.Provider;
+
+
+/* Test for LZOCheck */
+@RunWith(MockitoJUnitRunner.class)
+public class LZOCheckTest {
+
+ private final Clusters clusters = Mockito.mock(Clusters.class);
+ private final LZOCheck lZOCheck = new LZOCheck();
+
+ @Mock
+ private ClusterVersionSummary m_clusterVersionSummary;
+
+ @Mock
+ private VersionDefinitionXml m_vdfXml;
+
+ @Mock
+ private RepositoryVersionEntity m_repositoryVersion;
+
+ @Mock
+ private Configuration configuration;
+
+ final Map<String, Service> m_services = new HashMap<>();
+
+ @Before
+ public void setup() throws Exception {
+ lZOCheck.clustersProvider = new Provider<Clusters>() {
+ @Override
+ public Clusters get() {
+ return clusters;
+ }
+ };
+ lZOCheck.config = configuration;
+
+ m_services.clear();
+
+ Mockito.when(m_repositoryVersion.getType()).thenReturn(RepositoryType.STANDARD);
+ Mockito.when(m_repositoryVersion.getRepositoryXml()).thenReturn(m_vdfXml);
+ Mockito.when(m_vdfXml.getClusterSummary(Mockito.any(Cluster.class))).thenReturn(m_clusterVersionSummary);
+ Mockito.when(m_clusterVersionSummary.getAvailableServiceNames()).thenReturn(m_services.keySet());
+ }
+
+ @Test
+ public void testIsApplicable() throws Exception {
+ final Cluster cluster = Mockito.mock(Cluster.class);
+
+ Mockito.when(cluster.getServices()).thenReturn(m_services);
+ Mockito.when(cluster.getClusterId()).thenReturn(1L);
+ Mockito.when(clusters.getCluster("cluster")).thenReturn(cluster);
+
+ PrereqCheckRequest request = new PrereqCheckRequest("cluster");
+ request.setTargetRepositoryVersion(m_repositoryVersion);
+
+ Assert.assertTrue(lZOCheck.isApplicable(request));
+ }
+
+ @Test
+ public void testPerform() throws Exception {
+ final Cluster cluster = Mockito.mock(Cluster.class);
+ final Map<String, Service> services = new HashMap<>();
+
+ Mockito.when(cluster.getServices()).thenReturn(services);
+ Mockito.when(cluster.getClusterId()).thenReturn(1L);
+ Mockito.when(clusters.getCluster("cluster")).thenReturn(cluster);
+
+ final DesiredConfig desiredConfig = Mockito.mock(DesiredConfig.class);
+ Mockito.when(desiredConfig.getTag()).thenReturn("tag");
+ Map<String, DesiredConfig> configMap = new HashMap<>();
+ configMap.put("core-site", desiredConfig);
+
+ Mockito.when(cluster.getDesiredConfigs()).thenReturn(configMap);
+ final Config config = Mockito.mock(Config.class);
+ Mockito.when(cluster.getConfig(Mockito.anyString(), Mockito.anyString())).thenReturn(config);
+ final Map<String, String> properties = new HashMap<>();
+ Mockito.when(config.getProperties()).thenReturn(properties);
+ Mockito.when(configuration.getGplLicenseAccepted()).thenReturn(false);
+
+ PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+ lZOCheck.perform(check, new PrereqCheckRequest("cluster"));
+ Assert.assertEquals(PrereqCheckStatus.PASS, check.getStatus());
+
+
+ properties.put(LZOCheck.IO_COMPRESSION_CODECS,"test," + LZOCheck.LZO_ENABLE_VALUE);
+ check = new PrerequisiteCheck(null, null);
+ lZOCheck.perform(check, new PrereqCheckRequest("cluster"));
+ Assert.assertEquals(PrereqCheckStatus.WARNING, check.getStatus());
+
+ properties.put(LZOCheck.IO_COMPRESSION_CODECS,"test");
+ check = new PrerequisiteCheck(null, null);
+ lZOCheck.perform(check, new PrereqCheckRequest("cluster"));
+ Assert.assertEquals(PrereqCheckStatus.PASS, check.getStatus());
+
+ properties.put(LZOCheck.LZO_ENABLE_KEY, LZOCheck.LZO_ENABLE_VALUE);
+ check = new PrerequisiteCheck(null, null);
+ lZOCheck.perform(check, new PrereqCheckRequest("cluster"));
+ Assert.assertEquals(PrereqCheckStatus.WARNING, check.getStatus());
+
+ properties.put(LZOCheck.LZO_ENABLE_KEY, LZOCheck.LZO_ENABLE_VALUE);
+ properties.put(LZOCheck.IO_COMPRESSION_CODECS,"test," + LZOCheck.LZO_ENABLE_VALUE);
+ check = new PrerequisiteCheck(null, null);
+ lZOCheck.perform(check, new PrereqCheckRequest("cluster"));
+ Assert.assertEquals(PrereqCheckStatus.WARNING, check.getStatus());
+
+ Mockito.when(configuration.getGplLicenseAccepted()).thenReturn(true);
+ check = new PrerequisiteCheck(null, null);
+ lZOCheck.perform(check, new PrereqCheckRequest("cluster"));
+ Assert.assertEquals(PrereqCheckStatus.PASS, check.getStatus());
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
index f1fc7f5..f2ee106 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
@@ -574,14 +574,15 @@ public class AmbariCustomCommandExecutionHelperTest {
Host host = clusters.getHost("c1-c6401");
AmbariCustomCommandExecutionHelper helper = injector.getInstance(AmbariCustomCommandExecutionHelper.class);
+ RepositoryVersionHelper repoHelper = injector.getInstance(RepositoryVersionHelper.class);
StackDAO stackDAO = injector.getInstance(StackDAO.class);
RepositoryVersionDAO repoVersionDAO = injector.getInstance(RepositoryVersionDAO.class);
ServiceComponentDesiredStateDAO componentDAO = injector.getInstance(ServiceComponentDesiredStateDAO.class);
RepositoryVersionHelper repoVersionHelper = injector.getInstance(RepositoryVersionHelper.class);
- CommandRepository commandRepo = helper.getCommandRepository(cluster, componentRM, host);
+ CommandRepository commandRepo = repoHelper.getCommandRepository(cluster, componentRM, host);
+ Assert.assertEquals(2, commandRepo.getRepositories().size());
- Assert.assertEquals(0, commandRepo.getRepositories().size());
RepositoryInfo ri = new RepositoryInfo();
ri.setBaseUrl("http://foo");
@@ -607,18 +608,18 @@ public class AmbariCustomCommandExecutionHelperTest {
componentEntity.setDesiredRepositoryVersion(repositoryVersion);
componentEntity.addVersion(componentVersionEntity);
- componentEntity = componentDAO.merge(componentEntity);
+ componentDAO.merge(componentEntity);
// !!! make sure the override is set
- commandRepo = helper.getCommandRepository(cluster, componentRM, host);
+ commandRepo = repoHelper.getCommandRepository(cluster, componentRM, host);
Assert.assertEquals(1, commandRepo.getRepositories().size());
CommandRepository.Repository repo = commandRepo.getRepositories().iterator().next();
Assert.assertEquals("http://foo", repo.getBaseUrl());
// verify that ZK has no repositories, since we haven't defined a repo version for ZKC
- commandRepo = helper.getCommandRepository(cluster, componentZKC, host);
- Assert.assertEquals(0, commandRepo.getRepositories().size());
+ commandRepo = repoHelper.getCommandRepository(cluster, componentZKC, host);
+ Assert.assertEquals(2, commandRepo.getRepositories().size());
}
private void createClusterFixture(String clusterName, StackId stackId,
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
index e56bc9b..426aba0 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
@@ -2103,6 +2103,7 @@ public class AmbariManagementControllerImplTest {
expect(configuration.getServerDBName()).andReturn(SERVER_DB_NAME);
expect(configuration.getJavaVersion()).andReturn(8);
expect(configuration.areHostsSysPrepped()).andReturn("true");
+ expect(configuration.getGplLicenseAccepted()).andReturn(false);
expect(configuration.getDatabaseConnectorNames()).andReturn(new HashMap<>()).anyTimes();
expect(configuration.getPreviousDatabaseConnectorNames()).andReturn(new HashMap<>()).anyTimes();
expect(repositoryVersionEntity.getVersion()).andReturn("1234").anyTimes();
@@ -2148,9 +2149,9 @@ public class AmbariManagementControllerImplTest {
f.setAccessible(true);
f.set(helper, gson);
- Map<String, String> defaultHostParams = helper.createDefaultHostParams(cluster, repositoryVersionEntity);
+ Map<String, String> defaultHostParams = helper.createDefaultHostParams(cluster, repositoryVersionEntity.getStackId());
- assertEquals(15, defaultHostParams.size());
+ assertEquals(16, defaultHostParams.size());
assertEquals(MYSQL_JAR, defaultHostParams.get(DB_DRIVER_FILENAME));
assertEquals(SOME_STACK_NAME, defaultHostParams.get(STACK_NAME));
assertEquals(SOME_STACK_VERSION, defaultHostParams.get(STACK_VERSION));
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 92eced2..a0d5815 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -18,6 +18,7 @@
package org.apache.ambari.server.controller;
+
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.createStrictMock;
@@ -155,6 +156,8 @@ import org.apache.ambari.server.utils.StageUtils;
import org.apache.commons.collections.CollectionUtils;
import org.easymock.Capture;
import org.easymock.EasyMock;
+
+
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
@@ -335,11 +338,11 @@ public class AmbariManagementControllerTest {
host.setHostAttributes(hostAttributes);
}
- private void addHost(String hostname) throws AmbariException {
+ private void addHost(String hostname) throws Exception {
addHostToCluster(hostname, null);
}
- private void addHostToCluster(String hostname, String clusterName) throws AmbariException {
+ private void addHostToCluster(String hostname, String clusterName) throws Exception {
if (!clusters.hostExists(hostname)) {
clusters.addHost(hostname);
@@ -352,27 +355,30 @@ public class AmbariManagementControllerTest {
}
}
- private void deleteHost(String hostname) throws AmbariException {
+ private void deleteHost(String hostname) throws Exception {
clusters.deleteHost(hostname);
}
+
+
/**
* Creates a Cluster object, along with its corresponding ClusterVersion based on the stack.
* @param clusterName Cluster name
- * @throws AmbariException
+ * @throws Exception
*/
- private void createCluster(String clusterName) throws AmbariException, AuthorizationException {
+ private void createCluster(String clusterName) throws Exception{
+ RepositoryVersionDAO repoDAO = injector.getInstance(RepositoryVersionDAO.class);
ClusterRequest r = new ClusterRequest(null, clusterName, State.INSTALLED.name(), SecurityType.NONE, "HDP-0.1", null);
controller.createCluster(r);
}
- private void createService(String clusterName, String serviceName, State desiredState) throws AmbariException, AuthorizationException {
+ private void createService(String clusterName, String serviceName, State desiredState) throws Exception, AuthorizationException {
createService(clusterName, serviceName, repositoryVersion02, desiredState);
}
private void createService(String clusterName, String serviceName,
RepositoryVersionEntity repositoryVersion, State desiredState)
- throws AmbariException, AuthorizationException {
+ throws Exception, AuthorizationException {
String dStateStr = null;
if (desiredState != null) {
dStateStr = desiredState.toString();
@@ -390,7 +396,7 @@ public class AmbariManagementControllerTest {
private void createServiceComponent(String clusterName,
String serviceName, String componentName, State desiredState)
- throws AmbariException, AuthorizationException {
+ throws Exception, AuthorizationException {
String dStateStr = null;
if (desiredState != null) {
dStateStr = desiredState.toString();
@@ -405,7 +411,7 @@ public class AmbariManagementControllerTest {
private void createServiceComponentHost(String clusterName,
String serviceName, String componentName, String hostname,
- State desiredState) throws AmbariException, AuthorizationException {
+ State desiredState) throws Exception, AuthorizationException {
String dStateStr = null;
if (desiredState != null) {
dStateStr = desiredState.toString();
@@ -420,7 +426,7 @@ public class AmbariManagementControllerTest {
private void deleteServiceComponentHost(String clusterName,
String serviceName, String componentName, String hostname,
- State desiredState) throws AmbariException, AuthorizationException {
+ State desiredState) throws Exception, AuthorizationException {
String dStateStr = null;
if (desiredState != null) {
dStateStr = desiredState.toString();
@@ -435,7 +441,7 @@ public class AmbariManagementControllerTest {
private Long createConfigGroup(Cluster cluster, String serviceName, String name, String tag,
List<String> hosts, List<Config> configs)
- throws AmbariException {
+ throws Exception {
Map<Long, Host> hostMap = new HashMap<>();
Map<String, Config> configMap = new HashMap<>();
@@ -461,7 +467,7 @@ public class AmbariManagementControllerTest {
private long stopService(String clusterName, String serviceName,
boolean runSmokeTests, boolean reconfigureClients) throws
- AmbariException, AuthorizationException {
+ AmbariException, AuthorizationException {
ServiceRequest r = new ServiceRequest(clusterName, "", serviceName, null, State.INSTALLED.toString(), null);
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r);
@@ -518,7 +524,7 @@ public class AmbariManagementControllerTest {
private long startService(String clusterName, String serviceName,
boolean runSmokeTests, boolean reconfigureClients) throws
- AmbariException, AuthorizationException {
+ Exception, AuthorizationException {
return startService(clusterName, serviceName, runSmokeTests, reconfigureClients, null);
}
@@ -567,7 +573,7 @@ public class AmbariManagementControllerTest {
private long installService(String clusterName, String serviceName,
boolean runSmokeTests, boolean reconfigureClients)
- throws AmbariException, AuthorizationException {
+ throws Exception, AuthorizationException {
return installService(clusterName, serviceName, runSmokeTests, reconfigureClients, null, null);
}
@@ -580,7 +586,7 @@ public class AmbariManagementControllerTest {
boolean runSmokeTests, boolean reconfigureClients,
MaintenanceStateHelper maintenanceStateHelper,
Map<String, String> mapRequestPropsInput)
- throws AmbariException, AuthorizationException {
+ throws Exception, AuthorizationException {
ServiceRequest r = new ServiceRequest(clusterName, "", serviceName, repositoryVersion02.getId(),
State.INSTALLED.toString(), null);
@@ -620,7 +626,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testCreateClusterSimple() throws AmbariException, AuthorizationException {
+ public void testCreateClusterSimple() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
Set<ClusterResponse> r =
@@ -633,13 +639,13 @@ public class AmbariManagementControllerTest {
try {
createCluster(cluster1);
fail("Duplicate cluster creation should fail");
- } catch (AmbariException e) {
+ } catch (Exception e) {
// Expected
}
}
@Test
- public void testCreateClusterWithHostMapping() throws AmbariException, AuthorizationException {
+ public void testCreateClusterWithHostMapping() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
String host1 = getUniqueName();
@@ -699,7 +705,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testCreateServicesSimple() throws AmbariException, AuthorizationException {
+ public void testCreateServicesSimple() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
String serviceName = "HDFS";
@@ -724,7 +730,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testCreateServicesWithInvalidRequest() throws AmbariException, AuthorizationException {
+ public void testCreateServicesWithInvalidRequest() throws Exception, AuthorizationException {
// invalid request
// dups in requests
// multi cluster updates
@@ -757,7 +763,7 @@ public class AmbariManagementControllerTest {
set1.add(rInvalid);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
fail("Expected failure for invalid cluster");
- } catch (AmbariException e) {
+ } catch (Exception e) {
// Expected
Assert.assertTrue(checkExceptionType(e, ClusterNotFoundException.class));
}
@@ -829,7 +835,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testCreateServiceWithInvalidInfo() throws AmbariException, AuthorizationException {
+ public void testCreateServiceWithInvalidInfo() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
String serviceName = "HDFS";
@@ -870,7 +876,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testCreateServicesMultiple() throws AmbariException, AuthorizationException {
+ public void testCreateServicesMultiple() throws Exception, AuthorizationException {
Set<ServiceRequest> set1 = new HashSet<>();
String cluster1 = getUniqueName();
@@ -890,7 +896,7 @@ public class AmbariManagementControllerTest {
set1.add(valid2);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
fail("Expected failure for invalid services");
- } catch (AmbariException e) {
+ } catch (Exception e) {
// Expected
Assert.assertTrue(checkExceptionType(e, DuplicateResourceException.class));
}
@@ -902,7 +908,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testCreateServiceComponentSimple() throws AmbariException, AuthorizationException {
+ public void testCreateServiceComponentSimple() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
String serviceName = "HDFS";
@@ -943,7 +949,7 @@ public class AmbariManagementControllerTest {
@Test
public void testCreateServiceComponentWithInvalidRequest()
- throws AmbariException, AuthorizationException {
+ throws Exception, AuthorizationException {
// multiple clusters
// dup objects
// existing components
@@ -1321,8 +1327,6 @@ public class AmbariManagementControllerTest {
RoleCommand.START);
assertEquals(cluster1, ec.getClusterName());
assertNotNull(ec.getCommandParams());
- assertTrue(ec.getCommandParams().containsKey("custom_folder"));
- assertEquals("dashboards", ec.getCommandParams().get("custom_folder"));
assertNotNull(ec.getHostLevelParams());
assertTrue(ec.getHostLevelParams().containsKey(ExecutionCommand.KeyNames.USER_LIST));
assertEquals("[\"myhdfsuser\"]", ec.getHostLevelParams().get(ExecutionCommand.KeyNames.USER_LIST));
@@ -1333,7 +1337,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testCreateServiceComponentMultiple() throws AmbariException, AuthorizationException {
+ public void testCreateServiceComponentMultiple() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
String cluster2 = getUniqueName();
@@ -1381,7 +1385,7 @@ public class AmbariManagementControllerTest {
}
private void createServiceComponentHostSimple(String clusterName, String host1,
- String host2) throws AmbariException, AuthorizationException {
+ String host2) throws Exception, AuthorizationException {
createCluster(clusterName);
clusters.getCluster(clusterName)
@@ -1480,7 +1484,7 @@ public class AmbariManagementControllerTest {
@Test
public void testCreateServiceComponentHostMultiple()
- throws AmbariException, AuthorizationException {
+ throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
String serviceName = "HDFS";
@@ -1538,7 +1542,7 @@ public class AmbariManagementControllerTest {
@Test
public void testCreateServiceComponentHostWithInvalidRequest()
- throws AmbariException, AuthorizationException {
+ throws Exception, AuthorizationException {
// multiple clusters
// dup objects
// existing components
@@ -1665,7 +1669,7 @@ public class AmbariManagementControllerTest {
set1.add(rInvalid);
controller.createHostComponents(set1);
fail("Expected failure for invalid service");
- } catch (AmbariException e) {
+ } catch (Exception e) {
// Expected
}
@@ -1684,7 +1688,7 @@ public class AmbariManagementControllerTest {
set1.add(rInvalid);
controller.createHostComponents(set1);
fail("Expected failure for invalid host");
- } catch (AmbariException e) {
+ } catch (Exception e) {
// Expected
}
@@ -1711,7 +1715,7 @@ public class AmbariManagementControllerTest {
set1.add(rInvalid);
controller.createHostComponents(set1);
fail("Expected failure for invalid host cluster mapping");
- } catch (AmbariException e) {
+ } catch (Exception e) {
// Expected
}
@@ -1800,7 +1804,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testCreateHostSimple() throws AmbariException, AuthorizationException {
+ public void testCreateHostSimple() throws Exception {
String cluster1 = getUniqueName();
String host1 = getUniqueName();
String host2 = getUniqueName();
@@ -1829,7 +1833,7 @@ public class AmbariManagementControllerTest {
try {
HostResourceProviderTest.createHosts(controller, requests);
fail("Create host should fail for invalid clusters");
- } catch (AmbariException e) {
+ } catch (Exception e) {
// Expected
}
@@ -1853,7 +1857,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testCreateHostMultiple() throws AmbariException, AuthorizationException {
+ public void testCreateHostMultiple() throws Exception {
String host1 = getUniqueName();
String host2 = getUniqueName();
String host3 = getUniqueName();
@@ -1889,7 +1893,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testCreateHostWithInvalidRequests() throws AmbariException {
+ public void testCreateHostWithInvalidRequests() throws Exception {
// unknown host
// invalid clusters
// duplicate host
@@ -2200,7 +2204,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testGetClusters() throws AmbariException, AuthorizationException {
+ public void testGetClusters() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
clusters.addCluster(cluster1, new StackId("HDP-0.1"));
@@ -2231,7 +2235,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testGetClustersWithFilters() throws AmbariException, AuthorizationException {
+ public void testGetClustersWithFilters() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
String cluster2 = getUniqueName();
String cluster3 = getUniqueName();
@@ -2262,7 +2266,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testGetServices() throws AmbariException {
+ public void testGetServices() throws Exception {
String cluster1 = getUniqueName();
StackId stackId = new StackId("HDP-0.1");
@@ -2291,7 +2295,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testGetServicesWithFilters() throws AmbariException {
+ public void testGetServicesWithFilters() throws Exception {
String cluster1 = getUniqueName();
String cluster2 = getUniqueName();
@@ -2371,7 +2375,7 @@ public class AmbariManagementControllerTest {
@Test
- public void testGetServiceComponents() throws AmbariException {
+ public void testGetServiceComponents() throws Exception {
String cluster1 = getUniqueName();
StackId stackId = new StackId("HDP-0.2");
@@ -2408,7 +2412,7 @@ public class AmbariManagementControllerTest {
@Test
- public void testGetServiceComponentsWithFilters() throws AmbariException {
+ public void testGetServiceComponentsWithFilters() throws Exception {
String cluster1 = getUniqueName();
String cluster2 = getUniqueName();
@@ -2529,7 +2533,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testGetServiceComponentHosts() throws AmbariException, AuthorizationException {
+ public void testGetServiceComponentHosts() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
String host1 = getUniqueName();
@@ -2575,7 +2579,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testGetServiceComponentHostsWithStaleConfigFilter() throws AmbariException, AuthorizationException {
+ public void testGetServiceComponentHostsWithStaleConfigFilter() throws Exception, AuthorizationException {
final String host1 = getUniqueName();
final String host2 = getUniqueName();
@@ -2763,7 +2767,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testHbaseDecommission() throws AmbariException, AuthorizationException {
+ public void testHbaseDecommission() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
clusters.getCluster(cluster1).setDesiredStackVersion(new StackId("HDP-2.0.7"));
@@ -2905,7 +2909,7 @@ public class AmbariManagementControllerTest {
}
private Cluster setupClusterWithHosts(String clusterName, String stackId, List<String> hosts,
- String osType) throws AmbariException, AuthorizationException {
+ String osType) throws Exception, AuthorizationException {
ClusterRequest r = new ClusterRequest(null, clusterName, stackId, null);
controller.createCluster(r);
Cluster c1 = clusters.getCluster(clusterName);
@@ -2916,7 +2920,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testGetServiceComponentHostsWithFilters() throws AmbariException, AuthorizationException {
+ public void testGetServiceComponentHostsWithFilters() throws Exception, AuthorizationException {
final String cluster1 = getUniqueName();
final String host1 = getUniqueName();
final String host2 = getUniqueName();
@@ -3065,7 +3069,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testGetHosts() throws AmbariException, AuthorizationException {
+ public void testGetHosts() throws Exception, AuthorizationException {
final String cluster1 = getUniqueName();
final String cluster2 = getUniqueName();
final String host1 = getUniqueName();
@@ -3139,7 +3143,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testServiceUpdateBasic() throws AmbariException, AuthorizationException {
+ public void testServiceUpdateBasic() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
String serviceName = "HDFS";
@@ -3182,7 +3186,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testServiceUpdateInvalidRequest() throws AmbariException, AuthorizationException {
+ public void testServiceUpdateInvalidRequest() throws Exception, AuthorizationException {
// multiple clusters
// dup services
// multiple diff end states
@@ -3264,7 +3268,7 @@ public class AmbariManagementControllerTest {
}
@Ignore("Something fishy with the stacks here that's causing the RCO to be loaded incorrectly")
- public void testServiceUpdateRecursive() throws AmbariException, AuthorizationException {
+ public void testServiceUpdateRecursive() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
@@ -3519,7 +3523,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testServiceComponentUpdateRecursive() throws AmbariException, AuthorizationException {
+ public void testServiceComponentUpdateRecursive() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
@@ -4057,7 +4061,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testComponentCategorySentWithRestart() throws AmbariException, AuthorizationException {
+ public void testComponentCategorySentWithRestart() throws Exception, AuthorizationException {
final String cluster1 = getUniqueName();
final String host1 = getUniqueName();
@@ -4423,7 +4427,7 @@ public class AmbariManagementControllerTest {
try {
RequestStatusResponse response = controller.createAction(actionRequest, requestProperties);
Assert.fail("createAction should fail");
- } catch (AmbariException ex) {
+ } catch (Exception ex) {
LOG.info(ex.getMessage());
if (!ex.getMessage().contains(message)) {
fail(String.format("Expected '%s' to contain '%s'", ex.getMessage(), message));
@@ -5121,7 +5125,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testReConfigureServiceClient() throws AmbariException, AuthorizationException {
+ public void testReConfigureServiceClient() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
Cluster cluster = clusters.getCluster(cluster1);
@@ -5403,7 +5407,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testClientServiceSmokeTests() throws AmbariException, AuthorizationException {
+ public void testClientServiceSmokeTests() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
String serviceName = "PIG";
@@ -5497,7 +5501,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testSkipTaskOnUnhealthyHosts() throws AmbariException, AuthorizationException {
+ public void testSkipTaskOnUnhealthyHosts() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
clusters.getCluster(cluster1)
@@ -5632,7 +5636,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testServiceCheckWhenHostIsUnhealthy() throws AmbariException, AuthorizationException {
+ public void testServiceCheckWhenHostIsUnhealthy() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
clusters.getCluster(cluster1).setDesiredStackVersion(new StackId("HDP-0.1"));
@@ -5724,13 +5728,13 @@ public class AmbariManagementControllerTest {
try {
response = controller.createAction(actionRequest, requestProperties);
assertTrue("Exception should have been raised.", false);
- } catch (AmbariException e) {
+ } catch (Exception e) {
assertTrue(e.getMessage().contains("there were no healthy eligible hosts"));
}
}
@Test
- public void testReInstallForInstallFailedClient() throws AmbariException, AuthorizationException {
+ public void testReInstallForInstallFailedClient() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
clusters.getCluster(cluster1)
@@ -5859,7 +5863,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testReInstallClientComponentFromServiceChange() throws AmbariException, AuthorizationException {
+ public void testReInstallClientComponentFromServiceChange() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
clusters.getCluster(cluster1)
@@ -6096,7 +6100,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testResourceFiltersWithCustomActions() throws AmbariException, AuthorizationException {
+ public void testResourceFiltersWithCustomActions() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
final String host1 = getUniqueName();
final String host2 = getUniqueName();
@@ -6168,7 +6172,7 @@ public class AmbariManagementControllerTest {
RequestStatusResponse response = null;
try {
response = controller.createAction(actionRequest, requestProperties);
- } catch (AmbariException ae) {
+ } catch (Exception ae) {
LOG.info("Expected exception.", ae);
Assert.assertTrue(ae.getMessage().contains("Custom action definition only " +
"allows one resource filter to be specified"));
@@ -6199,7 +6203,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testResourceFiltersWithCustomCommands() throws AmbariException, AuthorizationException {
+ public void testResourceFiltersWithCustomCommands() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
final String host1 = getUniqueName();
final String host2 = getUniqueName();
@@ -6353,7 +6357,7 @@ public class AmbariManagementControllerTest {
@Test
- public void testConfigsAttachedToServiceChecks() throws AmbariException, AuthorizationException {
+ public void testConfigsAttachedToServiceChecks() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
Cluster cluster = clusters.getCluster(cluster1);
@@ -6435,7 +6439,7 @@ public class AmbariManagementControllerTest {
@Test
@Ignore("Unsuported feature !")
- public void testConfigsAttachedToServiceNotCluster() throws AmbariException, AuthorizationException {
+ public void testConfigsAttachedToServiceNotCluster() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
clusters.getCluster(cluster1).setDesiredStackVersion(new StackId("HDP-0.1"));
@@ -6515,7 +6519,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testHostLevelParamsSentWithCommands() throws AmbariException, AuthorizationException {
+ public void testHostLevelParamsSentWithCommands() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
String serviceName = "PIG";
@@ -6572,7 +6576,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testConfigGroupOverridesWithHostActions() throws AmbariException, AuthorizationException {
+ public void testConfigGroupOverridesWithHostActions() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
Cluster cluster = clusters.getCluster(cluster1);
@@ -6729,7 +6733,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testConfigGroupOverridesWithDecommissionDatanode() throws AmbariException, AuthorizationException {
+ public void testConfigGroupOverridesWithDecommissionDatanode() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
Cluster cluster = clusters.getCluster(cluster1);
@@ -6834,7 +6838,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testConfigGroupOverridesWithServiceCheckActions() throws AmbariException, AuthorizationException {
+ public void testConfigGroupOverridesWithServiceCheckActions() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
createCluster(cluster1);
Cluster cluster = clusters.getCluster(cluster1);
@@ -7323,7 +7327,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testUpdateClusterUpgradabilityCheck() throws AmbariException, AuthorizationException {
+ public void testUpdateClusterUpgradabilityCheck() throws Exception, AuthorizationException {
String cluster1 = getUniqueName();
StackId currentStackId = new StackId("HDP-0.2");
@@ -7336,7 +7340,7 @@ public class AmbariManagementControllerTest {
ClusterRequest r = new ClusterRequest(c.getClusterId(), cluster1, "HDP-0.3", null);
try {
controller.updateClusters(Collections.singleton(r), mapRequestProps);
- } catch (AmbariException e) {
+ } catch (Exception e) {
Assert.assertTrue(e.getMessage().contains("Illegal request to upgrade to"));
}
@@ -7347,7 +7351,7 @@ public class AmbariManagementControllerTest {
r = new ClusterRequest(c.getClusterId(), cluster1, "HDP-0.2", null);
try {
controller.updateClusters(Collections.singleton(r), mapRequestProps);
- } catch (AmbariException e) {
+ } catch (Exception e) {
Assert.assertTrue(e.getMessage().contains("Upgrade is not allowed from"));
}
}
@@ -7615,7 +7619,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testGetTasksByRequestId() throws AmbariException {
+ public void testGetTasksByRequestId() throws Exception {
ActionManager am = injector.getInstance(ActionManager.class);
final long requestId1 = am.getNextRequestId();
@@ -8251,13 +8255,13 @@ public class AmbariManagementControllerTest {
RootServiceRequest request = new RootServiceRequest(null);
Set<RootServiceResponse> responses = controller.getRootServices(Collections.singleton(request));
- Assert.assertEquals(RootServiceResponseFactory.Services.values().length, responses.size());
+ Assert.assertEquals(RootService.values().length, responses.size());
- RootServiceRequest requestWithParams = new RootServiceRequest(RootServiceResponseFactory.Services.AMBARI.toString());
+ RootServiceRequest requestWithParams = new RootServiceRequest(RootService.AMBARI.toString());
Set<RootServiceResponse> responsesWithParams = controller.getRootServices(Collections.singleton(requestWithParams));
Assert.assertEquals(1, responsesWithParams.size());
for (RootServiceResponse responseWithParams: responsesWithParams) {
- Assert.assertEquals(responseWithParams.getServiceName(), RootServiceResponseFactory.Services.AMBARI.toString());
+ Assert.assertEquals(responseWithParams.getServiceName(), RootService.AMBARI.toString());
}
RootServiceRequest invalidRequest = new RootServiceRequest(NON_EXT_VALUE);
@@ -8271,18 +8275,18 @@ public class AmbariManagementControllerTest {
@Test
public void testGetRootServiceComponents() throws Exception {
- RootServiceComponentRequest request = new RootServiceComponentRequest(RootServiceResponseFactory.Services.AMBARI.toString(), null);
+ RootServiceComponentRequest request = new RootServiceComponentRequest(RootService.AMBARI.toString(), null);
Set<RootServiceComponentResponse> responses = controller.getRootServiceComponents(Collections.singleton(request));
- Assert.assertEquals(RootServiceResponseFactory.Services.AMBARI.getComponents().length, responses.size());
+ Assert.assertEquals(RootService.AMBARI.getComponents().length, responses.size());
RootServiceComponentRequest requestWithParams = new RootServiceComponentRequest(
- RootServiceResponseFactory.Services.AMBARI.toString(),
- RootServiceResponseFactory.Services.AMBARI.getComponents()[0].toString());
+ RootService.AMBARI.toString(),
+ RootService.AMBARI.getComponents()[0].toString());
Set<RootServiceComponentResponse> responsesWithParams = controller.getRootServiceComponents(Collections.singleton(requestWithParams));
Assert.assertEquals(1, responsesWithParams.size());
for (RootServiceComponentResponse responseWithParams: responsesWithParams) {
- Assert.assertEquals(responseWithParams.getComponentName(), RootServiceResponseFactory.Services.AMBARI.getComponents()[0].toString());
+ Assert.assertEquals(responseWithParams.getComponentName(), RootService.AMBARI.getComponents()[0].toString());
}
RootServiceComponentRequest invalidRequest = new RootServiceComponentRequest(NON_EXT_VALUE, NON_EXT_VALUE);
@@ -8360,7 +8364,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testExecutionCommandConfiguration() throws AmbariException {
+ public void testExecutionCommandConfiguration() throws Exception {
Map<String, Map<String, String>> config = new HashMap<>();
config.put("type1", new HashMap<>());
config.put("type3", new HashMap<>());
@@ -8403,7 +8407,7 @@ public class AmbariManagementControllerTest {
String tag = "version1";
String type = "core-site";
- AmbariException exception = null;
+ Exception exception = null;
try {
AmbariManagementController amc = injector.getInstance(AmbariManagementController.class);
Clusters clusters = injector.getInstance(Clusters.class);
@@ -8435,7 +8439,7 @@ public class AmbariManagementControllerTest {
amc.createConfiguration(configurationRequest);
amc.createConfiguration(configurationRequest);
- } catch (AmbariException e) {
+ } catch (Exception e) {
exception = e;
}
@@ -9093,7 +9097,7 @@ public class AmbariManagementControllerTest {
private void testRunSmokeTestFlag(Map<String, String> mapRequestProps,
AmbariManagementController amc,
Set<ServiceRequest> serviceRequests)
- throws AmbariException, AuthorizationException {
+ throws Exception, AuthorizationException {
RequestStatusResponse response;//Starting HDFS service. No run_smoke_test flag is set, smoke
String cluster1 = getUniqueName();
@@ -9917,7 +9921,7 @@ public class AmbariManagementControllerTest {
try {
controller.updateClusters(Collections.singleton(cr), new HashMap<>());
Assert.fail("Expect failure when creating a config that exists");
- } catch (AmbariException e) {
+ } catch (Exception e) {
// expected
}
}
@@ -10021,7 +10025,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testConfigAttributesStaleConfigFilter() throws AmbariException, AuthorizationException {
+ public void testConfigAttributesStaleConfigFilter() throws Exception, AuthorizationException {
final String host1 = getUniqueName();
final String host2 = getUniqueName();
@@ -10122,7 +10126,7 @@ public class AmbariManagementControllerTest {
}
@Test
- public void testSecretReferences() throws AmbariException, AuthorizationException {
+ public void testSecretReferences() throws Exception, AuthorizationException {
final String host1 = getUniqueName();
final String host2 = getUniqueName();
@@ -10211,7 +10215,7 @@ public class AmbariManagementControllerTest {
try {
controller.updateClusters(Collections.singleton(crReq), null);
fail("Request need to be failed with wrong secret reference");
- } catch (AmbariException e){
+ } catch (Exception e){
}
// reference to config which does not contain requested property
@@ -10240,7 +10244,7 @@ public class AmbariManagementControllerTest {
try {
controller.updateClusters(Collections.singleton(crReq), null);
fail("Request need to be failed with wrong secret reference");
- } catch (AmbariException e) {
+ } catch (Exception e) {
assertEquals("Error when parsing secret reference. Cluster: " + cluster1 + " ConfigType: hdfs-site ConfigVersion: 4 does not contain property 'test.password'",
e.getMessage());
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index 04de1fb..0f99b08 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -189,7 +189,7 @@ public class KerberosHelperTest extends EasyMockSupport {
}
@Override
- public boolean principalExists(String principal) throws KerberosOperationException {
+ public boolean principalExists(String principal, boolean service) throws KerberosOperationException {
return "principal".equals(principal);
}
@@ -199,12 +199,12 @@ public class KerberosHelperTest extends EasyMockSupport {
}
@Override
- public Integer setPrincipalPassword(String principal, String password) throws KerberosOperationException {
+ public Integer setPrincipalPassword(String principal, String password, boolean service) throws KerberosOperationException {
return null;
}
@Override
- public boolean removePrincipal(String principal) throws KerberosOperationException {
+ public boolean removePrincipal(String principal, boolean service) throws KerberosOperationException {
return false;
}
@@ -3458,6 +3458,8 @@ public class KerberosHelperTest extends EasyMockSupport {
if (managingIdentities) {
final Host host = createMockHost("host1");
+ expect(host.getHostId()).andReturn(1l).anyTimes();
+
expect(cluster.getHosts()).andReturn(Collections.singleton(host)).anyTimes();
final ServiceComponentHost schKerberosClient = createMock(ServiceComponentHost.class);
@@ -3465,6 +3467,7 @@ public class KerberosHelperTest extends EasyMockSupport {
expect(schKerberosClient.getServiceComponentName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
expect(schKerberosClient.getHostName()).andReturn("host1").anyTimes();
expect(schKerberosClient.getState()).andReturn(State.INSTALLED).anyTimes();
+ expect(schKerberosClient.getHost()).andReturn(host).anyTimes();
final ServiceComponent serviceComponentKerberosClient = createNiceMock(ServiceComponent.class);
expect(serviceComponentKerberosClient.getName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
@@ -3624,12 +3627,15 @@ public class KerberosHelperTest extends EasyMockSupport {
private void testDeleteTestIdentity(final PrincipalKeyCredential PrincipalKeyCredential) throws Exception {
KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
+ Host host1 = createMock(Host.class);
+ expect(host1.getHostId()).andReturn(1l).anyTimes();
final ServiceComponentHost schKerberosClient = createMock(ServiceComponentHost.class);
expect(schKerberosClient.getServiceName()).andReturn(Service.Type.KERBEROS.name()).anyTimes();
expect(schKerberosClient.getServiceComponentName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
expect(schKerberosClient.getHostName()).andReturn("host1").anyTimes();
expect(schKerberosClient.getState()).andReturn(State.INSTALLED).anyTimes();
+ expect(schKerberosClient.getHost()).andReturn(host1).anyTimes();
final ServiceComponentHost sch1 = createMock(ServiceComponentHost.class);
expect(sch1.getServiceName()).andReturn("SERVICE1").anyTimes();