You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by yu...@apache.org on 2016/02/12 19:30:47 UTC
[01/19] ambari git commit: Revert "AMBARI-14839 - DEA is not enabled
due to configuration conflict and history log directory can not be changed
(Jeff Zhang via jonathanhurley)"
Repository: ambari
Updated Branches:
refs/heads/2.2.1-maint [created] 7bd0a8777
Revert "AMBARI-14839 - DEA is not enabled due to configuration conflict and history log directory can not be changed (Jeff Zhang via jonathanhurley)"
This reverts commit 981ede5455c02ae1bd4aac7495c2c667bc889dbc.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c2d27f33
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c2d27f33
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c2d27f33
Branch: refs/heads/2.2.1-maint
Commit: c2d27f335110f6f2d880b8959444e81666d0fe77
Parents: 5179e5a
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Jan 29 11:26:44 2016 -0500
Committer: Mahadev Konar <ma...@apache.org>
Committed: Sun Jan 31 18:13:56 2016 -0800
----------------------------------------------------------------------
.../SPARK/1.2.0.2.2/package/scripts/params.py | 4 +--
.../SPARK/configuration/spark-defaults.xml | 4 +--
.../configuration/spark-thrift-sparkconf.xml | 31 ++++----------------
3 files changed, 8 insertions(+), 31 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c2d27f33/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index a25db6f..a681a5c 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -79,9 +79,7 @@ hive_user = status_params.hive_user
spark_group = status_params.spark_group
user_group = status_params.user_group
spark_hdfs_user_dir = format("/user/{spark_user}")
-spark_history_dir = "hdfs:///spark-history"
-if 'spark-defaults' in config['configurations'] and 'spark.history.fs.logDirectory' in config['configurations']['spark-defaults']:
- spark_history_dir = config['configurations']['spark-defaults']['spark.history.fs.logDirectory']
+spark_history_dir = 'hdfs:///spark-history'
spark_history_server_pid_file = status_params.spark_history_server_pid_file
spark_thrift_server_pid_file = status_params.spark_thrift_server_pid_file
http://git-wip-us.apache.org/repos/asf/ambari/blob/c2d27f33/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
index 1a6552f..d8af790 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
@@ -31,7 +31,7 @@
</property>
<property>
<name>spark.history.fs.logDirectory</name>
- <value>hdfs:///spark-history</value>
+ <value>{{spark_history_dir}}</value>
<description>
Base directory for history spark application log.
</description>
@@ -45,7 +45,7 @@
</property>
<property>
<name>spark.eventLog.dir</name>
- <value>hdfs:///spark-history</value>
+ <value>{{spark_history_dir}}</value>
<description>
Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
</description>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c2d27f33/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
index 2dbfe51..3b13496 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
@@ -75,17 +75,14 @@
<property>
<name>spark.history.fs.logDirectory</name>
<value>{{spark_history_dir}}</value>
- <final>true</final>
<description>
- Base directory for history spark application log. It is the same value
- as in spark-defaults.xml.
+ Base directory for history spark application log.
</description>
</property>
<property>
<name>spark.eventLog.enabled</name>
<value>true</value>
- <final>true</final>
<description>
Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
</description>
@@ -94,10 +91,8 @@
<property>
<name>spark.eventLog.dir</name>
<value>{{spark_history_dir}}</value>
- <final>true</final>
<description>
- Base directory in which Spark events are logged, if spark.eventLog.enabled is true. It is the same value
- as in spark-defaults.xml.
+ Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
</description>
</property>
@@ -143,26 +138,10 @@
</property>
<property>
- <name>spark.dynamicAllocation.initialExecutors</name>
- <value>0</value>
+ <name>spark.executor.instances</name>
+ <value>2</value>
<description>
- Initial number of executors to run if dynamic allocation is enabled.
- </description>
- </property>
-
- <property>
- <name>spark.dynamicAllocation.maxExecutors</name>
- <value>10</value>
- <description>
- Upper bound for the number of executors if dynamic allocation is enabled.
- </description>
- </property>
-
- <property>
- <name>spark.dynamicAllocation.minExecutors</name>
- <value>0</value>
- <description>
- Lower bound for the number of executors if dynamic allocation is enabled.
+ The number of executor.
</description>
</property>
[11/19] ambari git commit: AMBARI-14973. ambari-agent upstart script
restart triggers the restart of hbase specific JVM processes (aonishuk)
Posted by yu...@apache.org.
AMBARI-14973. ambari-agent upstart script restart triggers the restart of hbase specific JVM processes (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9df03397
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9df03397
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9df03397
Branch: refs/heads/2.2.1-maint
Commit: 9df033974579d1aad7cf80b788689c2c91579869
Parents: 1f4e333
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Feb 9 17:43:54 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Feb 9 17:43:54 2016 +0200
----------------------------------------------------------------------
ambari-agent/src/main/python/ambari_agent/PythonExecutor.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/9df03397/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py b/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
index 350c568..cc08127 100644
--- a/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
+++ b/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
@@ -150,6 +150,9 @@ class PythonExecutor(object):
else:
structured_out = {}
return out, error, structured_out
+
+ def preexec_fn(self):
+ os.setpgid(0, 0)
def launch_python_subprocess(self, command, tmpout, tmperr):
"""
@@ -165,7 +168,7 @@ class PythonExecutor(object):
return subprocess.Popen(command,
stdout=tmpout,
- stderr=tmperr, close_fds=close_fds, env=command_env)
+ stderr=tmperr, close_fds=close_fds, env=command_env, preexec_fn=self.preexec_fn)
def isSuccessfull(self, returncode):
return not self.python_process_has_been_killed and returncode == 0
[18/19] ambari git commit: AMBARI-15007. Make amazon2015 to be part
of redhat6 family (aonishuk)
Posted by yu...@apache.org.
AMBARI-15007. Make amazon2015 to be part of redhat6 family (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/abd26b23
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/abd26b23
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/abd26b23
Branch: refs/heads/2.2.1-maint
Commit: abd26b2353751c9ead51672d65b7a781ec987139
Parents: fac9f36
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Fri Feb 12 13:16:43 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Fri Feb 12 13:16:43 2016 +0200
----------------------------------------------------------------------
ambari-agent/conf/unix/ambari-agent | 8 +-
ambari-agent/conf/unix/install-helper.sh | 26 +++-
ambari-agent/pom.xml | 23 ----
.../src/main/python/ambari_commons/os_check.py | 92 ++++++++++---
.../ambari_commons/resources/os_family.json | 129 ++++++++++---------
.../core/providers/__init__.py | 3 +
.../libraries/providers/__init__.py | 3 +
ambari-common/src/main/unix/ambari-python-wrap | 8 +-
ambari-server/conf/unix/ambari-env.sh | 1 +
ambari-server/conf/unix/install-helper.sh | 24 +++-
ambari-server/pom.xml | 12 --
ambari-server/sbin/ambari-server | 7 +-
.../server/state/stack/JsonOsFamilyRoot.java | 38 ++++++
.../ambari/server/state/stack/OsFamily.java | 8 +-
.../main/python/ambari_server/serverSetup.py | 17 ---
.../src/main/python/ambari_server/utils.py | 3 +-
ambari-server/src/main/python/bootstrap.py | 4 +-
.../AMBARI_METRICS/0.1.0/metainfo.xml | 2 +-
.../KERBEROS/1.10.3-10/metainfo.xml | 2 +-
.../HDP/2.3/services/ACCUMULO/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/ATLAS/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/FALCON/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/FLUME/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/HBASE/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/HDFS/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/HIVE/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/KAFKA/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/KNOX/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/OOZIE/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/PIG/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/RANGER/metainfo.xml | 2 +-
.../HDP/2.3/services/RANGER_KMS/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/SLIDER/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/SPARK/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/SQOOP/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/STORM/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/TEZ/metainfo.xml | 2 +-
.../stacks/HDP/2.3/services/YARN/metainfo.xml | 4 +-
.../HDP/2.3/services/ZOOKEEPER/metainfo.xml | 2 +-
.../HDP/2.4/services/ACCUMULO/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/ATLAS/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/FALCON/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/FLUME/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/HBASE/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/HDFS/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/HIVE/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/KAFKA/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/KNOX/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/OOZIE/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/PIG/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/RANGER/metainfo.xml | 2 +-
.../HDP/2.4/services/RANGER_KMS/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/SLIDER/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/SPARK/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/SQOOP/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/STORM/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/TEZ/metainfo.xml | 2 +-
.../stacks/HDP/2.4/services/YARN/metainfo.xml | 4 +-
.../HDP/2.4/services/ZOOKEEPER/metainfo.xml | 2 +-
ambari-server/src/test/python/TestOSCheck.py | 37 ++++--
ambari-server/src/test/resources/os_family.json | 89 +++++++------
61 files changed, 368 insertions(+), 252 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-agent/conf/unix/ambari-agent
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/ambari-agent b/ambari-agent/conf/unix/ambari-agent
index 27ade60..31e4100 100755
--- a/ambari-agent/conf/unix/ambari-agent
+++ b/ambari-agent/conf/unix/ambari-agent
@@ -40,7 +40,7 @@ export AMBARI_CONF_DIR=/etc/ambari-server/conf:$PATH
export PYTHONPATH=/usr/lib/python2.6/site-packages:$PYTHONPATH
AMBARI_AGENT=ambari-agent
-PYTHON_WRAP=/var/lib/ambari-agent/ambari-python-wrap
+PYTHON_WRAP=/usr/bin/ambari-python-wrap
PIDDIR=/var/run/ambari-agent
PIDFILE=$PIDDIR/$AMBARI_AGENT.pid
OUTFILE=/var/log/ambari-agent/ambari-agent.out
@@ -81,10 +81,10 @@ change_files_permissions() {
ambari-sudo.sh chown $current_user "/usr/lib/ambari-agent/"
}
-# fills $PYTHON
-. $PYTHON_WRAP -V 2>/dev/null
-export PYTHON=$PYTHON
+if [ -z "$PYTHON" ] ; then
+ export PYTHON=`readlink $PYTHON_WRAP`
+fi
# Trying to read the passphrase from an environment
if [ ! -z $AMBARI_PASSPHRASE ]; then
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-agent/conf/unix/install-helper.sh
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/install-helper.sh b/ambari-agent/conf/unix/install-helper.sh
index 35c67fb..442ca1b 100644
--- a/ambari-agent/conf/unix/install-helper.sh
+++ b/ambari-agent/conf/unix/install-helper.sh
@@ -30,7 +30,6 @@ JINJA_AGENT_DIR="/usr/lib/ambari-agent/lib/ambari_jinja2"
SIMPLEJSON_AGENT_DIR="/usr/lib/ambari-agent/lib/ambari_simplejson"
PYTHON_WRAPER_TARGET="/usr/bin/ambari-python-wrap"
-PYTHON_WRAPER_SOURCE="/var/lib/ambari-agent/ambari-python-wrap"
do_install(){
# setting ambari_commons shared resource
@@ -50,10 +49,6 @@ do_install(){
if [ ! -d "$SIMPLEJSON_DIR" ]; then
ln -s "$SIMPLEJSON_AGENT_DIR" "$SIMPLEJSON_DIR"
fi
- # setting python-wrapper script
- if [ ! -f "$PYTHON_WRAPER_TARGET" ]; then
- ln -s "$PYTHON_WRAPER_SOURCE" "$PYTHON_WRAPER_TARGET"
- fi
# on nano Ubuntu, when umask=027 those folders are created without 'x' bit for 'others'.
# which causes failures when hadoop users try to access tmp_dir
@@ -61,6 +56,27 @@ do_install(){
chmod 777 /var/lib/ambari-agent/tmp
chmod 700 /var/lib/ambari-agent/data
+
+ # remove old python wrapper
+ rm -f "$PYTHON_WRAPER_TARGET"
+
+ AMBARI_PYTHON=""
+ python_binaries=( "/usr/bin/python" "/usr/bin/python2" "/usr/bin/python2.7", "/usr/bin/python2.6" )
+ for python_binary in "${python_binaries[@]}"
+ do
+ $python_binary -c "import sys ; ver = sys.version_info ; sys.exit(not (ver >= (2,6) and ver<(3,0)))" 1>/dev/null 2>/dev/null
+
+ if [ $? -eq 0 ] ; then
+ AMBARI_PYTHON="$python_binary"
+ break;
+ fi
+ done
+
+ if [ -z "$AMBARI_PYTHON" ] ; then
+ >&2 echo "Cannot detect python for ambari to use. Please manually set $PYTHON_WRAPER link to point to correct python binary"
+ else
+ ln -s "$AMBARI_PYTHON" "$PYTHON_WRAPER_TARGET"
+ fi
}
do_remove(){
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index 63d6044..0015f58 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -237,18 +237,6 @@
<directoryIncluded>false</directoryIncluded>
<sources>
<source>
- <location>../ambari-common/src/main/unix/ambari-python-wrap</location>
- </source>
- </sources>
- </mapping>
- <mapping>
- <directory>/var/lib/ambari-agent/</directory>
- <filemode>755</filemode>
- <username>root</username>
- <groupname>root</groupname>
- <directoryIncluded>false</directoryIncluded>
- <sources>
- <source>
<location>conf/unix/ambari-sudo.sh</location>
</source>
</sources>
@@ -653,17 +641,6 @@
</mapper>
</data>
<data>
- <src>../ambari-common/src/main/unix/ambari-python-wrap</src>
- <type>file</type>
- <mapper>
- <type>perm</type>
- <prefix>/var/lib/ambari-agent</prefix>
- <user>root</user>
- <group>root</group>
- <filemode>755</filemode>
- </mapper>
- </data>
- <data>
<src>conf/unix/ambari-sudo.sh</src>
<type>file</type>
<mapper>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-common/src/main/python/ambari_commons/os_check.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/os_check.py b/ambari-common/src/main/python/ambari_commons/os_check.py
index 904a23b..d30b7f9 100644
--- a/ambari-common/src/main/python/ambari_commons/os_check.py
+++ b/ambari-common/src/main/python/ambari_commons/os_check.py
@@ -18,6 +18,7 @@ See the License for the specific language governing permissions and
limitations under the License.
'''
+import re
import os
import sys
import platform
@@ -55,6 +56,8 @@ RESOURCES_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "resou
# family JSON data
OSFAMILY_JSON_RESOURCE = "os_family.json"
+JSON_OS_MAPPING = "mapping"
+JSON_OS_ALIASES = "aliases"
JSON_OS_TYPE = "distro"
JSON_OS_VERSION = "versions"
JSON_EXTENDS = "extends"
@@ -75,12 +78,29 @@ VER_NT_SERVER = 3
_IS_ORACLE_LINUX = os.path.exists('/etc/oracle-release')
_IS_REDHAT_LINUX = os.path.exists('/etc/redhat-release')
+SYSTEM_RELEASE_FILE = "/etc/system-release"
+
def _is_oracle_linux():
return _IS_ORACLE_LINUX
def _is_redhat_linux():
return _IS_REDHAT_LINUX
+def advanced_check(distribution):
+ distribution = list(distribution)
+ if os.path.exists(SYSTEM_RELEASE_FILE):
+ with open(SYSTEM_RELEASE_FILE, "rb") as fp:
+ issue_content = fp.read()
+
+ if "Amazon" in issue_content:
+ distribution[0] = "amazon"
+ search_groups = re.search('(\d+\.\d+)', issue_content)
+
+ if search_groups:
+ distribution[1] = search_groups.group(1)
+
+ return tuple(distribution)
+
class OS_CONST_TYPE(type):
@@ -98,16 +118,24 @@ class OS_CONST_TYPE(type):
f = open(os.path.join(RESOURCES_DIR, OSFAMILY_JSON_RESOURCE))
json_data = eval(f.read())
f.close()
- for family in json_data:
+
+ if JSON_OS_MAPPING not in json_data:
+ raise Exception("Invalid {0}".format(OSFAMILY_JSON_RESOURCE))
+
+ json_mapping_data = json_data[JSON_OS_MAPPING]
+
+ for family in json_mapping_data:
cls.FAMILY_COLLECTION += [family]
- cls.OS_COLLECTION += json_data[family][JSON_OS_TYPE]
+ cls.OS_COLLECTION += json_mapping_data[family][JSON_OS_TYPE]
cls.OS_FAMILY_COLLECTION += [{
'name': family,
- 'os_list': json_data[family][JSON_OS_TYPE]
+ 'os_list': json_mapping_data[family][JSON_OS_TYPE]
}]
- if JSON_EXTENDS in json_data[family]:
- cls.OS_FAMILY_COLLECTION[-1][JSON_EXTENDS] = json_data[family][JSON_EXTENDS]
+ if JSON_EXTENDS in json_mapping_data[family]:
+ cls.OS_FAMILY_COLLECTION[-1][JSON_EXTENDS] = json_mapping_data[family][JSON_EXTENDS]
+
+ cls.OS_TYPE_ALIASES = json_data[JSON_OS_ALIASES] if JSON_OS_ALIASES in json_data else {}
except:
raise Exception("Couldn't load '%s' file" % OSFAMILY_JSON_RESOURCE)
@@ -167,13 +195,35 @@ class OSCheck:
distribution = platform.dist()
else:
distribution = platform.linux_distribution()
+
+
- if distribution[0] == '' and platform.system().lower() == 'darwin':
- # mac - used for unit tests
- distribution = ("Darwin", "TestOnly", "1.1.1", "1.1.1", "1.1")
+ if distribution[0] == '':
+ distribution = advanced_check(distribution)
+
+ if platform.system().lower() == 'darwin':
+ # mac - used for unit tests
+ distribution = ("Darwin", "TestOnly", "1.1.1", "1.1.1", "1.1")
return distribution
-
+
+ @staticmethod
+ def get_alias(os_type, os_version):
+ version_parts = os_version.split('.')
+ full_os_and_major_version = os_type + version_parts[0]
+
+ if full_os_and_major_version in OSConst.OS_TYPE_ALIASES:
+ alias = OSConst.OS_TYPE_ALIASES[full_os_and_major_version]
+ re_groups = re.search('(\D+)(\d+)$', alias).groups()
+ os_type = re_groups[0]
+ os_major_version = re_groups[1]
+
+ version_parts[0] = os_major_version
+ os_version = '.'.join(version_parts)
+
+ return os_type, os_version
+
+
@staticmethod
def get_os_type():
"""
@@ -184,6 +234,10 @@ class OSCheck:
In case cannot detect - exit.
"""
+ return OSCheck.get_alias(OSCheck._get_os_type(), OSCheck._get_os_version())[0]
+
+ @staticmethod
+ def _get_os_type():
# Read content from /etc/*-release file
# Full release name
dist = OSCheck.os_distribution()
@@ -191,18 +245,18 @@ class OSCheck:
# special cases
if _is_oracle_linux():
- return 'oraclelinux'
+ operatingSystem = 'oraclelinux'
elif operatingSystem.startswith('suse linux enterprise server'):
- return 'sles'
+ operatingSystem = 'sles'
elif operatingSystem.startswith('red hat enterprise linux'):
- return 'redhat'
+ operatingSystem = 'redhat'
elif operatingSystem.startswith('darwin'):
- return 'mac'
+ operatingSystem = 'mac'
- if operatingSystem != '':
- return operatingSystem
- else:
+ if operatingSystem == '':
raise Exception("Cannot detect os type. Exiting...")
+
+ return operatingSystem
@staticmethod
def get_os_family():
@@ -236,11 +290,15 @@ class OSCheck:
In case cannot detect raises exception.
"""
+ return OSCheck.get_alias(OSCheck._get_os_type(), OSCheck._get_os_version())[1]
+
+ @staticmethod
+ def _get_os_version():
# Read content from /etc/*-release file
# Full release name
dist = OSCheck.os_distribution()
dist = dist[1]
-
+
if dist:
return dist
else:
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-common/src/main/python/ambari_commons/resources/os_family.json
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/resources/os_family.json b/ambari-common/src/main/python/ambari_commons/resources/os_family.json
index c0acc2c..1558c1b 100644
--- a/ambari-common/src/main/python/ambari_commons/resources/os_family.json
+++ b/ambari-common/src/main/python/ambari_commons/resources/os_family.json
@@ -1,64 +1,69 @@
{
- "redhat": {
- "distro": [
- "redhat",
- "fedora",
- "centos",
- "oraclelinux",
- "ascendos",
- "amazon",
- "xenserver",
- "oel",
- "ovs",
- "cloudlinux",
- "slc",
- "scientific",
- "psbm",
- "centos linux"
- ],
- "versions": [
- 6,
- 7
- ]
- },
- "debian": {
- "extends" : "ubuntu",
- "distro": [
- "debian"
- ],
- "versions": [
- 7
- ]
- },
- "ubuntu": {
- "distro": [
- "ubuntu"
- ],
- "versions": [
- 12,
- 14
- ]
- },
- "suse": {
- "distro": [
- "sles",
- "sled",
- "opensuse",
- "suse"
- ],
- "versions": [
- 11
- ]
- },
- "winsrv": {
- "distro": [
- "win2008server",
- "win2008serverr2",
- "win2012server",
- "win2012serverr2"
- ],
- "versions": [
- 6
- ]
- }
+ "mapping": {
+ "redhat": {
+ "distro": [
+ "redhat",
+ "fedora",
+ "centos",
+ "oraclelinux",
+ "amazon",
+ "ascendos",
+ "xenserver",
+ "oel",
+ "ovs",
+ "cloudlinux",
+ "slc",
+ "scientific",
+ "psbm",
+ "centos linux"
+ ],
+ "versions": [
+ 6,
+ 7
+ ]
+ },
+ "debian": {
+ "extends" : "ubuntu",
+ "distro": [
+ "debian"
+ ],
+ "versions": [
+ 7
+ ]
+ },
+ "ubuntu": {
+ "distro": [
+ "ubuntu"
+ ],
+ "versions": [
+ 12,
+ 14
+ ]
+ },
+ "suse": {
+ "distro": [
+ "sles",
+ "sled",
+ "opensuse",
+ "suse"
+ ],
+ "versions": [
+ 11
+ ]
+ },
+ "winsrv": {
+ "distro": [
+ "win2008server",
+ "win2008serverr2",
+ "win2012server",
+ "win2012serverr2"
+ ],
+ "versions": [
+ 6
+ ]
+ }
+ },
+ "aliases": {
+ "amazon2015": "amazon6"
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-common/src/main/python/resource_management/core/providers/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/providers/__init__.py b/ambari-common/src/main/python/resource_management/core/providers/__init__.py
index abb51c3..21ae0d5 100644
--- a/ambari-common/src/main/python/resource_management/core/providers/__init__.py
+++ b/ambari-common/src/main/python/resource_management/core/providers/__init__.py
@@ -41,6 +41,9 @@ class Provider(object):
PROVIDERS = dict(
+ amazon=dict(
+ Package="resource_management.core.providers.package.yumrpm.YumProvider",
+ ),
redhat=dict(
Package="resource_management.core.providers.package.yumrpm.YumProvider",
),
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
index 44e9ca1..bd7c98a 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
@@ -21,6 +21,9 @@ Ambari Agent
"""
PROVIDERS = dict(
+ amazon=dict(
+ Repository="resource_management.libraries.providers.repository.RhelSuseRepositoryProvider",
+ ),
redhat=dict(
Repository="resource_management.libraries.providers.repository.RhelSuseRepositoryProvider",
),
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-common/src/main/unix/ambari-python-wrap
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/unix/ambari-python-wrap b/ambari-common/src/main/unix/ambari-python-wrap
index 8dab946..e94467c 100755
--- a/ambari-common/src/main/unix/ambari-python-wrap
+++ b/ambari-common/src/main/unix/ambari-python-wrap
@@ -24,14 +24,14 @@ if [ -a /usr/bin/python2 ] && [ -z "$PYTHON" ]; then
PYTHON=/usr/bin/python2
fi
-if [ -a /usr/bin/python2.6 ] && [ -z "$PYTHON" ]; then
- PYTHON=/usr/bin/python2.6
-fi
-
if [ -a /usr/bin/python2.7 ] && [ -z "$PYTHON" ]; then
PYTHON=/usr/bin/python2.7
fi
+if [ -a /usr/bin/python2.6 ] && [ -z "$PYTHON" ]; then
+ PYTHON=/usr/bin/python2.6
+fi
+
# if no preferable python versions found, try to use system one. Hoping it's Python 2
if [[ -z "$PYTHON" ]]; then
PYTHON=/usr/bin/python
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/conf/unix/ambari-env.sh
----------------------------------------------------------------------
diff --git a/ambari-server/conf/unix/ambari-env.sh b/ambari-server/conf/unix/ambari-env.sh
index e463ca6..bf7ae56 100644
--- a/ambari-server/conf/unix/ambari-env.sh
+++ b/ambari-server/conf/unix/ambari-env.sh
@@ -17,6 +17,7 @@
AMBARI_PASSHPHRASE="DEV"
export AMBARI_JVM_ARGS=$AMBARI_JVM_ARGS' -Xms512m -Xmx2048m -Djava.security.auth.login.config=/etc/ambari-server/conf/krb5JAASLogin.conf -Djava.security.krb5.conf=/etc/krb5.conf -Djavax.security.auth.useSubjectCredsOnly=false'
export PATH=$PATH:/var/lib/ambari-server
+export PYTHONPATH=$PYTHONPATH:/usr/lib/python2.6/site-packages
# customize python binary for ambari
# export PYTHON=/usr/bin/python2
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/conf/unix/install-helper.sh
----------------------------------------------------------------------
diff --git a/ambari-server/conf/unix/install-helper.sh b/ambari-server/conf/unix/install-helper.sh
index 9dbb2b8..53f6c5e 100644
--- a/ambari-server/conf/unix/install-helper.sh
+++ b/ambari-server/conf/unix/install-helper.sh
@@ -29,7 +29,6 @@ JINJA_SERVER_DIR="/usr/lib/ambari-server/lib/ambari_jinja2"
SIMPLEJSON_SERVER_DIR="/usr/lib/ambari-server/lib/ambari_simplejson"
PYTHON_WRAPER_TARGET="/usr/bin/ambari-python-wrap"
-PYTHON_WRAPER_SOURCE="/var/lib/ambari-server/ambari-python-wrap"
do_install(){
# setting ambari_commons shared resource
@@ -49,9 +48,26 @@ do_install(){
if [ ! -d "$SIMPLEJSON_DIR" ]; then
ln -s "$SIMPLEJSON_SERVER_DIR" "$SIMPLEJSON_DIR"
fi
- # setting python-wrapper script
- if [ ! -f "$PYTHON_WRAPER_TARGET" ]; then
- ln -s "$PYTHON_WRAPER_SOURCE" "$PYTHON_WRAPER_TARGET"
+
+ # remove old python wrapper
+ rm -f "$PYTHON_WRAPER_TARGET"
+
+ AMBARI_PYTHON=""
+ python_binaries=( "/usr/bin/python" "/usr/bin/python2" "/usr/bin/python2.7", "/usr/bin/python2.6" )
+ for python_binary in "${python_binaries[@]}"
+ do
+ $python_binary -c "import sys ; ver = sys.version_info ; sys.exit(not (ver >= (2,6) and ver<(3,0)))" 1>/dev/null 2>/dev/null
+
+ if [ $? -eq 0 ] ; then
+ AMBARI_PYTHON="$python_binary"
+ break;
+ fi
+ done
+
+ if [ -z "$AMBARI_PYTHON" ] ; then
+ >&2 echo "Cannot detect python for ambari to use. Please manually set $PYTHON_WRAPER link to point to correct python binary"
+ else
+ ln -s "$AMBARI_PYTHON" "$PYTHON_WRAPER_TARGET"
fi
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 573d249..f07788f 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -398,18 +398,6 @@
</sources>
</mapping>
<mapping>
- <directory>/var/lib/ambari-server/</directory>
- <filemode>755</filemode>
- <username>root</username>
- <groupname>root</groupname>
- <directoryIncluded>false</directoryIncluded>
- <sources>
- <source>
- <location>../ambari-common/src/main/unix/ambari-python-wrap</location>
- </source>
- </sources>
- </mapping>
- <mapping>
<directory>/etc/ambari-server/conf</directory>
<configuration>true</configuration>
<sources>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/sbin/ambari-server
----------------------------------------------------------------------
diff --git a/ambari-server/sbin/ambari-server b/ambari-server/sbin/ambari-server
index 47e2926..72d94ed 100755
--- a/ambari-server/sbin/ambari-server
+++ b/ambari-server/sbin/ambari-server
@@ -37,10 +37,11 @@ esac
export PATH=/usr/lib/ambari-server/*:$PATH:/sbin/:/usr/sbin
export AMBARI_CONF_DIR=/etc/ambari-server/conf
-PYTHON_WRAP=/var/lib/ambari-server/ambari-python-wrap
+PYTHON_WRAP=/usr/bin/ambari-python-wrap
-# fills $PYTHON
-. $PYTHON_WRAP -V 2>/dev/null
+if [ -z "$PYTHON" ] ; then
+ export PYTHON=`readlink $PYTHON_WRAP`
+fi
if [ -a /var/lib/ambari-server/ambari-env.sh ]; then
. /var/lib/ambari-server/ambari-env.sh
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/java/org/apache/ambari/server/state/stack/JsonOsFamilyRoot.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/JsonOsFamilyRoot.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/JsonOsFamilyRoot.java
new file mode 100644
index 0000000..3f9158f
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/JsonOsFamilyRoot.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.stack;
+
+import java.util.Map;
+
+public class JsonOsFamilyRoot {
+ private Map<String, JsonOsFamilyEntry> mapping;
+ private Map<String, String> aliases;
+
+ public Map<String, JsonOsFamilyEntry> getMapping() {
+ return mapping;
+ }
+ public void setMapping(Map<String, JsonOsFamilyEntry> mapping) {
+ this.mapping = mapping;
+ }
+ public Map<String, String> getAliases() {
+ return aliases;
+ }
+ public void setAliases(Map<String, String> aliases) {
+ this.aliases = aliases;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/java/org/apache/ambari/server/state/stack/OsFamily.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/OsFamily.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/OsFamily.java
index 37a6db3..e494c44 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/OsFamily.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/OsFamily.java
@@ -48,11 +48,14 @@ public class OsFamily {
private final String os_pattern = "([\\D]+|(?:[\\D]+[\\d]+[\\D]+))([\\d]*)";
private final String OS_DISTRO = "distro";
private final String OS_VERSION = "versions";
+ private final String OS_MAPPING = "mapping";
+ private final String OS_ALIASES = "aliases";
private final String LOAD_CONFIG_MSG = "Could not load OS family definition from %s file";
private final String FILE_NAME = "os_family.json";
private final Logger LOG = LoggerFactory.getLogger(OsFamily.class);
private Map<String, JsonOsFamilyEntry> osMap = null;
+ private JsonOsFamilyRoot jsonOsFamily = null;
/**
* Initialize object
@@ -77,9 +80,10 @@ public class OsFamily {
if (!f.exists()) throw new Exception();
inputStream = new FileInputStream(f);
- Type type = new TypeToken<Map<String, JsonOsFamilyEntry>>() {}.getType();
+ Type type = new TypeToken<JsonOsFamilyRoot>() {}.getType();
Gson gson = new Gson();
- osMap = gson.fromJson(new InputStreamReader(inputStream), type);
+ jsonOsFamily = gson.fromJson(new InputStreamReader(inputStream), type);
+ osMap = jsonOsFamily.getMapping();
} catch (Exception e) {
LOG.error(String.format(LOAD_CONFIG_MSG, new File(SharedResourcesPath, FILE_NAME).toString()));
throw new RuntimeException(e);
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/python/ambari_server/serverSetup.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/serverSetup.py b/ambari-server/src/main/python/ambari_server/serverSetup.py
index 511da27..c370257 100644
--- a/ambari-server/src/main/python/ambari_server/serverSetup.py
+++ b/ambari-server/src/main/python/ambari_server/serverSetup.py
@@ -432,23 +432,6 @@ class JDKSetup(object):
else:
progress_func = download_progress
- if get_silent():
- if not java_home_var:
- #No java_home_var set, detect if java is already installed
- if os.environ.has_key(JAVA_HOME):
- args.java_home = os.environ[JAVA_HOME]
-
- properties.process_pair(JAVA_HOME_PROPERTY, args.java_home)
- properties.removeOldProp(JDK_NAME_PROPERTY)
- properties.removeOldProp(JCE_NAME_PROPERTY)
-
- self._ensure_java_home_env_var_is_set(args.java_home)
- self.jdk_index = self.custom_jdk_number
- return
- else:
- # For now, changing the existing JDK to make sure we use a supported one
- pass
-
if java_home_var:
change_jdk = get_YN_input("Do you want to change Oracle JDK [y/n] (n)? ", False)
if not change_jdk:
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/python/ambari_server/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/utils.py b/ambari-server/src/main/python/ambari_server/utils.py
index 2afcd1b..e7cdb7b 100644
--- a/ambari-server/src/main/python/ambari_server/utils.py
+++ b/ambari-server/src/main/python/ambari_server/utils.py
@@ -224,8 +224,9 @@ def get_postgre_hba_dir(OS_FAMILY):
os.symlink(glob.glob(get_pg_hba_init_files() + '*')[0],
get_pg_hba_init_files())
+ pg_hba_init_basename = os.path.basename(get_pg_hba_init_files())
# Get postgres_data location (default: /var/lib/pgsql/data)
- cmd = "alias exit=return; source " + get_pg_hba_init_files() + " status &>/dev/null; echo $PGDATA"
+ cmd = "alias basename='echo {0}; true' ; alias exit=return; source {1} status &>/dev/null; echo $PGDATA".format(pg_hba_init_basename, get_pg_hba_init_files())
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/python/bootstrap.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/bootstrap.py b/ambari-server/src/main/python/bootstrap.py
index adde438..270adda 100755
--- a/ambari-server/src/main/python/bootstrap.py
+++ b/ambari-server/src/main/python/bootstrap.py
@@ -18,12 +18,14 @@ See the License for the specific language governing permissions and
limitations under the License.
'''
+import sys
+sys.path.append("/usr/lib/python2.6/site-packages/") # this file can be run with python2.7 that why we need this
+
# On Linux, the bootstrap process is supposed to run on hosts that may have installed Python 2.4 and above (CentOS 5).
# Hence, the whole bootstrap code needs to comply with Python 2.4 instead of Python 2.6. Most notably, @-decorators and
# {}-format() are to be avoided.
import time
-import sys
import logging
import pprint
import os
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/metainfo.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/metainfo.xml
index 48469df..7874fc3 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/metainfo.xml
@@ -66,7 +66,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>ambari-metrics-collector</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/metainfo.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/metainfo.xml
index 72f159a..c1d4db7 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/metainfo.xml
@@ -73,7 +73,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6</osFamily>
<packages>
<package>
<name>krb5-server</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
index 352341b..febfa37 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
@@ -24,7 +24,7 @@
<version>1.7.0.2.3</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>accumulo_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/ATLAS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ATLAS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ATLAS/metainfo.xml
index fee10d3..f5e741c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ATLAS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ATLAS/metainfo.xml
@@ -24,7 +24,7 @@
<version>0.5.0.2.3</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>atlas-metadata_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/FALCON/metainfo.xml
index 6fdad8b..1d1e3e3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/FALCON/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/FALCON/metainfo.xml
@@ -23,7 +23,7 @@
<version>0.6.1.2.3</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>falcon_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/FLUME/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/FLUME/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/FLUME/metainfo.xml
index 5031f85..a5892e3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/FLUME/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/FLUME/metainfo.xml
@@ -24,7 +24,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>flume_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/metainfo.xml
index 90a31f5..ff55ee7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/metainfo.xml
@@ -38,7 +38,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>hbase_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml
index ca69195..306ca6a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml
@@ -57,7 +57,7 @@
</osSpecific>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>hadoop_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/metainfo.xml
index 0756a54..f8c0cff 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HIVE/metainfo.xml
@@ -33,7 +33,7 @@
</packages>
</osSpecific>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>hive_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/metainfo.xml
index 330b542..0ffe982 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/metainfo.xml
@@ -23,7 +23,7 @@
<version>0.9.0.2.3</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>kafka_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/KNOX/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/KNOX/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/KNOX/metainfo.xml
index d1172b1..c39c559 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/KNOX/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/KNOX/metainfo.xml
@@ -23,7 +23,7 @@
<version>0.6.0.2.3</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>knox_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/metainfo.xml
index ccf5de3..20a46b4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/metainfo.xml
@@ -38,7 +38,7 @@
</packages>
</osSpecific>
<osSpecific>
- <osFamily>redhat5,redhat6,redhat7,suse11</osFamily>
+ <osFamily>redhat5,amazon2015,redhat6,redhat7,suse11</osFamily>
<packages>
<package>
<name>oozie_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/PIG/metainfo.xml
index 8ba2426..59782bb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/PIG/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/PIG/metainfo.xml
@@ -23,7 +23,7 @@
<version>0.15.0.2.3</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>pig_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/metainfo.xml
index 2fb8a9a..78c6f30 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/metainfo.xml
@@ -29,7 +29,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>ranger_2_3_*-admin</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER_KMS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER_KMS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER_KMS/metainfo.xml
index 5606a26..fc4a488 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER_KMS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER_KMS/metainfo.xml
@@ -26,7 +26,7 @@
<extends>common-services/RANGER_KMS/0.5.0.2.3</extends>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>ranger_2_3_*-kms</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/SLIDER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SLIDER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SLIDER/metainfo.xml
index 8287b50..4b8f522 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SLIDER/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SLIDER/metainfo.xml
@@ -23,7 +23,7 @@
<version>0.80.0.2.3</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>slider_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/metainfo.xml
index 2c9680c..39fb5e3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/metainfo.xml
@@ -27,7 +27,7 @@
<extends>common-services/SPARK/1.4.1.2.3</extends>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>spark_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SQOOP/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SQOOP/metainfo.xml
index 1290cde..68e1f3b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SQOOP/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SQOOP/metainfo.xml
@@ -32,7 +32,7 @@
</packages>
</osSpecific>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>sqoop_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/STORM/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/STORM/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/STORM/metainfo.xml
index 00af620..31291f9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/STORM/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/STORM/metainfo.xml
@@ -24,7 +24,7 @@
<version>0.10.0</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>storm_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/TEZ/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/TEZ/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/TEZ/metainfo.xml
index 7720a3d..95dae9b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/TEZ/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/TEZ/metainfo.xml
@@ -24,7 +24,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>tez_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/metainfo.xml
index 5a21200..1cb0e9f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/metainfo.xml
@@ -25,7 +25,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>hadoop_2_3_*-yarn</name>
@@ -57,7 +57,7 @@
<version>2.7.1.2.3</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>hadoop_2_3_*-mapreduce</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.3/services/ZOOKEEPER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ZOOKEEPER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ZOOKEEPER/metainfo.xml
index 1f77215..53569e2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ZOOKEEPER/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ZOOKEEPER/metainfo.xml
@@ -23,7 +23,7 @@
<version>3.4.6.2.3</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>zookeeper_2_3_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/ACCUMULO/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/ACCUMULO/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/ACCUMULO/metainfo.xml
index 9e82a82..0e6dbff 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/ACCUMULO/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/ACCUMULO/metainfo.xml
@@ -33,7 +33,7 @@
</osSpecific>
</osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>accumulo_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/ATLAS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/ATLAS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/ATLAS/metainfo.xml
index 158ef8c..7ff548e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/ATLAS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/ATLAS/metainfo.xml
@@ -23,7 +23,7 @@
<version>0.5.0.2.4</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>atlas-metadata_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/FALCON/metainfo.xml
index 183d55c..863b081 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/FALCON/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/FALCON/metainfo.xml
@@ -23,7 +23,7 @@
<version>0.6.1.2.4</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>falcon_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/FLUME/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/FLUME/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/FLUME/metainfo.xml
index b7cd3d1..cd3ccfd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/FLUME/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/FLUME/metainfo.xml
@@ -24,7 +24,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>flume_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HBASE/metainfo.xml
index a1c2bd0..94b1887 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HBASE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HBASE/metainfo.xml
@@ -24,7 +24,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>hbase_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/metainfo.xml
index ce3fb95..401da18 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/metainfo.xml
@@ -33,7 +33,7 @@
</osSpecific>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>hadoop_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
index 5ad52b6..c1d3e0f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HIVE/metainfo.xml
@@ -33,7 +33,7 @@
</packages>
</osSpecific>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>hive_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/KAFKA/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/KAFKA/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/KAFKA/metainfo.xml
index 90bc472..82338a3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/KAFKA/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/KAFKA/metainfo.xml
@@ -23,7 +23,7 @@
<version>0.9.0.2.4</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>kafka_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/KNOX/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/KNOX/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/KNOX/metainfo.xml
index e465d42..c6d38e2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/KNOX/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/KNOX/metainfo.xml
@@ -23,7 +23,7 @@
<version>0.6.0.2.4</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>knox_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/OOZIE/metainfo.xml
index 2f09e88..927fc31 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/OOZIE/metainfo.xml
@@ -39,7 +39,7 @@
</packages>
</osSpecific>
<osSpecific>
- <osFamily>redhat5,redhat6,redhat7,suse11</osFamily>
+ <osFamily>redhat5,amazon2015,redhat6,redhat7,suse11</osFamily>
<packages>
<package>
<name>oozie_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/PIG/metainfo.xml
index 9ae1f42..aec3839 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/PIG/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/PIG/metainfo.xml
@@ -23,7 +23,7 @@
<version>0.15.0.2.4</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>pig_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/RANGER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/RANGER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/RANGER/metainfo.xml
index d76ad5c..b1ddbcd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/RANGER/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/RANGER/metainfo.xml
@@ -27,7 +27,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>ranger_2_4_*-admin</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/RANGER_KMS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/RANGER_KMS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/RANGER_KMS/metainfo.xml
index a27bf31..2654fd9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/RANGER_KMS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/RANGER_KMS/metainfo.xml
@@ -27,7 +27,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>ranger_2_4_*-kms</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/SLIDER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SLIDER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SLIDER/metainfo.xml
index ff998f0..c883bc2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SLIDER/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SLIDER/metainfo.xml
@@ -23,7 +23,7 @@
<version>0.80.0.2.4</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>slider_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
index 13c6a9b..562c977 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
@@ -40,7 +40,7 @@
</requiredServices>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>spark_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SQOOP/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SQOOP/metainfo.xml
index 721cc33..8caabd4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SQOOP/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SQOOP/metainfo.xml
@@ -32,7 +32,7 @@
</packages>
</osSpecific>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>sqoop_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/STORM/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/STORM/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/STORM/metainfo.xml
index 36b01e4..6ce698b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/STORM/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/STORM/metainfo.xml
@@ -24,7 +24,7 @@
<version>0.10.0.2.4</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>storm_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/TEZ/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/TEZ/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/TEZ/metainfo.xml
index 356225e..a92214c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/TEZ/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/TEZ/metainfo.xml
@@ -24,7 +24,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>tez_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/YARN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/YARN/metainfo.xml
index c61a270..7979b9f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/YARN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/YARN/metainfo.xml
@@ -25,7 +25,7 @@
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>hadoop_2_4_*-yarn</name>
@@ -57,7 +57,7 @@
<version>2.7.1.2.4</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>hadoop_2_4_*-mapreduce</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/main/resources/stacks/HDP/2.4/services/ZOOKEEPER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/ZOOKEEPER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/ZOOKEEPER/metainfo.xml
index 4bf4198..1ced6dc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/ZOOKEEPER/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/ZOOKEEPER/metainfo.xml
@@ -23,7 +23,7 @@
<version>3.4.6.2.4</version>
<osSpecifics>
<osSpecific>
- <osFamily>redhat7,redhat6,suse11</osFamily>
+ <osFamily>redhat7,amazon2015,redhat6,suse11</osFamily>
<packages>
<package>
<name>zookeeper_2_4_*</name>
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/test/python/TestOSCheck.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestOSCheck.py b/ambari-server/src/test/python/TestOSCheck.py
index ebef706..e14458b 100644
--- a/ambari-server/src/test/python/TestOSCheck.py
+++ b/ambari-server/src/test/python/TestOSCheck.py
@@ -28,7 +28,7 @@ from unittest import TestCase
from mock.mock import patch
from mock.mock import MagicMock
-from ambari_commons import OSCheck
+from ambari_commons import OSCheck, OSConst
import os_check_type
utils = __import__('ambari_server.utils').utils
@@ -48,7 +48,7 @@ class TestOSCheck(TestCase):
# 1 - Any system
mock_is_oracle_linux.return_value = False
- mock_linux_distribution.return_value = ('my_os', '', '')
+ mock_linux_distribution.return_value = ('my_os', '2015.09', '')
result = OSCheck.get_os_type()
self.assertEquals(result, 'my_os')
@@ -64,13 +64,13 @@ class TestOSCheck(TestCase):
# 3 - path exist: '/etc/oracle-release'
mock_is_oracle_linux.return_value = True
- mock_linux_distribution.return_value = ('some_os', '', '')
+ mock_linux_distribution.return_value = ('some_os', '1234', '')
result = OSCheck.get_os_type()
self.assertEquals(result, 'oraclelinux')
# 4 - Common system
mock_is_oracle_linux.return_value = False
- mock_linux_distribution.return_value = ('CenToS', '', '')
+ mock_linux_distribution.return_value = ('CenToS', '4.56', '')
result = OSCheck.get_os_type()
self.assertEquals(result, 'centos')
@@ -97,31 +97,31 @@ class TestOSCheck(TestCase):
# 1 - Any system
mock_exists.return_value = False
- mock_linux_distribution.return_value = ('MY_os', '', '')
+ mock_linux_distribution.return_value = ('MY_os', '5.6.7', '')
result = OSCheck.get_os_family()
self.assertEquals(result, 'my_os')
# 2 - Redhat
mock_exists.return_value = False
- mock_linux_distribution.return_value = ('Centos Linux', '', '')
+ mock_linux_distribution.return_value = ('Centos Linux', '2.4', '')
result = OSCheck.get_os_family()
self.assertEquals(result, 'redhat')
# 3 - Ubuntu
mock_exists.return_value = False
- mock_linux_distribution.return_value = ('Ubuntu', '', '')
+ mock_linux_distribution.return_value = ('Ubuntu', '14.04', '')
result = OSCheck.get_os_family()
self.assertEquals(result, 'ubuntu')
# 4 - Suse
mock_exists.return_value = False
mock_linux_distribution.return_value = (
- 'suse linux enterprise server', '', '')
+ 'suse linux enterprise server', '11.3', '')
result = OSCheck.get_os_family()
self.assertEquals(result, 'suse')
mock_exists.return_value = False
- mock_linux_distribution.return_value = ('SLED', '', '')
+ mock_linux_distribution.return_value = ('SLED', '1.2.3.4.5', '')
result = OSCheck.get_os_family()
self.assertEquals(result, 'suse')
@@ -139,7 +139,7 @@ class TestOSCheck(TestCase):
def test_get_os_version(self, mock_linux_distribution):
# 1 - Any system
- mock_linux_distribution.return_value = ('', '123.45', '')
+ mock_linux_distribution.return_value = ('some_os', '123.45', '')
result = OSCheck.get_os_version()
self.assertEquals(result, '123.45')
@@ -157,7 +157,7 @@ class TestOSCheck(TestCase):
def test_get_os_major_version(self, mock_linux_distribution):
# 1
- mock_linux_distribution.return_value = ('', '123.45.67', '')
+ mock_linux_distribution.return_value = ('abcd_os', '123.45.67', '')
result = OSCheck.get_os_major_version()
self.assertEquals(result, '123')
@@ -165,6 +165,21 @@ class TestOSCheck(TestCase):
mock_linux_distribution.return_value = ('Suse', '11', '')
result = OSCheck.get_os_major_version()
self.assertEquals(result, '11')
+
+ @patch.object(OSCheck, "os_distribution")
+ def test_aliases(self, mock_linux_distribution):
+ OSConst.OS_TYPE_ALIASES['qwerty_os123'] = 'aliased_os5'
+ OSConst.OS_FAMILY_COLLECTION.append({
+ 'name': 'aliased_os_family',
+ 'os_list': ["aliased_os"]
+ })
+
+ mock_linux_distribution.return_value = ('qwerty_os', '123.45.67', '')
+
+ self.assertEquals(OSCheck.get_os_type(), 'aliased_os')
+ self.assertEquals(OSCheck.get_os_major_version(), '5')
+ self.assertEquals(OSCheck.get_os_version(), '5.45.67')
+ self.assertEquals(OSCheck.get_os_family(), 'aliased_os_family')
@patch.object(OSCheck, "os_distribution")
def test_get_os_release_name(self, mock_linux_distribution):
http://git-wip-us.apache.org/repos/asf/ambari/blob/abd26b23/ambari-server/src/test/resources/os_family.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/os_family.json b/ambari-server/src/test/resources/os_family.json
index df55b61..ae6b19e 100644
--- a/ambari-server/src/test/resources/os_family.json
+++ b/ambari-server/src/test/resources/os_family.json
@@ -1,45 +1,50 @@
{
- "redhat": {
- "distro": [
- "redhat",
- "fedora",
- "centos",
- "oraclelinux"
- ],
- "versions": [
- 5,
- 6
- ]
+ "mapping": {
+ "redhat": {
+ "distro": [
+ "redhat",
+ "fedora",
+ "centos",
+ "oraclelinux"
+ ],
+ "versions": [
+ 5,
+ 6
+ ]
+ },
+ "ubuntu": {
+ "distro": [
+ "ubuntu",
+ "debian"
+ ],
+ "versions": [
+ 12
+ ]
+ },
+ "suse": {
+ "distro": [
+ "sles",
+ "sled",
+ "opensuse",
+ "suse"
+ ],
+ "versions": [
+ 11
+ ]
+ },
+ "winsrv": {
+ "distro": [
+ "win2008server",
+ "win2008serverr2",
+ "win2012server",
+ "win2012serverr2"
+ ],
+ "versions": [
+ 6
+ ]
+ }
},
- "ubuntu": {
- "distro": [
- "ubuntu",
- "debian"
- ],
- "versions": [
- 12
- ]
- },
- "suse": {
- "distro": [
- "sles",
- "sled",
- "opensuse",
- "suse"
- ],
- "versions": [
- 11
- ]
- },
- "winsrv": {
- "distro": [
- "win2008server",
- "win2008serverr2",
- "win2012server",
- "win2012serverr2"
- ],
- "versions": [
- 6
- ]
+ "aliases": {
+ "amazon2015": "amazon6"
}
-}
+}
\ No newline at end of file
[02/19] ambari git commit: AMBARI-14839 - DEA is not enabled due to
configuration conflict and history log directory can not be changed (Jeff
Zhang via jonathanhurley)
Posted by yu...@apache.org.
AMBARI-14839 - DEA is not enabled due to configuration conflict and history log directory can not be changed (Jeff Zhang via jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7a0e1a07
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7a0e1a07
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7a0e1a07
Branch: refs/heads/2.2.1-maint
Commit: 7a0e1a07078d5d5d01b73f323c0b1789e78b0b1f
Parents: c2d27f3
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Jan 29 11:27:35 2016 -0500
Committer: Mahadev Konar <ma...@apache.org>
Committed: Sun Jan 31 18:14:14 2016 -0800
----------------------------------------------------------------------
.../SPARK/1.2.0.2.2/package/scripts/params.py | 2 +-
.../SPARK/configuration/spark-defaults.xml | 4 +--
.../configuration/spark-thrift-sparkconf.xml | 31 ++++++++++++++++----
.../stacks/HDP/2.4/upgrades/config-upgrade.xml | 15 ----------
4 files changed, 29 insertions(+), 23 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a0e1a07/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index a681a5c..4fd1d6b 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -79,7 +79,7 @@ hive_user = status_params.hive_user
spark_group = status_params.spark_group
user_group = status_params.user_group
spark_hdfs_user_dir = format("/user/{spark_user}")
-spark_history_dir = 'hdfs:///spark-history'
+spark_history_dir = default('/configurations/spark-defaults/spark.history.fs.logDirectory', "hdfs:///spark-history")
spark_history_server_pid_file = status_params.spark_history_server_pid_file
spark_thrift_server_pid_file = status_params.spark_thrift_server_pid_file
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a0e1a07/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
index d8af790..1a6552f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
@@ -31,7 +31,7 @@
</property>
<property>
<name>spark.history.fs.logDirectory</name>
- <value>{{spark_history_dir}}</value>
+ <value>hdfs:///spark-history</value>
<description>
Base directory for history spark application log.
</description>
@@ -45,7 +45,7 @@
</property>
<property>
<name>spark.eventLog.dir</name>
- <value>{{spark_history_dir}}</value>
+ <value>hdfs:///spark-history</value>
<description>
Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
</description>
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a0e1a07/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
index 3b13496..2dbfe51 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
@@ -75,14 +75,17 @@
<property>
<name>spark.history.fs.logDirectory</name>
<value>{{spark_history_dir}}</value>
+ <final>true</final>
<description>
- Base directory for history spark application log.
+ Base directory for history spark application log. It is the same value
+ as in spark-defaults.xml.
</description>
</property>
<property>
<name>spark.eventLog.enabled</name>
<value>true</value>
+ <final>true</final>
<description>
Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
</description>
@@ -91,8 +94,10 @@
<property>
<name>spark.eventLog.dir</name>
<value>{{spark_history_dir}}</value>
+ <final>true</final>
<description>
- Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
+ Base directory in which Spark events are logged, if spark.eventLog.enabled is true. It is the same value
+ as in spark-defaults.xml.
</description>
</property>
@@ -138,10 +143,26 @@
</property>
<property>
- <name>spark.executor.instances</name>
- <value>2</value>
+ <name>spark.dynamicAllocation.initialExecutors</name>
+ <value>0</value>
<description>
- The number of executor.
+ Initial number of executors to run if dynamic allocation is enabled.
+ </description>
+ </property>
+
+ <property>
+ <name>spark.dynamicAllocation.maxExecutors</name>
+ <value>10</value>
+ <description>
+ Upper bound for the number of executors if dynamic allocation is enabled.
+ </description>
+ </property>
+
+ <property>
+ <name>spark.dynamicAllocation.minExecutors</name>
+ <value>0</value>
+ <description>
+ Lower bound for the number of executors if dynamic allocation is enabled.
</description>
</property>
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a0e1a07/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index 5949486..ee06577 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -172,9 +172,6 @@
<type>spark-defaults</type>
<transfer operation="delete" delete-key="spark.yarn.services" />
<set key="spark.history.provider" value="org.apache.spark.deploy.history.FsHistoryProvider"/>
- <set key="spark.history.fs.logDirectory" value="{{spark_history_dir}}"/>
- <set key="spark.eventLog.enabled" value="true"/>
- <set key="spark.eventLog.dir" value="{{spark_history_dir}}"/>
</definition>
</changes>
</component>
@@ -190,18 +187,6 @@
<transfer operation="delete" delete-key="spark.yarn.submit.file.replication" />
<transfer operation="delete" delete-key="spark.yarn.preserve.staging.files" />
<transfer operation="delete" delete-key="spark.yarn.max.executor.failures" />
- <set key="spark.history.provider" value="org.apache.spark.deploy.history.FsHistoryProvider"/>
- <set key="spark.history.fs.logDirectory" value="{{spark_history_dir}}"/>
- <set key="spark.eventLog.enabled" value="true"/>
- <set key="spark.eventLog.dir" value="{{spark_history_dir}}"/>
- <set key="spark.master" value="{{spark_thrift_master}}"/>
- <set key="spark.scheduler.allocation.file" value="{{spark_conf}}/spark-thrift-fairscheduler.xml"/>
- <set key="spark.scheduler.mode" value="FAIR"/>
- <set key="spark.shuffle.service.enabled" value="true"/>
- <set key="spark.dynamicAllocation.enabled" value="true"/>
- <set key="spark.executor.instances" value="2"/>
- <set key="spark.yarn.am.memory" value="512m"/>
- <set key="spark.executor.memory" value="1g"/>
</definition>
</changes>
</component>
[03/19] ambari git commit: AMBARI-14848 : AMS service has critical
alert after upgrade to 2.2.1.0. (avijayan)
Posted by yu...@apache.org.
AMBARI-14848 : AMS service has critical alert after upgrade to 2.2.1.0. (avijayan)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a17c7a6a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a17c7a6a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a17c7a6a
Branch: refs/heads/2.2.1-maint
Commit: a17c7a6a6b8258885d074fabcecc38d3fdf9e894
Parents: 7a0e1a0
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Fri Jan 29 14:05:51 2016 -0800
Committer: Mahadev Konar <ma...@apache.org>
Committed: Sun Jan 31 18:14:28 2016 -0800
----------------------------------------------------------------------
.../server/upgrade/UpgradeCatalog221.java | 7 ++++
.../server/upgrade/UpgradeCatalog221Test.java | 43 ++++++++++++++++++++
2 files changed, 50 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a17c7a6a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
index 2adc8b9..82c56ae 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
@@ -198,6 +198,13 @@ public class UpgradeCatalog221 extends AbstractUpgradeCatalog {
alertDefinitionDAO.merge(alertDefinition);
}
+ final AlertDefinitionEntity amsZookeeperProcessAlertDefinitionEntity = alertDefinitionDAO.findByName(
+ clusterID, "ams_metrics_collector_zookeeper_server_process");
+
+ if (amsZookeeperProcessAlertDefinitionEntity != null) {
+ LOG.info("Removing alert : ams_metrics_collector_zookeeper_server_process");
+ alertDefinitionDAO.remove(amsZookeeperProcessAlertDefinitionEntity);
+ }
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a17c7a6a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
index 906167e..3dab200 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
@@ -41,7 +41,9 @@ import org.apache.ambari.server.controller.MaintenanceStateHelper;
import org.apache.ambari.server.orm.DBAccessor;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
import org.apache.ambari.server.orm.entities.StackEntity;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
@@ -572,4 +574,45 @@ public class UpgradeCatalog221Test {
String result = (String) updateAmsEnvContent.invoke(upgradeCatalog221, oldContent);
Assert.assertEquals(expectedContent, result);
}
+
+ @Test
+ public void testUpdateAlertDefinitions() {
+ EasyMockSupport easyMockSupport = new EasyMockSupport();
+ long clusterId = 1;
+
+ final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+ final AlertDefinitionDAO mockAlertDefinitionDAO = easyMockSupport.createNiceMock(AlertDefinitionDAO.class);
+ final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
+ final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+ final AlertDefinitionEntity mockAmsZookeeperProcessAlertDefinitionEntity = easyMockSupport.createNiceMock(AlertDefinitionEntity.class);
+
+ final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+ @Override
+ protected void configure() {
+ bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+ bind(Clusters.class).toInstance(mockClusters);
+ bind(EntityManager.class).toInstance(entityManager);
+ bind(AlertDefinitionDAO.class).toInstance(mockAlertDefinitionDAO);
+ bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+ bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+ }
+ });
+
+ expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
+ expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+ put("normal", mockClusterExpected);
+ }}).atLeastOnce();
+
+ expect(mockClusterExpected.getClusterId()).andReturn(clusterId).anyTimes();
+
+ expect(mockAlertDefinitionDAO.findByName(eq(clusterId), eq("ams_metrics_collector_zookeeper_server_process")))
+ .andReturn(mockAmsZookeeperProcessAlertDefinitionEntity).atLeastOnce();
+
+ mockAlertDefinitionDAO.remove(mockAmsZookeeperProcessAlertDefinitionEntity);
+ expectLastCall().once();
+
+ easyMockSupport.replayAll();
+ mockInjector.getInstance(UpgradeCatalog221.class).updateAlerts();
+ easyMockSupport.verifyAll();
+ }
}
[14/19] ambari git commit: AMBARI-382. Ambari Kafka alerts not
respecting broker port configured in Ambari.(vbrodetskyi)
Posted by yu...@apache.org.
AMBARI-382. Ambari Kafka alerts not respecting broker port configured in Ambari.(vbrodetskyi)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6893b5a8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6893b5a8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6893b5a8
Branch: refs/heads/2.2.1-maint
Commit: 6893b5a8b12a1f71815c66d3b6f462e9e2e119c3
Parents: abd26b2
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Fri Feb 12 03:15:24 2016 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Fri Feb 12 03:15:24 2016 +0200
----------------------------------------------------------------------
.../python/ambari_agent/alerts/port_alert.py | 2 +-
.../stacks/HDP/2.3/services/KAFKA/alerts.json | 32 ++++++++++++++++++++
2 files changed, 33 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/6893b5a8/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py b/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
index 848da65..92d28ad 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/port_alert.py
@@ -102,7 +102,7 @@ class PortAlert(BaseAlert):
host = BaseAlert.get_host_from_url(uri_value)
- if host is None:
+ if host is None or host == "localhost" or host == "0.0.0.0":
host = self.host_name
try:
http://git-wip-us.apache.org/repos/asf/ambari/blob/6893b5a8/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/alerts.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/alerts.json
new file mode 100644
index 0000000..04fb583
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/alerts.json
@@ -0,0 +1,32 @@
+{
+ "KAFKA": {
+ "service": [],
+ "KAFKA_BROKER": [
+ {
+ "name": "kafka_broker_process",
+ "label": "Kafka Broker Process",
+ "description": "This host-level alert is triggered if the Kafka Broker cannot be determined to be up.",
+ "interval": 1,
+ "scope": "HOST",
+ "source": {
+ "type": "PORT",
+ "uri": "{{kafka-broker/listeners}}",
+ "default_port": 6667,
+ "reporting": {
+ "ok": {
+ "text": "TCP OK - {0:.3f}s response on port {1}"
+ },
+ "warning": {
+ "text": "TCP OK - {0:.3f}s response on port {1}",
+ "value": 1.5
+ },
+ "critical": {
+ "text": "Connection failed: {0} to {1}:{2}",
+ "value": 5.0
+ }
+ }
+ }
+ }
+ ]
+ }
+}
[07/19] ambari git commit: AMBARI-14901. NPE when configuring
Kerberos at provisioning secure cluster with Blueprint. (Sandor Magyari via
rnettleton)
Posted by yu...@apache.org.
AMBARI-14901. NPE when configuring Kerberos at provisioning secure cluster with Blueprint. (Sandor Magyari via rnettleton)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4c31108b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4c31108b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4c31108b
Branch: refs/heads/2.2.1-maint
Commit: 4c31108b2182450e44831635bfdb8f88492a6d10
Parents: c579388
Author: Bob Nettleton <rn...@hortonworks.com>
Authored: Thu Feb 4 17:56:46 2016 -0500
Committer: Bob Nettleton <rn...@hortonworks.com>
Committed: Thu Feb 4 17:56:46 2016 -0500
----------------------------------------------------------------------
.../topology/ClusterConfigurationRequest.java | 35 ++++++-
.../ClusterConfigurationRequestTest.java | 101 +++++++++++++++++--
2 files changed, 125 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/4c31108b/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
index c719009..464aee7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
@@ -148,9 +148,8 @@ public class ClusterConfigurationRequest {
Map<String, String> clusterConfigProperties = existingConfigurations.get(configType);
Map<String, String> stackDefaultConfigProperties = stackDefaultProps.get(configType);
for (String property : propertyMap.keySet()) {
- if (clusterConfigProperties == null || !clusterConfigProperties.containsKey(property)
- || (clusterConfigProperties.get(property) == null && stackDefaultConfigProperties.get(property) == null)
- || (clusterConfigProperties.get(property) != null && clusterConfigProperties.get(property).equals(stackDefaultConfigProperties.get(property)))) {
+ // update value only if property value configured in Blueprint /ClusterTemplate is not a custom one
+ if (!propertyHasCustomValue(clusterConfigProperties, stackDefaultConfigProperties, property)) {
LOG.debug("Update Kerberos related config property: {} {} {}", configType, property, propertyMap.get
(property));
clusterConfiguration.setProperty(configType, property, propertyMap.get(property));
@@ -166,6 +165,36 @@ public class ClusterConfigurationRequest {
return updatedConfigTypes;
}
+ /**
+ * Returns true if the property exists in clusterConfigProperties and has a custom user-defined value. A property has
+ * a custom value in case there's no stack default value for it, or it's not equal to the stack default value.
+ * @param clusterConfigProperties
+ * @param stackDefaultConfigProperties
+ * @param property
+ * @return
+ */
+ private boolean propertyHasCustomValue(Map<String, String> clusterConfigProperties, Map<String, String>
+ stackDefaultConfigProperties, String property) {
+
+ boolean propertyHasCustomValue = false;
+ if (clusterConfigProperties != null) {
+ String propertyValue = clusterConfigProperties.get(property);
+ if (propertyValue != null) {
+ if (stackDefaultConfigProperties != null) {
+ String stackDefaultValue = stackDefaultConfigProperties.get(property);
+ if (stackDefaultValue != null) {
+ propertyHasCustomValue = !propertyValue.equals(stackDefaultValue);
+ } else {
+ propertyHasCustomValue = true;
+ }
+ } else {
+ propertyHasCustomValue = true;
+ }
+ }
+ }
+ return propertyHasCustomValue;
+ }
+
private Map<String, String> createComponentHostMap(Blueprint blueprint) {
Map<String, String> componentHostsMap = new HashMap<String, String>();
for (String service : blueprint.getServices()) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/4c31108b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java
index 8afff46..5967a64 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java
@@ -18,13 +18,18 @@
package org.apache.ambari.server.topology;
+import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.api.services.stackadvisor.StackAdvisorBlueprintProcessor;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.ConfigurationRequest;
import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.controller.internal.ConfigurationTopologyException;
import org.apache.ambari.server.controller.internal.Stack;
+import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurationException;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+import org.easymock.Capture;
+import org.easymock.CaptureType;
import org.easymock.EasyMockRule;
import org.easymock.Mock;
import org.easymock.MockType;
@@ -48,7 +53,10 @@ import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.newCapture;
import static org.easymock.EasyMock.verify;
+import static org.easymock.EasyMock.capture;
+import static org.junit.Assert.assertEquals;
/**
* ClusterConfigurationRequest unit tests
@@ -88,12 +96,82 @@ public class ClusterConfigurationRequestTest {
@Mock(type = MockType.NICE)
private KerberosHelper kerberosHelper;
+ /**
+ * testConfigType config type should be in updatedConfigTypes, as no custom property in Blueprint
+ * ==> Kerberos config property should be updated
+ * @throws Exception
+ */
@Test
- public void testProcessClusterConfigRequestIncludeKererosConfigs() throws Exception {
+ public void testProcessWithKerberos_UpdateKererosConfigProperty_WithNoCustomValue() throws Exception {
+
+ Capture<? extends Set<String>> captureUpdatedConfigTypes = testProcessWithKerberos(null, "defaultTestValue");
+
+ Set<String> updatedConfigTypes = captureUpdatedConfigTypes.getValue();
+ assertEquals(2, updatedConfigTypes.size());
+ }
+
+ /**
+ * testConfigType config type should be in updatedConfigTypes, as testProperty in Blueprint is equal to stack
+ * default ==> Kerberos config property should be updated
+ * @throws Exception
+ */
+ @Test
+ public void testProcessWithKerberos_UpdateKererosConfigProperty_WithCustomValueEqualToStackDefault() throws
+ Exception {
+
+ Capture<? extends Set<String>> captureUpdatedConfigTypes = testProcessWithKerberos("defaultTestValue", "defaultTestValue");
+
+ Set<String> updatedConfigTypes = captureUpdatedConfigTypes.getValue();
+ assertEquals(2, updatedConfigTypes.size());
+
+ }
+
+ /**
+ * testConfigType config type shouldn't be in updatedConfigTypes, as testProperty in Blueprint is different than the
+ * stack default (custom value) ==> Kerberos config property shouldn't be updated
+ * @throws Exception
+ */
+ @Test
+ public void testProcessWithKerberos_DontUpdateKererosConfigProperty_WithCustomValueDifferentThanStackDefault() throws
+ Exception {
+
+ Capture<? extends Set<String>> captureUpdatedConfigTypes = testProcessWithKerberos("testPropertyValue", "defaultTestValue");
+
+ Set<String> updatedConfigTypes = captureUpdatedConfigTypes.getValue();
+ assertEquals(1, updatedConfigTypes.size());
+ }
+
+ /**
+ * testConfigType config type shouldn't be in updatedConfigTypes, as testProperty in Blueprint is a custom value
+ * (no default value in stack for testProperty)
+ * ==> Kerberos config property shouldn't be updated
+ * @throws Exception
+ */
+ @Test
+ public void testProcessWithKerberos_DontUpdateKererosConfigProperty_WithCustomValueNoStackDefault() throws Exception {
+
+ Capture<? extends Set<String>> captureUpdatedConfigTypes = testProcessWithKerberos("testPropertyValue", null);
+
+ Set<String> updatedConfigTypes = captureUpdatedConfigTypes.getValue();
+ assertEquals(1, updatedConfigTypes.size());
+ }
+
+ private Capture<? extends Set<String>> testProcessWithKerberos(String blueprintPropertyValue, String
+ stackPropertyValue) throws AmbariException, KerberosInvalidConfigurationException, ConfigurationTopologyException {
+
Map<String, Map<String, String>> existingConfig = new HashMap<String, Map<String, String>>();
- Configuration stackConfig = new Configuration(existingConfig,
+ Configuration stackDefaultConfig = new Configuration(existingConfig,
new HashMap<String, Map<String, Map<String, String>>>());
+ if (stackPropertyValue != null) {
+ stackDefaultConfig.setProperty("testConfigType", "testProperty", stackPropertyValue);
+ }
+
+ Configuration blueprintConfig = new Configuration(stackDefaultConfig.getFullProperties(),
+ new HashMap<String, Map<String, Map<String, String>>>());
+ if (blueprintPropertyValue != null) {
+ blueprintConfig.setProperty("testConfigType", "testProperty", blueprintPropertyValue);
+ }
PowerMock.mockStatic(AmbariContext.class);
AmbariContext.getController();
@@ -116,7 +194,7 @@ public class ClusterConfigurationRequestTest {
services.add("KERBEROS");
services.add("ZOOKEPER");
expect(blueprint.getServices()).andReturn(services).anyTimes();
- expect(stack.getConfiguration(services)).andReturn(stackConfig).once();
+ expect(stack.getConfiguration(services)).andReturn(stackDefaultConfig).once();
List<String> hdfsComponents = new ArrayList<>();
hdfsComponents.add("NAMENODE");
@@ -131,7 +209,7 @@ public class ClusterConfigurationRequestTest {
expect(topology.getConfigRecommendationStrategy()).andReturn(ConfigRecommendationStrategy.NEVER_APPLY).anyTimes();
expect(topology.getBlueprint()).andReturn(blueprint).anyTimes();
- expect(topology.getConfiguration()).andReturn(stackConfig).anyTimes();
+ expect(topology.getConfiguration()).andReturn(blueprintConfig).anyTimes();
expect(topology.getHostGroupInfo()).andReturn(Collections.<String, HostGroupInfo>emptyMap());
expect(topology.getClusterId()).andReturn(Long.valueOf(1)).anyTimes();
expect(ambariContext.getClusterName(Long.valueOf(1))).andReturn("testCluster").anyTimes();
@@ -140,17 +218,22 @@ public class ClusterConfigurationRequestTest {
Map<String, Map<String, String>> kerberosConfig = new HashMap<String, Map<String, String>>();
Map<String, String> properties = new HashMap<>();
- properties.put("testPorperty", "testValue");
+ properties.put("testProperty", "KERBEROStestValue");
kerberosConfig.put("testConfigType", properties);
expect(kerberosHelper.ensureHeadlessIdentities(anyObject(Cluster.class), anyObject(Map.class), anyObject
(Set.class))).andReturn(true).once();
expect(kerberosHelper.getServiceConfigurationUpdates(anyObject(Cluster.class), anyObject(Map.class), anyObject
(Set.class), anyBoolean(), anyBoolean())).andReturn(kerberosConfig).once();
+ Capture<? extends String> captureClusterName = newCapture(CaptureType.ALL);
+ Capture<? extends Set<String>> captureUpdatedConfigTypes = newCapture(CaptureType.ALL);
+ ambariContext.waitForConfigurationResolution(capture(captureClusterName), capture
+ (captureUpdatedConfigTypes));
+ expectLastCall();
PowerMock.replay(stack, blueprint, topology, controller, clusters, kerberosHelper, ambariContext,
AmbariContext
- .class);
+ .class);
ClusterConfigurationRequest clusterConfigurationRequest = new ClusterConfigurationRequest(
ambariContext, topology, false, stackAdvisorBlueprintProcessor, true);
@@ -158,6 +241,10 @@ public class ClusterConfigurationRequestTest {
verify(blueprint, topology, ambariContext, controller, kerberosHelper);
+
+ String clusterName = captureClusterName.getValue();
+ assertEquals("testCluster", clusterName);
+ return captureUpdatedConfigTypes;
}
@Test
@@ -220,6 +307,4 @@ public class ClusterConfigurationRequestTest {
}
-
-
}
[13/19] ambari git commit: AMBARI-14982 : Change AMS configs to deal
with slow writes to WASB (avijayan)
Posted by yu...@apache.org.
AMBARI-14982 : Change AMS configs to deal with slow writes to WASB (avijayan)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9120682e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9120682e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9120682e
Branch: refs/heads/2.2.1-maint
Commit: 9120682ed5de26c6163c6bc07935782fb645ff32
Parents: 984d9e9
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Thu Feb 11 10:16:48 2016 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Thu Feb 11 10:16:48 2016 -0800
----------------------------------------------------------------------
.../timeline/HBaseTimelineMetricStore.java | 18 +++++-----
.../metrics/timeline/PhoenixHBaseAccessor.java | 21 ++++++-----
.../timeline/TimelineMetricConfiguration.java | 10 ++++++
.../0.1.0/configuration/ams-site.xml | 37 +++++++++++++++-----
.../stacks/HDP/2.0.6/services/stack_advisor.py | 5 ++-
.../stacks/2.2/common/test_stack_advisor.py | 5 ++-
6 files changed, 69 insertions(+), 27 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/9120682e/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
index c4e946a..8678071 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
@@ -142,14 +142,16 @@ public class HBaseTimelineMetricStore extends AbstractService implements Timelin
aggregatorDailyThread.start();
}
- int initDelay = configuration.getTimelineMetricsServiceWatcherInitDelay();
- int delay = configuration.getTimelineMetricsServiceWatcherDelay();
- // Start the watchdog
- executorService.scheduleWithFixedDelay(
- new TimelineMetricStoreWatcher(this, configuration), initDelay, delay,
- TimeUnit.SECONDS);
- LOG.info("Started watchdog for timeline metrics store with initial " +
- "delay = " + initDelay + ", delay = " + delay);
+ if (!configuration.isTimelineMetricsServiceWatcherDisabled()) {
+ int initDelay = configuration.getTimelineMetricsServiceWatcherInitDelay();
+ int delay = configuration.getTimelineMetricsServiceWatcherDelay();
+ // Start the watchdog
+ executorService.scheduleWithFixedDelay(
+ new TimelineMetricStoreWatcher(this, configuration), initDelay, delay,
+ TimeUnit.SECONDS);
+ LOG.info("Started watchdog for timeline metrics store with initial " +
+ "delay = " + initDelay + ", delay = " + delay);
+ }
isInitialized = true;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/9120682e/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index 4dc1a95..60ac06f 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -244,14 +244,14 @@ public class PhoenixHBaseAccessor {
String encoding = metricsConf.get(HBASE_ENCODING_SCHEME, DEFAULT_ENCODING);
String compression = metricsConf.get(HBASE_COMPRESSION_SCHEME, DEFAULT_TABLE_COMPRESSION);
- String precisionTtl = metricsConf.get(PRECISION_TABLE_TTL, "86400"); //1 day
- String hostMinTtl = metricsConf.get(HOST_MINUTE_TABLE_TTL, "604800"); //7 days
- String hostHourTtl = metricsConf.get(HOST_HOUR_TABLE_TTL, "2592000"); //30 days
- String hostDailyTtl = metricsConf.get(HOST_DAILY_TABLE_TTL, "31536000"); //1 year
- String clusterSecTtl = metricsConf.get(CLUSTER_SECOND_TABLE_TTL, "2592000"); //7 days
- String clusterMinTtl = metricsConf.get(CLUSTER_MINUTE_TABLE_TTL, "7776000"); //30 days
- String clusterHourTtl = metricsConf.get(CLUSTER_HOUR_TABLE_TTL, "31536000"); //1 year
- String clusterDailyTtl = metricsConf.get(CLUSTER_DAILY_TABLE_TTL, "63072000"); //2 years
+ String precisionTtl = getDaysInSeconds(metricsConf.get(PRECISION_TABLE_TTL, "1")); //1 day
+ String hostMinTtl = getDaysInSeconds(metricsConf.get(HOST_MINUTE_TABLE_TTL, "7")); //7 days
+ String hostHourTtl = getDaysInSeconds(metricsConf.get(HOST_HOUR_TABLE_TTL, "30")); //30 days
+ String hostDailyTtl = getDaysInSeconds(metricsConf.get(HOST_DAILY_TABLE_TTL, "365")); //1 year
+ String clusterSecTtl = getDaysInSeconds(metricsConf.get(CLUSTER_SECOND_TABLE_TTL, "7")); //7 days
+ String clusterMinTtl = getDaysInSeconds(metricsConf.get(CLUSTER_MINUTE_TABLE_TTL, "30")); //30 days
+ String clusterHourTtl = getDaysInSeconds(metricsConf.get(CLUSTER_HOUR_TABLE_TTL, "365")); //1 year
+ String clusterDailyTtl = getDaysInSeconds(metricsConf.get(CLUSTER_DAILY_TABLE_TTL, "730")); //2 years
try {
LOG.info("Initializing metrics schema...");
@@ -1023,4 +1023,9 @@ public class PhoenixHBaseAccessor {
"thread = " + Thread.currentThread().getName());
}
}
+
+ private String getDaysInSeconds(String daysString) {
+ double days = Double.valueOf(daysString.trim());
+ return String.valueOf((int)(days*86400));
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/9120682e/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
index 6311f9f..c0ab6f1 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
@@ -181,6 +181,9 @@ public class TimelineMetricConfiguration {
public static final String HANDLER_THREAD_COUNT =
"timeline.metrics.service.handler.thread.count";
+ public static final String WATCHER_DISABLED =
+ "timeline.metrics.service.watcher.disabled";
+
public static final String WATCHER_INITIAL_DELAY =
"timeline.metrics.service.watcher.initial.delay";
@@ -261,6 +264,13 @@ public class TimelineMetricConfiguration {
return 20;
}
+ public boolean isTimelineMetricsServiceWatcherDisabled() {
+ if (metricsConf != null) {
+ return Boolean.parseBoolean(metricsConf.get(WATCHER_DISABLED, "false"));
+ }
+ return false;
+ }
+
public int getTimelineMetricsServiceWatcherInitDelay() {
if (metricsConf != null) {
return Integer.parseInt(metricsConf.get(WATCHER_INITIAL_DELAY, "600"));
http://git-wip-us.apache.org/repos/asf/ambari/blob/9120682e/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
index 8173743..5f66063 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
@@ -283,59 +283,65 @@
</property>
<property>
<name>timeline.metrics.host.aggregator.daily.ttl</name>
- <value>31536000</value>
+ <value>365</value>
<description>
Host based daily resolution data purge interval. Default is 1 year.
</description>
</property>
<property>
<name>timeline.metrics.host.aggregator.hourly.ttl</name>
- <value>2592000</value>
+ <value>30</value>
<description>
Host based hourly resolution data purge interval. Default is 30 days.
</description>
</property>
<property>
<name>timeline.metrics.host.aggregator.minute.ttl</name>
- <value>604800</value>
+ <value>7</value>
<description>
Host based minute resolution data purge interval. Default is 7 days.
</description>
</property>
<property>
<name>timeline.metrics.cluster.aggregator.second.ttl</name>
- <value>2592000</value>
+ <value>7</value>
<description>
Cluster wide second resolution data purge interval. Default is 7 days.
</description>
</property>
<property>
<name>timeline.metrics.cluster.aggregator.minute.ttl</name>
- <value>7776000</value>
+ <value>30</value>
<description>
Cluster wide minute resolution data purge interval. Default is 30 days.
</description>
</property>
<property>
<name>timeline.metrics.cluster.aggregator.hourly.ttl</name>
- <value>31536000</value>
+ <value>365</value>
<description>
Cluster wide hourly resolution data purge interval. Default is 1 year.
</description>
</property>
<property>
<name>timeline.metrics.cluster.aggregator.daily.ttl</name>
- <value>63072000</value>
+ <value>730</value>
<description>
Cluster wide daily resolution data purge interval. Default is 2 years.
</description>
</property>
<property>
<name>timeline.metrics.host.aggregator.ttl</name>
- <value>86400</value>
+ <value>1</value>
<description>
- 1 minute resolution data purge interval. Default is 1 day.
+ 1 minute resolution data purge interval. Default is 1 day for embedded mode and 7 days for Distributed mode.
</description>
+ <depends-on>
+ <property>
+ <type>ams-site</type>
+ <name>timeline.metrics.service.operation.mode</name>
+ </property>
+ </depends-on>
</property>
<property>
<name>timeline.metrics.hbase.data.block.encoding</name>
@@ -492,6 +498,19 @@
</description>
</property>
<property>
+ <name>timeline.metrics.service.watcher.disabled</name>
+ <value>false</value>
+ <description>
+ Disable Timeline Metric Store watcher thread. Disabled by default in AMS distributed mode.
+ </description>
+ <depends-on>
+ <property>
+ <type>ams-site</type>
+ <name>timeline.metrics.service.operation.mode</name>
+ </property>
+ </depends-on>
+ </property>
+ <property>
<name>timeline.metrics.service.watcher.initial.delay</name>
<value>600</value>
<description>
http://git-wip-us.apache.org/repos/asf/ambari/blob/9120682e/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index e8df97b..07cc73b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -470,8 +470,12 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
operatingMode = services["configurations"]["ams-site"]["properties"]["timeline.metrics.service.operation.mode"]
if operatingMode == "distributed":
+ putAmsSiteProperty("timeline.metrics.service.watcher.disabled", 'true')
+ putAmsSiteProperty("timeline.metrics.host.aggregator.ttl", 7)
putAmsHbaseSiteProperty("hbase.cluster.distributed", 'true')
else:
+ putAmsSiteProperty("timeline.metrics.service.watcher.disabled", 'false')
+ putAmsSiteProperty("timeline.metrics.host.aggregator.ttl", 1)
putAmsHbaseSiteProperty("hbase.cluster.distributed", 'false')
rootDir = "file:///var/lib/ambari-metrics-collector/hbase"
@@ -527,7 +531,6 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
putAmsHbaseSiteProperty("hbase.hregion.memstore.flush.size", 134217728)
putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.upperLimit", 0.35)
putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.lowerLimit", 0.3)
- putAmsSiteProperty("timeline.metrics.host.aggregator.ttl", 86400)
if len(amsCollectorHosts) > 1:
pass
http://git-wip-us.apache.org/repos/asf/ambari/blob/9120682e/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index 86d560a..6984e0e 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -2011,7 +2011,8 @@ class TestHDP22StackAdvisor(TestCase):
"properties": {
"timeline.metrics.cluster.aggregate.splitpoints": " ",
"timeline.metrics.host.aggregate.splitpoints": " ",
- "timeline.metrics.host.aggregator.ttl": "86400"
+ "timeline.metrics.host.aggregator.ttl": "1",
+ 'timeline.metrics.service.watcher.disabled': 'false'
}
}
}
@@ -2189,6 +2190,8 @@ class TestHDP22StackAdvisor(TestCase):
expected['ams-hbase-env']['properties']['hbase_regionserver_heapsize'] = '512'
expected["ams-hbase-env"]['properties']['hbase_master_xmn_size'] = '102'
expected['ams-hbase-env']['properties']['regionserver_xmn_size'] = '384'
+ expected['ams-site']['properties']['timeline.metrics.host.aggregator.ttl'] = '7'
+ expected['ams-site']['properties']['timeline.metrics.service.watcher.disabled'] = 'true'
self.stackAdvisor.recommendAmsConfigurations(configurations, clusterData, services, hosts)
self.assertEquals(configurations, expected)
[16/19] ambari git commit: AMBARI-14715. TimelineServer configuration
is missing in yarn-env.xml. (Akira Ajisaka via yusaku)
Posted by yu...@apache.org.
AMBARI-14715. TimelineServer configuration is missing in yarn-env.xml. (Akira Ajisaka via yusaku)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/116f016f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/116f016f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/116f016f
Branch: refs/heads/2.2.1-maint
Commit: 116f016f6b7d59fd59eb6cee245f141966846199
Parents: 2ceaced
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Thu Feb 11 19:10:13 2016 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Feb 11 19:11:18 2016 -0800
----------------------------------------------------------------------
.../stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/116f016f/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
index 699463b..74fd4f6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
@@ -102,14 +102,14 @@
# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
export YARN_NODEMANAGER_HEAPSIZE={{nodemanager_heapsize}}
- # Specify the max Heapsize for the HistoryManager using a numerical value
+ # Specify the max Heapsize for the timeline server using a numerical value
# in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set
# the value to 1024.
# This value will be overridden by an Xmx setting specified in either YARN_OPTS
- # and/or YARN_HISTORYSERVER_OPTS.
+ # and/or YARN_TIMELINESERVER_OPTS.
# If not specified, the default value will be picked from either YARN_HEAPMAX
# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
- export YARN_HISTORYSERVER_HEAPSIZE={{apptimelineserver_heapsize}}
+ export YARN_TIMELINESERVER_HEAPSIZE={{apptimelineserver_heapsize}}
# Specify the JVM options to be used when starting the NodeManager.
# These options will be appended to the options specified as YARN_OPTS
[12/19] ambari git commit: AMBARI-14989. Inconsistent HIVE CBO
setting presentation. (Jaimin)
Posted by yu...@apache.org.
AMBARI-14989. Inconsistent HIVE CBO setting presentation. (Jaimin)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/984d9e90
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/984d9e90
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/984d9e90
Branch: refs/heads/2.2.1-maint
Commit: 984d9e90b28da343d4fb80abfdceca3ce6b4dc65
Parents: 9df0339
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Thu Feb 11 15:02:09 2016 +0530
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Feb 11 07:57:10 2016 -0800
----------------------------------------------------------------------
.../services/HIVE/configuration/hive-env.xml | 20 ---------
.../services/HIVE/configuration/hive-site.xml | 14 ------
.../stacks/HDP/2.2/services/stack_advisor.py | 13 +++---
.../services/HIVE/configuration/hive-env.xml | 20 ---------
.../services/HIVE/configuration/hive-site.xml | 8 ----
.../stacks/HDPWIN/2.2/services/stack_advisor.py | 13 +++---
.../2.1/configs/hive-metastore-upgrade.json | 3 +-
.../stacks/2.2/common/test_stack_advisor.py | 12 +++--
.../2.3/common/services-sparkts-hive.json | 47 --------------------
.../stacks/2.3/common/test_stack_advisor.py | 7 ++-
10 files changed, 19 insertions(+), 138 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/984d9e90/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml
index f84bc01..77eace4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml
@@ -127,26 +127,6 @@ export METASTORE_PORT={{hive_metastore_port}}
</property>
<property>
- <name>cost_based_optimizer</name>
- <display-name>Cost Based Optimizer</display-name>
- <value>Off</value>
- <value-attributes>
- <type>value-list</type>
- <entries>
- <entry>
- <value>On</value>
- <label>On</label>
- </entry>
- <entry>
- <value>Off</value>
- <label>Off</label>
- </entry>
- </entries>
- <selection-cardinality>1</selection-cardinality>
- </value-attributes>
- </property>
-
- <property>
<name>hive_security_authorization</name>
<display-name>Choose Authorization</display-name>
<value>None</value>
http://git-wip-us.apache.org/repos/asf/ambari/blob/984d9e90/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
index cc3e75f..7d6c490 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
@@ -450,12 +450,6 @@ limitations under the License.
<name>hive.cbo.enable</name>
<value>true</value>
<description>Flag to control enabling Cost Based Optimizations using Calcite framework.</description>
- <depends-on>
- <property>
- <type>hive-env</type>
- <name>cost_based_optimizer</name>
- </property>
- </depends-on>
</property>
<property>
@@ -799,10 +793,6 @@ limitations under the License.
</value-attributes>
<depends-on>
<property>
- <type>hive-env</type>
- <name>cost_based_optimizer</name>
- </property>
- <property>
<type>hive-site</type>
<name>hive.cbo.enable</name>
</property>
@@ -834,10 +824,6 @@ limitations under the License.
</value-attributes>
<depends-on>
<property>
- <type>hive-env</type>
- <name>cost_based_optimizer</name>
- </property>
- <property>
<type>hive-site</type>
<name>hive.cbo.enable</name>
</property>
http://git-wip-us.apache.org/repos/asf/ambari/blob/984d9e90/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index 9be6410..3ebb25f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -347,14 +347,11 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
putHiveSiteProperty("hive.exec.reducers.bytes.per.reducer", "67108864")
# CBO
- putHiveEnvProperty("cost_based_optimizer", "On")
- if str(configurations["hive-env"]["properties"]["cost_based_optimizer"]).lower() == "on":
- putHiveSiteProperty("hive.cbo.enable", "true")
- else:
- putHiveSiteProperty("hive.cbo.enable", "false")
- hive_cbo_enable = configurations["hive-site"]["properties"]["hive.cbo.enable"]
- putHiveSiteProperty("hive.stats.fetch.partition.stats", hive_cbo_enable)
- putHiveSiteProperty("hive.stats.fetch.column.stats", hive_cbo_enable)
+ if "hive-site" in services["configurations"] and "hive.cbo.enable" in services["configurations"]["hive-site"]["properties"]:
+ hive_cbo_enable = services["configurations"]["hive-site"]["properties"]["hive.cbo.enable"]
+ putHiveSiteProperty("hive.stats.fetch.partition.stats", hive_cbo_enable)
+ putHiveSiteProperty("hive.stats.fetch.column.stats", hive_cbo_enable)
+
putHiveSiteProperty("hive.compute.query.using.stats", "true")
# Interactive Query
http://git-wip-us.apache.org/repos/asf/ambari/blob/984d9e90/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-env.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-env.xml
index 3d1b549..b5a9124 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-env.xml
@@ -74,26 +74,6 @@
</property>
<property>
- <name>cost_based_optimizer</name>
- <display-name>Cost Based Optimizer</display-name>
- <value>Off</value>
- <value-attributes>
- <type>value-list</type>
- <entries>
- <entry>
- <value>On</value>
- <label>On</label>
- </entry>
- <entry>
- <value>Off</value>
- <label>Off</label>
- </entry>
- </entries>
- <selection-cardinality>1</selection-cardinality>
- </value-attributes>
- </property>
-
- <property>
<name>hive_security_authorization</name>
<display-name>Choose Authorization</display-name>
<value>None</value>
http://git-wip-us.apache.org/repos/asf/ambari/blob/984d9e90/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
index 7e13436..8acc4dc 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HIVE/configuration/hive-site.xml
@@ -772,10 +772,6 @@ limitations under the License.
</value-attributes>
<depends-on>
<property>
- <type>hive-env</type>
- <name>cost_based_optimizer</name>
- </property>
- <property>
<type>hive-site</type>
<name>hive.cbo.enable</name>
</property>
@@ -807,10 +803,6 @@ limitations under the License.
</value-attributes>
<depends-on>
<property>
- <type>hive-env</type>
- <name>cost_based_optimizer</name>
- </property>
- <property>
<type>hive-site</type>
<name>hive.cbo.enable</name>
</property>
http://git-wip-us.apache.org/repos/asf/ambari/blob/984d9e90/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
index c6941f0..bf50aac 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py
@@ -360,14 +360,11 @@ class HDPWIN22StackAdvisor(HDPWIN21StackAdvisor):
putHiveSiteProperty("hive.exec.reducers.bytes.per.reducer", "67108864")
# CBO
- putHiveEnvProperty("cost_based_optimizer", "On")
- if str(configurations["hive-env"]["properties"]["cost_based_optimizer"]).lower() == "on":
- putHiveSiteProperty("hive.cbo.enable", "true")
- else:
- putHiveSiteProperty("hive.cbo.enable", "false")
- hive_cbo_enable = configurations["hive-site"]["properties"]["hive.cbo.enable"]
- putHiveSiteProperty("hive.stats.fetch.partition.stats", hive_cbo_enable)
- putHiveSiteProperty("hive.stats.fetch.column.stats", hive_cbo_enable)
+ if "hive-site" in services["configurations"] and "hive.cbo.enable" in services["configurations"]["hive-site"]["properties"]:
+ hive_cbo_enable = services["configurations"]["hive-site"]["properties"]["hive.cbo.enable"]
+ putHiveSiteProperty("hive.stats.fetch.partition.stats", hive_cbo_enable)
+ putHiveSiteProperty("hive.stats.fetch.column.stats", hive_cbo_enable)
+
putHiveSiteProperty("hive.compute.query.using.stats", "true")
# Interactive Query
http://git-wip-us.apache.org/repos/asf/ambari/blob/984d9e90/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json b/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
index 7829f05..0fbc282 100644
--- a/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.1/configs/hive-metastore-upgrade.json
@@ -77,8 +77,7 @@
"hive_user": "hive",
"hive_database": "New MySQL Database",
"hive.heapsize": "703",
- "hcat_pid_dir": "/var/run/webhcat",
- "cost_based_optimizer": "On",
+ "hcat_pid_dir": "/var/run/webhcat",
"webhcat_user": "hcat",
"hive_security_authorization": "None",
"hive_exec_orc_storage_strategy": "SPEED",
http://git-wip-us.apache.org/repos/asf/ambari/blob/984d9e90/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index 904688a..86d560a 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -983,7 +983,6 @@ class TestHDP22StackAdvisor(TestCase):
},
'hive-env': {
'properties': {
- 'cost_based_optimizer': 'On',
'hive_exec_orc_storage_strategy': 'SPEED',
'hive_security_authorization': 'None',
'hive_timeline_logging_enabled': 'true',
@@ -997,7 +996,6 @@ class TestHDP22StackAdvisor(TestCase):
'hive.server2.tez.initialize.default.sessions': 'false',
'hive.server2.tez.sessions.per.default.queue': '1',
'hive.auto.convert.join.noconditionaltask.size': '268435456',
- 'hive.cbo.enable': 'true',
'hive.compactor.initiator.on': 'false',
'hive.compactor.worker.threads': '0',
'hive.compute.query.using.stats': 'true',
@@ -1140,7 +1138,8 @@ class TestHDP22StackAdvisor(TestCase):
"hive.server2.authentication.kerberos.keytab": "",
"hive.server2.authentication.kerberos.principal": "",
"hive.server2.authentication.pam.services": "",
- "hive.server2.custom.authentication.class": ""
+ "hive.server2.custom.authentication.class": "",
+ "hive.cbo.enable": "true"
}
},
"hiveserver2-site": {
@@ -1215,7 +1214,8 @@ class TestHDP22StackAdvisor(TestCase):
"hive.server2.authentication.kerberos.keytab": "",
"hive.server2.authentication.kerberos.principal": "",
"hive.server2.authentication.pam.services": "",
- "hive.server2.custom.authentication.class": ""
+ "hive.server2.custom.authentication.class": "",
+ "hive.cbo.enable": "true"
}
},
"hiveserver2-site": {
@@ -1279,10 +1279,8 @@ class TestHDP22StackAdvisor(TestCase):
#test recommendations
services["configurations"]["hive-site"]["properties"]["hive.cbo.enable"] = "false"
services["configurations"]["hive-env"]["properties"]["hive_security_authorization"] = "sqlstdauth"
- services["changed-configurations"] = [{"type": "hive-site", "name": "hive.cbo.enable"},
- {"type": "hive-env", "name": "hive_security_authorization"}]
+ services["changed-configurations"] = [{"type": "hive-env", "name": "hive_security_authorization"}]
expected["hive-env"]["properties"]["hive_security_authorization"] = "sqlstdauth"
- expected["hive-site"]["properties"]["hive.cbo.enable"] = "false"
expected["hive-site"]["properties"]["hive.stats.fetch.partition.stats"]="false"
expected["hive-site"]["properties"]["hive.stats.fetch.column.stats"]="false"
expected["hive-site"]["properties"]["hive.security.authorization.enabled"]="true"
http://git-wip-us.apache.org/repos/asf/ambari/blob/984d9e90/ambari-server/src/test/python/stacks/2.3/common/services-sparkts-hive.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/services-sparkts-hive.json b/ambari-server/src/test/python/stacks/2.3/common/services-sparkts-hive.json
index 51ae36a..20a672e 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/services-sparkts-hive.json
+++ b/ambari-server/src/test/python/stacks/2.3/common/services-sparkts-hive.json
@@ -2402,47 +2402,6 @@
},
"dependencies" : [ ]
}, {
- "href" : "/api/v1/stacks/HDP/versions/2.3/services/HIVE/configurations/cost_based_optimizer",
- "StackConfigurations" : {
- "property_depends_on" : [ ],
- "property_name" : "cost_based_optimizer",
- "service_name" : "HIVE",
- "stack_name" : "HDP",
- "stack_version" : "2.3",
- "type" : "hive-env.xml"
- },
- "dependencies" : [ {
- "href" : "/api/v1/stacks/HDP/versions/2.3/services/HIVE/configurations/cost_based_optimizer/dependencies/hive.cbo.enable",
- "StackConfigurationDependency" : {
- "dependency_name" : "hive.cbo.enable",
- "dependency_type" : "hive-site",
- "property_name" : "cost_based_optimizer",
- "service_name" : "HIVE",
- "stack_name" : "HDP",
- "stack_version" : "2.3"
- }
- }, {
- "href" : "/api/v1/stacks/HDP/versions/2.3/services/HIVE/configurations/cost_based_optimizer/dependencies/hive.stats.fetch.column.stats",
- "StackConfigurationDependency" : {
- "dependency_name" : "hive.stats.fetch.column.stats",
- "dependency_type" : "hive-site",
- "property_name" : "cost_based_optimizer",
- "service_name" : "HIVE",
- "stack_name" : "HDP",
- "stack_version" : "2.3"
- }
- }, {
- "href" : "/api/v1/stacks/HDP/versions/2.3/services/HIVE/configurations/cost_based_optimizer/dependencies/hive.stats.fetch.partition.stats",
- "StackConfigurationDependency" : {
- "dependency_name" : "hive.stats.fetch.partition.stats",
- "dependency_type" : "hive-site",
- "property_name" : "cost_based_optimizer",
- "service_name" : "HIVE",
- "stack_name" : "HDP",
- "stack_version" : "2.3"
- }
- } ]
- }, {
"href" : "/api/v1/stacks/HDP/versions/2.3/services/HIVE/configurations/datanucleus.autoCreateSchema",
"StackConfigurations" : {
"property_depends_on" : [ ],
@@ -4199,9 +4158,6 @@
"href" : "/api/v1/stacks/HDP/versions/2.3/services/HIVE/configurations/hive.stats.fetch.column.stats",
"StackConfigurations" : {
"property_depends_on" : [ {
- "type" : "hive-env",
- "name" : "cost_based_optimizer"
- }, {
"type" : "hive-site",
"name" : "hive.cbo.enable"
} ],
@@ -4216,9 +4172,6 @@
"href" : "/api/v1/stacks/HDP/versions/2.3/services/HIVE/configurations/hive.stats.fetch.partition.stats",
"StackConfigurations" : {
"property_depends_on" : [ {
- "type" : "hive-env",
- "name" : "cost_based_optimizer"
- }, {
"type" : "hive-site",
"name" : "hive.cbo.enable"
} ],
http://git-wip-us.apache.org/repos/asf/ambari/blob/984d9e90/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
index 1e6a1eb..d14e23e 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -870,7 +870,6 @@ class TestHDP23StackAdvisor(TestCase):
},
'hive-env': {
'properties': {
- 'cost_based_optimizer': 'On',
'hive_exec_orc_storage_strategy': 'SPEED',
'hive_security_authorization': 'None',
'hive_timeline_logging_enabled': 'true',
@@ -884,7 +883,6 @@ class TestHDP23StackAdvisor(TestCase):
'hive.server2.tez.initialize.default.sessions': 'false',
'hive.server2.tez.sessions.per.default.queue': '1',
'hive.auto.convert.join.noconditionaltask.size': '268435456',
- 'hive.cbo.enable': 'true',
'hive.compactor.initiator.on': 'false',
'hive.compactor.worker.threads': '0',
'hive.compute.query.using.stats': 'true',
@@ -1028,7 +1026,8 @@ class TestHDP23StackAdvisor(TestCase):
"hive.server2.authentication.kerberos.keytab": "",
"hive.server2.authentication.kerberos.principal": "",
"hive.server2.authentication.pam.services": "",
- "hive.server2.custom.authentication.class": ""
+ "hive.server2.custom.authentication.class": "",
+ "hive.cbo.enable": "true"
}
},
"hiveserver2-site": {
@@ -1508,4 +1507,4 @@ class TestHDP23StackAdvisor(TestCase):
recommendedConfigurations = {}
self.stackAdvisor.recommendRangerConfigurations(recommendedConfigurations, clusterData, services, None)
- self.assertEquals(recommendedConfigurations['ranger-admin-site']['properties']['ranger.audit.solr.zookeepers'], 'NONE')
\ No newline at end of file
+ self.assertEquals(recommendedConfigurations['ranger-admin-site']['properties']['ranger.audit.solr.zookeepers'], 'NONE')
[05/19] ambari git commit: AMBARI-14786. Localhost configuration
value for multiple host properties does not work properly. (Daniel Gergely
via rnettleton)
Posted by yu...@apache.org.
AMBARI-14786. Localhost configuration value for multiple host properties does not work properly. (Daniel Gergely via rnettleton)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/81c58bbb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/81c58bbb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/81c58bbb
Branch: refs/heads/2.2.1-maint
Commit: 81c58bbb6d036e2ca98c6312157f36f6cf7c3afd
Parents: 90c0293
Author: Bob Nettleton <rn...@hortonworks.com>
Authored: Thu Feb 4 14:35:11 2016 -0500
Committer: Bob Nettleton <rn...@hortonworks.com>
Committed: Thu Feb 4 14:35:11 2016 -0500
----------------------------------------------------------------------
.../BlueprintConfigurationProcessor.java | 206 ++++++++++------
.../BlueprintConfigurationProcessorTest.java | 243 ++++++++++++++++++-
2 files changed, 370 insertions(+), 79 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/81c58bbb/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index 93afe0d..82cff93 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -125,6 +125,11 @@ public class BlueprintConfigurationProcessor {
private static Pattern HOSTGROUP_PORT_REGEX = Pattern.compile("%HOSTGROUP::(\\S+?)%:?(\\d+)?");
/**
+ * Compiled regex for hostgroup token with port information.
+ */
+ private static Pattern LOCALHOST_PORT_REGEX = Pattern.compile("localhost:?(\\d+)?");
+
+ /**
* Statically-defined set of properties that can support HA using a nameservice name
* in the configuration, rather than just a host name.
* This set also contains other HA properties that will be exported if the
@@ -1578,7 +1583,9 @@ public class BlueprintConfigurationProcessor {
*/
private final boolean usePrefixForEachHost;
- private final Set<String> setOfKnownURLSchemes = Collections.singleton("thrift://");
+ private final boolean useSuffixForEachHost;
+
+ private final boolean usePortForEachHost;
/**
* Constructor.
@@ -1586,7 +1593,7 @@ public class BlueprintConfigurationProcessor {
* @param component component name associated with the property
*/
public MultipleHostTopologyUpdater(String component) {
- this(component, DEFAULT_SEPARATOR, false);
+ this(component, DEFAULT_SEPARATOR, false, false, true);
}
/**
@@ -1596,10 +1603,12 @@ public class BlueprintConfigurationProcessor {
* @param separator the separator character to use when multiple hosts
* are specified in a property or URL
*/
- public MultipleHostTopologyUpdater(String component, Character separator, boolean userPrefixForEachHost) {
+ public MultipleHostTopologyUpdater(String component, Character separator, boolean usePrefixForEachHost, boolean useSuffixForEachHost, boolean usePortForEachHost) {
this.component = component;
this.separator = separator;
- this.usePrefixForEachHost = userPrefixForEachHost;
+ this.usePrefixForEachHost = usePrefixForEachHost;
+ this.useSuffixForEachHost = useSuffixForEachHost;
+ this.usePortForEachHost = usePortForEachHost;
}
/**
@@ -1620,36 +1629,101 @@ public class BlueprintConfigurationProcessor {
StringBuilder sb = new StringBuilder();
- if (!origValue.contains("%HOSTGROUP") &&
- (!origValue.contains("localhost"))) {
+ if (!origValue.contains("%HOSTGROUP") && (!origValue.contains("localhost"))) {
// this property must contain FQDNs specified directly by the user
// of the Blueprint, so the processor should not attempt to update them
return origValue;
}
- if (origValue.contains("localhost") && topology.getHostGroupsForComponent(component).size() == 1) {
- return origValue.replace("localhost", topology.getHostAssignmentsForComponent(component).iterator().next());
+ Collection<String> hostStrings = getHostStrings(origValue, topology);
+ hostStrings.addAll(getHostStringsFromLocalhost(origValue, topology));
+
+ return resolveHostGroupPlaceholder(origValue, hostStrings);
+ }
+
+ /**
+ * Gets the prefix for hosts
+ * @param value property value
+ * @return prefix
+ */
+ private String getPrefix(String value) {
+ Matcher localhostMatcher = LOCALHOST_PORT_REGEX.matcher(value);
+ Matcher hostGroupMatcher = HOSTGROUP_PORT_REGEX.matcher(value);
+ String prefixCandidate = null;
+
+ if(localhostMatcher.find()) {
+ prefixCandidate = value.substring(0,localhostMatcher.start());
+ } else if(hostGroupMatcher.find()) {
+ prefixCandidate = value.substring(0,hostGroupMatcher.start());
+ } else {
+ return prefixCandidate;
}
- String prefix = null;
- Collection<String> hostStrings = getHostStrings(origValue, topology);
- if (hostStrings.isEmpty()) {
- //default non-exported original value
- String port;
- for (String urlScheme : setOfKnownURLSchemes) {
- if (origValue.startsWith(urlScheme)) {
- prefix = urlScheme;
- }
- }
+ // remove YAML array notation
+ if(prefixCandidate.startsWith("[")) {
+ prefixCandidate = prefixCandidate.substring(1);
+ }
+ // remove YAML string notation
+ if(prefixCandidate.startsWith("'")) {
+ prefixCandidate = prefixCandidate.substring(1);
+ }
- if (prefix != null) {
- String valueWithoutPrefix = origValue.substring(prefix.length());
- port = calculatePort(valueWithoutPrefix);
- sb.append(prefix);
- } else {
- port = calculatePort(origValue);
- }
+ return prefixCandidate;
+ }
+
+ /**
+ * Gets the suffix for hosts
+ * @param value property value
+ * @return suffix
+ */
+ private String getSuffix(String value) {
+ Matcher localhostMatcher = LOCALHOST_PORT_REGEX.matcher(value);
+ Matcher hostGroupMatcher = HOSTGROUP_PORT_REGEX.matcher(value);
+
+
+ Matcher activeMatcher = null;
+ if(localhostMatcher.find()) {
+ activeMatcher = localhostMatcher;
+ } else if(hostGroupMatcher.find()) {
+ activeMatcher = hostGroupMatcher;
+ } else {
+ return null;
+ }
+
+ String suffixCandidate = null;
+ int indexOfEnd;
+ do {
+ indexOfEnd = activeMatcher.end();
+ } while (activeMatcher.find());
+ suffixCandidate = value.substring(indexOfEnd);
+
+ // remove YAML array notation
+ if(suffixCandidate.endsWith("]")) {
+ suffixCandidate = suffixCandidate.substring(0, suffixCandidate.length()-1);
+ }
+ // remove YAML string notation
+ if(suffixCandidate.endsWith("'")) {
+ suffixCandidate = suffixCandidate.substring(0, suffixCandidate.length()-1);
+ }
+
+ return suffixCandidate;
+ }
+
+ /**
+ * Resolves localhost value to "host:port" elements (port is optional)
+ * @param origValue property value
+ * @param topology cluster topology
+ * @return list of hosts that have the given components
+ */
+ private Collection<String> getHostStringsFromLocalhost(String origValue, ClusterTopology topology) {
+ Set<String> hostStrings = new HashSet<String>();
+ if(origValue.contains("localhost")) {
+ Matcher localhostMatcher = LOCALHOST_PORT_REGEX.matcher(origValue);
+ String port = null;
+ if(localhostMatcher.find()) {
+ port = calculatePort(localhostMatcher.group());
+ }
for (String host : topology.getHostAssignmentsForComponent(component)) {
if (port != null) {
host += ":" + port;
@@ -1657,62 +1731,52 @@ public class BlueprintConfigurationProcessor {
hostStrings.add(host);
}
}
-
- return sb.append(resolveHostGroupPlaceholder(origValue, prefix, hostStrings)).toString();
+ return hostStrings;
}
/**
* Resolves the host group place holders in the passed in original value.
* @param originalValue The original value containing the place holders to be resolved.
- * @param prefix The prefix to be added to the returned value.
* @param hostStrings The collection of host names that are mapped to the host groups to be resolved
* @return The new value with place holders resolved.
*/
- protected String resolveHostGroupPlaceholder(String originalValue, String prefix, Collection<String> hostStrings) {
- String suffix = null;
- StringBuilder sb = new StringBuilder();
+ protected String resolveHostGroupPlaceholder(String originalValue, Collection<String> hostStrings) {
+ String prefix = getPrefix(originalValue);
+ String suffix = getSuffix(originalValue);
+ String port = removePorts(hostStrings);
- // parse out prefix if one exists
- Matcher matcher = HOSTGROUP_PORT_REGEX.matcher(originalValue);
- if (matcher.find()) {
- int indexOfStart = matcher.start();
- // handle the case of a YAML config property
- if ((indexOfStart > 0) && (!originalValue.substring(0, indexOfStart).equals("['")) && (!originalValue.substring(0, indexOfStart).equals("[")) ) {
- // append prefix before adding host names
- prefix = originalValue.substring(0, indexOfStart);
- sb.append(prefix);
- }
+ String sep = (useSuffixForEachHost ? suffix : "") + separator + (usePrefixForEachHost ? prefix : "");
+ String combinedHosts = (usePrefixForEachHost ? prefix : "") + StringUtils.join(hostStrings, sep);
- // parse out suffix if one exists
- int indexOfEnd;
- do {
- indexOfEnd = matcher.end();
- } while (matcher.find());
+ return (usePrefixForEachHost ? "" : prefix) + combinedHosts + (usePortForEachHost || port == null ? "" : ":" + port) + suffix;
+ }
- if (indexOfEnd < (originalValue.length())) {
- suffix = originalValue.substring(indexOfEnd);
- }
- }
+ /**
+ * Removes "port" part of the hosts and returns it
+ * @param hostStrings list of "host:port" strings (port is optional)
+ * @return the port
+ */
+ private String removePorts(Collection<String> hostStrings) {
+ String port = null;
+ if(!usePortForEachHost && !hostStrings.isEmpty()) {
+ Set<String> temp = new HashSet<String>();
- // add hosts to property, using the specified separator
- boolean firstHost = true;
- for (String host : hostStrings) {
- if (!firstHost) {
- sb.append(separator);
- // support config properties that use a list of full URIs
- if (usePrefixForEachHost && (prefix != null)) {
- sb.append(prefix);
+ // extract port
+ Iterator<String> i = hostStrings.iterator();
+ do {
+ port = calculatePort(i.next());
+ } while (i.hasNext() && port == null);
+
+ // update hosts
+ if(port != null) {
+ for(String host : hostStrings) {
+ temp.add(host.replace(":"+port,""));
}
- } else {
- firstHost = false;
}
- sb.append(host);
+ hostStrings.clear();
+ hostStrings.addAll(temp);
}
-
- if ((suffix != null) && (!suffix.equals("']")) && (!suffix.equals("]")) ) {
- sb.append(suffix);
- }
- return sb.toString();
+ return port;
}
private static String calculatePort(String origValue) {
@@ -1983,7 +2047,7 @@ public class BlueprintConfigurationProcessor {
TempletonHivePropertyUpdater() {
// the only known property that requires hostname substitution is hive.metastore.uris,
// but this updater should be flexible enough for other properties in the future.
- mapOfKeysToUpdaters.put("hive.metastore.uris", new MultipleHostTopologyUpdater("HIVE_METASTORE", ',', true));
+ mapOfKeysToUpdaters.put("hive.metastore.uris", new MultipleHostTopologyUpdater("HIVE_METASTORE", ',', true, false, true));
}
@Override
@@ -2172,8 +2236,8 @@ public class BlueprintConfigurationProcessor {
hbaseSiteMap.put("hbase.rootdir", new SingleHostTopologyUpdater("NAMENODE"));
accumuloSiteMap.put("instance.volumes", new SingleHostTopologyUpdater("NAMENODE"));
// HDFS shared.edits JournalNode Quorum URL uses semi-colons as separators
- multiHdfsSiteMap.put("dfs.namenode.shared.edits.dir", new MultipleHostTopologyUpdater("JOURNALNODE", ';', false));
- multiHdfsSiteMap.put("dfs.encryption.key.provider.uri", new MultipleHostTopologyUpdater("RANGER_KMS_SERVER", ';', false));
+ multiHdfsSiteMap.put("dfs.namenode.shared.edits.dir", new MultipleHostTopologyUpdater("JOURNALNODE", ';', false, false, true));
+ multiHdfsSiteMap.put("dfs.encryption.key.provider.uri", new MultipleHostTopologyUpdater("RANGER_KMS_SERVER", ';', false, false, false));
// SECONDARY_NAMENODE
hdfsSiteMap.put("dfs.secondary.http.address", new SingleHostTopologyUpdater("SECONDARY_NAMENODE"));
@@ -2206,14 +2270,14 @@ public class BlueprintConfigurationProcessor {
// HIVE_SERVER
- multiHiveSiteMap.put("hive.metastore.uris", new MultipleHostTopologyUpdater("HIVE_METASTORE", ',', true));
+ multiHiveSiteMap.put("hive.metastore.uris", new MultipleHostTopologyUpdater("HIVE_METASTORE", ',', true, true, true));
dbHiveSiteMap.put("javax.jdo.option.ConnectionURL",
new DBTopologyUpdater("MYSQL_SERVER", "hive-env", "hive_database"));
multiCoreSiteMap.put("hadoop.proxyuser.hive.hosts", new MultipleHostTopologyUpdater("HIVE_SERVER"));
multiCoreSiteMap.put("hadoop.proxyuser.HTTP.hosts", new MultipleHostTopologyUpdater("WEBHCAT_SERVER"));
multiCoreSiteMap.put("hadoop.proxyuser.hcat.hosts", new MultipleHostTopologyUpdater("WEBHCAT_SERVER"));
multiCoreSiteMap.put("hadoop.proxyuser.yarn.hosts", new MultipleHostTopologyUpdater("RESOURCEMANAGER"));
- multiCoreSiteMap.put("hadoop.security.key.provider.path", new MultipleHostTopologyUpdater("RANGER_KMS_SERVER", ';', false));
+ multiCoreSiteMap.put("hadoop.security.key.provider.path", new MultipleHostTopologyUpdater("RANGER_KMS_SERVER", ';', false, false, true));
multiWebhcatSiteMap.put("templeton.hive.properties", new TempletonHivePropertyUpdater());
multiWebhcatSiteMap.put("templeton.kerberos.principal", new MultipleHostTopologyUpdater("WEBHCAT_SERVER"));
hiveEnvMap.put("hive_hostname", new SingleHostTopologyUpdater("HIVE_SERVER"));
http://git-wip-us.apache.org/repos/asf/ambari/blob/81c58bbb/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
index a447ff1..71c8f60 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java
@@ -41,6 +41,7 @@ import org.apache.ambari.server.topology.HostGroupInfo;
import org.apache.ambari.server.topology.InvalidTopologyException;
import org.apache.ambari.server.utils.CollectionPresentationUtils;
import org.junit.After;
+import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@@ -2354,6 +2355,116 @@ public class BlueprintConfigurationProcessorTest {
}
@Test
+ public void testMultipleHostTopologyUpdater__localhost__singleHost() throws Exception {
+
+ final String typeName = "hbase-site";
+ final String propertyName = "hbase.zookeeper.quorum";
+ final String originalValue = "localhost";
+ final String component1 = "ZOOKEEPER_SERVER";
+ final String component2 = "ZOOKEEPER_CLIENT";
+
+ Map<String, Map<String, String>> properties = new HashMap<String, Map<String, String>>();
+ Map<String, String> typeProps = new HashMap<String, String>();
+ typeProps.put(propertyName, originalValue);
+ properties.put(typeName, typeProps);
+
+ Configuration clusterConfig = new Configuration(properties, Collections.<String, Map<String, Map<String, String>>>emptyMap());
+
+ Collection<String> hgComponents = new HashSet<String>();
+ hgComponents.add(component1);
+ Set<String> hosts1 = new HashSet<String>();
+ hosts1.add("testhost1a");
+ TestHostGroup group1 = new TestHostGroup("group1", hgComponents, hosts1);
+
+ Collection<String> hgComponents2 = new HashSet<String>();
+ hgComponents2.add(component2);
+ Set<String> hosts2 = new HashSet<String>();
+ hosts2.add("testhost2");
+ TestHostGroup group2 = new TestHostGroup("group2", hgComponents2, hosts2);
+
+ Collection<TestHostGroup> hostGroups = new HashSet<TestHostGroup>();
+ hostGroups.add(group1);
+ hostGroups.add(group2);
+
+ ClusterTopology topology = createClusterTopology(bp, clusterConfig, hostGroups);
+
+ BlueprintConfigurationProcessor.MultipleHostTopologyUpdater mhtu = new BlueprintConfigurationProcessor.MultipleHostTopologyUpdater(component1);
+ String newValue = mhtu.updateForClusterCreate(propertyName, originalValue, properties, topology);
+
+ assertEquals("testhost1a", newValue);
+ }
+
+ @Test
+ public void testMultipleHostTopologyUpdater__localhost__singleHostGroup() throws Exception {
+
+ final String typeName = "hbase-site";
+ final String propertyName = "hbase.zookeeper.quorum";
+ final String originalValue = "localhost";
+ final String component1 = "ZOOKEEPER_SERVER";
+
+ Map<String, Map<String, String>> properties = new HashMap<String, Map<String, String>>();
+ Map<String, String> typeProps = new HashMap<String, String>();
+ typeProps.put(propertyName, originalValue);
+ properties.put(typeName, typeProps);
+
+ Configuration clusterConfig = new Configuration(properties, Collections.<String, Map<String, Map<String, String>>>emptyMap());
+
+ Collection<String> hgComponents = new HashSet<String>();
+ hgComponents.add(component1);
+ Set<String> hosts1 = new HashSet<String>();
+ hosts1.add("testhost1a");
+ hosts1.add("testhost1b");
+ hosts1.add("testhost1c");
+ TestHostGroup group1 = new TestHostGroup("group1", hgComponents, hosts1);
+
+ Collection<TestHostGroup> hostGroups = new HashSet<TestHostGroup>();
+ hostGroups.add(group1);
+
+ ClusterTopology topology = createClusterTopology(bp, clusterConfig, hostGroups);
+
+ BlueprintConfigurationProcessor.MultipleHostTopologyUpdater mhtu = new BlueprintConfigurationProcessor.MultipleHostTopologyUpdater(component1);
+ String newValue = mhtu.updateForClusterCreate(propertyName, originalValue, properties, topology);
+
+ List<String> hostArray = Arrays.asList(newValue.split(","));
+ Assert.assertTrue(hostArray.containsAll(hosts1) && hosts1.containsAll(hostArray));
+ }
+
+ @Test
+ public void testMultipleHostTopologyUpdater__hostgroup__singleHostGroup() throws Exception {
+
+ final String typeName = "hbase-site";
+ final String propertyName = "hbase.zookeeper.quorum";
+ final String originalValue = "%HOSTGROUP::group1%";
+ final String component1 = "ZOOKEEPER_SERVER";
+
+ Map<String, Map<String, String>> properties = new HashMap<String, Map<String, String>>();
+ Map<String, String> typeProps = new HashMap<String, String>();
+ typeProps.put(propertyName, originalValue);
+ properties.put(typeName, typeProps);
+
+ Configuration clusterConfig = new Configuration(properties, Collections.<String, Map<String, Map<String, String>>>emptyMap());
+
+ Collection<String> hgComponents = new HashSet<String>();
+ hgComponents.add(component1);
+ Set<String> hosts1 = new HashSet<String>();
+ hosts1.add("testhost1a");
+ hosts1.add("testhost1b");
+ hosts1.add("testhost1c");
+ TestHostGroup group1 = new TestHostGroup("group1", hgComponents, hosts1);
+
+ Collection<TestHostGroup> hostGroups = new HashSet<TestHostGroup>();
+ hostGroups.add(group1);
+
+ ClusterTopology topology = createClusterTopology(bp, clusterConfig, hostGroups);
+
+ BlueprintConfigurationProcessor.MultipleHostTopologyUpdater mhtu = new BlueprintConfigurationProcessor.MultipleHostTopologyUpdater(component1);
+ String newValue = mhtu.updateForClusterCreate(propertyName, originalValue, properties, topology);
+
+ List<String> hostArray = Arrays.asList(newValue.split(","));
+ Assert.assertTrue(hostArray.containsAll(hosts1) && hosts1.containsAll(hostArray));
+ }
+
+ @Test
public void testDoUpdateForClusterVerifyRetrySettingsDefault() throws Exception {
Map<String, Map<String, String>> configProperties =
new HashMap<String, Map<String, String>>();
@@ -5799,8 +5910,16 @@ public class BlueprintConfigurationProcessorTest {
// When
configProcessor.doUpdateForClusterCreate();
+ String updatedVal = clusterConfig.getPropertyValue(configType, "dfs.encryption.key.provider.uri");
+ Assert.assertTrue(updatedVal.startsWith("kms://http@"));
+ Assert.assertTrue(updatedVal.endsWith(":9292/kms"));
+ String hostsString = updatedVal.substring(11,updatedVal.length()-9);
+
+ List<String> hostArray = Arrays.asList(hostsString.split(";"));
+ List<String> expected = Arrays.asList("host1","host2");
+
// Then
- assertEquals("kms://http@host1;host2:9292/kms", clusterConfig.getPropertyValue(configType, "dfs.encryption.key.provider.uri"));
+ Assert.assertTrue(hostArray.containsAll(expected) && expected.containsAll(hostArray));
}
@@ -5894,6 +6013,114 @@ public class BlueprintConfigurationProcessorTest {
assertEquals("kms://http@host1:9292/kms", clusterConfig.getPropertyValue(configType, "dfs.encryption.key.provider.uri"));
}
+ @Test
+ public void testHdfsWithRangerKmsServer__multiple_hosts__localhost() throws Exception {
+ // Given
+ final String configType = "hdfs-site";
+ Map<String, Map<String, String>> properties = new HashMap<String, Map<String, String>>();
+ Map<String, String> configProperties = new HashMap<String, String>();
+
+ properties.put(configType, configProperties);
+ configProperties.put("dfs.encryption.key.provider.uri", "kms://http@localhost:9292/kms");
+
+
+ Map<String, Map<String, String>> parentProperties = new HashMap<String, Map<String, String>>();
+ Configuration parentClusterConfig = new Configuration(parentProperties,
+ Collections.<String, Map<String, Map<String, String>>>emptyMap());
+ Configuration clusterConfig = new Configuration(properties,
+ Collections.<String, Map<String, Map<String, String>>>emptyMap(), parentClusterConfig);
+
+
+ Collection<String> kmsServerComponents = new HashSet<String>();
+ kmsServerComponents.add("RANGER_KMS_SERVER");
+
+ Collection<String> hdfsComponents = new HashSet<String>();
+ hdfsComponents.add("NAMENODE");
+ hdfsComponents.add("DATANODE");
+
+ Collection<String> hosts = new HashSet<String>();
+ hosts.add("host1");
+ hosts.add("host2");
+
+ TestHostGroup group1 = new TestHostGroup("group1", kmsServerComponents, hosts);
+ group1.components.add("DATANODE");
+
+ TestHostGroup group2 = new TestHostGroup("group2", hdfsComponents, Collections.singleton("host3"));
+
+ Collection<TestHostGroup> hostGroups = Lists.newArrayList(group1, group2);
+
+ ClusterTopology topology = createClusterTopology(bp, clusterConfig, hostGroups);
+ BlueprintConfigurationProcessor configProcessor = new BlueprintConfigurationProcessor(topology);
+
+ // When
+ configProcessor.doUpdateForClusterCreate();
+
+ String updatedVal = clusterConfig.getPropertyValue(configType, "dfs.encryption.key.provider.uri");
+ Assert.assertTrue(updatedVal.startsWith("kms://http@"));
+ Assert.assertTrue(updatedVal.endsWith(":9292/kms"));
+ String hostsString = updatedVal.substring(11,updatedVal.length()-9);
+
+ List<String> hostArray = Arrays.asList(hostsString.split(";"));
+ List<String> expected = Arrays.asList("host1","host2");
+
+ // Then
+ Assert.assertTrue(hostArray.containsAll(expected) && expected.containsAll(hostArray));
+ }
+
+ @Test
+ public void testHdfsWithRangerKmsServer__multiple_hosts__hostgroup() throws Exception {
+ // Given
+ final String configType = "hdfs-site";
+ Map<String, Map<String, String>> properties = new HashMap<String, Map<String, String>>();
+ Map<String, String> configProperties = new HashMap<String, String>();
+
+ properties.put(configType, configProperties);
+ configProperties.put("dfs.encryption.key.provider.uri", "kms://http@%HOSTGROUP::group1%:9292/kms");
+
+
+ Map<String, Map<String, String>> parentProperties = new HashMap<String, Map<String, String>>();
+ Configuration parentClusterConfig = new Configuration(parentProperties,
+ Collections.<String, Map<String, Map<String, String>>>emptyMap());
+ Configuration clusterConfig = new Configuration(properties,
+ Collections.<String, Map<String, Map<String, String>>>emptyMap(), parentClusterConfig);
+
+
+ Collection<String> kmsServerComponents = new HashSet<String>();
+ kmsServerComponents.add("RANGER_KMS_SERVER");
+
+ Collection<String> hdfsComponents = new HashSet<String>();
+ hdfsComponents.add("NAMENODE");
+ hdfsComponents.add("DATANODE");
+
+ Collection<String> hosts = new HashSet<String>();
+ hosts.add("host1");
+ hosts.add("host2");
+
+ TestHostGroup group1 = new TestHostGroup("group1", kmsServerComponents, hosts);
+ group1.components.add("DATANODE");
+
+ TestHostGroup group2 = new TestHostGroup("group2", hdfsComponents, Collections.singleton("host3"));
+
+ Collection<TestHostGroup> hostGroups = Lists.newArrayList(group1, group2);
+
+ ClusterTopology topology = createClusterTopology(bp, clusterConfig, hostGroups);
+ BlueprintConfigurationProcessor configProcessor = new BlueprintConfigurationProcessor(topology);
+
+ // When
+ configProcessor.doUpdateForClusterCreate();
+
+ String updatedVal = clusterConfig.getPropertyValue(configType, "dfs.encryption.key.provider.uri");
+ Assert.assertTrue(updatedVal.startsWith("kms://http@"));
+ Assert.assertTrue(updatedVal.endsWith(":9292/kms"));
+ String hostsString = updatedVal.substring(11,updatedVal.length()-9);
+
+ List<String> hostArray = Arrays.asList(hostsString.split(";"));
+ List<String> expected = Arrays.asList("host1","host2");
+
+ // Then
+ Assert.assertTrue(hostArray.containsAll(expected) && expected.containsAll(hostArray));
+ }
+
@Test
public void testHadoopWithRangerKmsServer() throws Exception {
@@ -6160,8 +6387,8 @@ public class BlueprintConfigurationProcessorTest {
String propertyOriginalValue2 = "[%HOSTGROUP::group_1%]";
// When
- String updatedValue1 = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue1, null, ImmutableList.<String>of("host1:100"));
- String updatedValue2 = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue2, null, ImmutableList.<String>of("host1:100"));
+ String updatedValue1 = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue1, ImmutableList.<String>of("host1:100"));
+ String updatedValue2 = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue2, ImmutableList.<String>of("host1:100"));
// Then
assertEquals("host1:100", updatedValue1);
@@ -6181,8 +6408,8 @@ public class BlueprintConfigurationProcessorTest {
String propertyOriginalValue2 = "[%HOSTGROUP::group_1%, %HOSTGROUP::group_2%]";
// When
- String updatedValue1 = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue1, null, ImmutableList.<String>of("host1:100", "host2:200"));
- String updatedValue2 = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue2, null, ImmutableList.<String>of("host1:100", "host2:200"));
+ String updatedValue1 = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue1, ImmutableList.<String>of("host1:100", "host2:200"));
+ String updatedValue2 = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue2, ImmutableList.<String>of("host1:100", "host2:200"));
// Then
assertEquals("host1:100,host2:200", updatedValue1);
@@ -6200,7 +6427,7 @@ public class BlueprintConfigurationProcessorTest {
String propertyOriginalValue = "http://%HOSTGROUP::group_1%#";
// When
- String updatedValue = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue, null, ImmutableList.<String>of("host1:100"));
+ String updatedValue = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue, ImmutableList.<String>of("host1:100"));
// Then
assertEquals("http://host1:100#", updatedValue);
@@ -6215,7 +6442,7 @@ public class BlueprintConfigurationProcessorTest {
String propertyOriginalValue = "http://%HOSTGROUP::group_1,HOSTGROUP::group_2%/resource";
// When
- String updatedValue = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue, null, ImmutableList.<String>of("host1:100", "host2:200"));
+ String updatedValue = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue, ImmutableList.<String>of("host1:100", "host2:200"));
// Then
assertEquals("http://host1:100,host2:200/resource", updatedValue);
@@ -6230,7 +6457,7 @@ public class BlueprintConfigurationProcessorTest {
String propertyOriginalValue = "%HOSTGROUP::group_1%:11,%HOSTGROUP::group_2%:11";
// When
- String updatedValue = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue, null, ImmutableList.<String>of("host1:100", "host2:200"));
+ String updatedValue = mhtu.resolveHostGroupPlaceholder(propertyOriginalValue, ImmutableList.<String>of("host1:100", "host2:200"));
// Then
assertEquals("host1:100,host2:200", updatedValue);
[09/19] ambari git commit: AMBARI-14941 Convert Storm and Kafka to
use enhanced dashboard widgets. (atkach)
Posted by yu...@apache.org.
AMBARI-14941 Convert Storm and Kafka to use enhanced dashboard widgets. (atkach)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/66c35e01
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/66c35e01
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/66c35e01
Branch: refs/heads/2.2.1-maint
Commit: 66c35e0180f043d3be50bc81121628722f13e967
Parents: 88dab3b
Author: Andrii Tkach <at...@hortonworks.com>
Authored: Fri Feb 5 17:15:14 2016 +0200
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Mon Feb 8 12:24:26 2016 -0800
----------------------------------------------------------------------
.../stacks/HDP/2.3/services/KAFKA/widgets.json | 182 +++++++++++++++++++
.../stacks/HDP/2.3/services/STORM/widgets.json | 127 +++++++++++++
ambari-web/app/assets/test/tests.js | 1 -
ambari-web/app/data/service_graph_config.js | 15 --
ambari-web/app/messages.js | 28 ---
.../mixins/common/chart/storm_linear_time.js | 74 --------
.../app/mixins/common/widgets/widget_mixin.js | 2 +-
ambari-web/app/views.js | 9 -
.../app/views/common/chart/linear_time.js | 20 --
.../service/info/metrics/kafka/broker_topic.js | 50 -----
.../info/metrics/kafka/controller_status.js | 49 -----
.../info/metrics/kafka/kafka_controller.js | 41 -----
.../info/metrics/kafka/replica_fetcher.js | 41 -----
.../info/metrics/kafka/replica_manager.js | 46 -----
.../info/metrics/storm/executors_metric.js | 34 ----
.../info/metrics/storm/slots_number_metric.js | 42 -----
.../service/info/metrics/storm/tasks_metric.js | 34 ----
.../info/metrics/storm/topologies_metric.js | 34 ----
.../common/chart/storm_linear_time_test.js | 80 --------
19 files changed, 310 insertions(+), 599 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/widgets.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/widgets.json
new file mode 100644
index 0000000..7237236
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/widgets.json
@@ -0,0 +1,182 @@
+{
+ "layouts": [
+ {
+ "layout_name": "default_kafka_dashboard",
+ "display_name": "Standard Kafka Dashboard",
+ "section_name": "KAFKA_SUMMARY",
+ "widgetLayoutInfo": [
+ {
+ "widget_name": "Broker Topics",
+ "description": "Broker Topics",
+ "widget_type": "GRAPH",
+ "is_visible": true,
+ "metrics": [
+ {
+ "name": "kafka.server.BrokerTopicMetrics.BytesInPerSec.1MinuteRate",
+ "metric_path": "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesInPerSec/1MinuteRate",
+ "service_name": "KAFKA",
+ "component_name": "KAFKA_BROKER"
+ },
+ {
+ "name": "kafka.server.BrokerTopicMetrics.BytesOutPerSec.1MinuteRate",
+ "metric_path": "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesOutPerSec/1MinuteRate",
+ "service_name": "KAFKA",
+ "component_name": "KAFKA_BROKER"
+ },
+ {
+ "name": "kafka.server.BrokerTopicMetrics.MessagesInPerSec.1MinuteRate",
+ "metric_path": "metrics/kafka/server/BrokerTopicMetrics/AllTopicsMessagesInPerSec/1MinuteRate",
+ "service_name": "KAFKA",
+ "component_name": "KAFKA_BROKER"
+ }
+ ],
+ "values": [
+ {
+ "name": "Bytes In",
+ "value": "${kafka.server.BrokerTopicMetrics.BytesInPerSec.1MinuteRate}"
+ },
+ {
+ "name": "Bytes Out",
+ "value": "${kafka.server.BrokerTopicMetrics.BytesOutPerSec.1MinuteRate}"
+ },
+ {
+ "name": "Messages In",
+ "value": "${kafka.server.BrokerTopicMetrics.MessagesInPerSec.1MinuteRate}"
+ }
+ ],
+ "properties": {
+ "graph_type": "LINE",
+ "time_range": "1"
+ }
+ },
+ {
+ "widget_name": "Active Controller Count",
+ "description": "Active Controller Count",
+ "widget_type": "GRAPH",
+ "is_visible": true,
+ "metrics": [
+ {
+ "name": "kafka.controller.KafkaController.ActiveControllerCount",
+ "metric_path": "metrics/kafka/controller/KafkaController/ActiveControllerCount",
+ "service_name": "KAFKA",
+ "component_name": "KAFKA_BROKER"
+ }
+ ],
+ "values": [
+ {
+ "name": "Active Controller Count",
+ "value": "${kafka.controller.KafkaController.ActiveControllerCount}"
+ }
+ ],
+ "properties": {
+ "graph_type": "LINE",
+ "time_range": "1"
+ }
+ },
+ {
+ "widget_name": "Controller Status",
+ "description": "Controller Status",
+ "widget_type": "GRAPH",
+ "is_visible": true,
+ "metrics": [
+ {
+ "name": "kafka.controller.ControllerStats.LeaderElectionRateAndTimeMs.1MinuteRate",
+ "metric_path": "metrics/kafka/controller/ControllerStats/LeaderElectionRateAndTimeMs/1MinuteRate",
+ "service_name": "KAFKA",
+ "component_name": "KAFKA_BROKER"
+ },
+ {
+ "name": "kafka.controller.ControllerStats.UncleanLeaderElectionsPerSec.1MinuteRate",
+ "metric_path": "metrics/kafka/controller/ControllerStats/UncleanLeaderElectionsPerSec/1MinuteRate",
+ "service_name": "KAFKA",
+ "component_name": "KAFKA_BROKER"
+ }
+ ],
+ "values": [
+ {
+ "name": "Leader Election Rate And Time",
+ "value": "${kafka.controller.ControllerStats.LeaderElectionRateAndTimeMs.1MinuteRate}"
+ },
+ {
+ "name": "Unclean Leader Election",
+ "value": "${kafka.controller.ControllerStats.UncleanLeaderElectionsPerSec.1MinuteRate}"
+ }
+ ],
+ "properties": {
+ "graph_type": "LINE",
+ "time_range": "1"
+ }
+ },
+ {
+ "widget_name": "Replica MaxLag",
+ "description": "Replica MaxLag",
+ "widget_type": "GRAPH",
+ "is_visible": true,
+ "metrics": [
+ {
+ "name": "kafka.server.ReplicaFetcherManager.MaxLag.clientId.Replica",
+ "metric_path": "metrics/kafka/server/ReplicaFetcherManager/Replica-MaxLag",
+ "service_name": "KAFKA",
+ "component_name": "KAFKA_BROKER"
+ }
+ ],
+ "values": [
+ {
+ "name": "Replica MaxLag",
+ "value": "${kafka.server.ReplicaFetcherManager.MaxLag.clientId.Replica}"
+ }
+ ],
+ "properties": {
+ "graph_type": "LINE",
+ "time_range": "1"
+ }
+ },
+ {
+ "widget_name": "Replica Manager",
+ "description": "Replica Manager",
+ "widget_type": "GRAPH",
+ "is_visible": true,
+ "metrics": [
+ {
+ "name": "kafka.server.ReplicaManager.PartitionCount",
+ "metric_path": "metrics/kafka/server/ReplicaManager/PartitionCount",
+ "service_name": "KAFKA",
+ "component_name": "KAFKA_BROKER"
+ },
+ {
+ "name": "kafka.server.ReplicaManager.UnderReplicatedPartitions",
+ "metric_path": "metrics/kafka/server/ReplicaManager/UnderReplicatedPartitions",
+ "service_name": "KAFKA",
+ "component_name": "KAFKA_BROKER"
+ },
+ {
+ "name": "kafka.server.ReplicaManager.LeaderCount",
+ "metric_path": "metrics/kafka/server/ReplicaManager/LeaderCount",
+ "service_name": "KAFKA",
+ "component_name": "KAFKA_BROKER"
+ }
+ ],
+ "values": [
+ {
+ "name": "Partitions count",
+ "value": "${kafka.server.ReplicaManager.PartitionCount}"
+ },
+ {
+ "name": "Under Replicated Partitions",
+ "value": "${kafka.server.ReplicaManager.UnderReplicatedPartitions}"
+ },
+ {
+ "name": "Leader Count",
+ "value": "${kafka.server.ReplicaManager.LeaderCount}"
+ }
+ ],
+ "properties": {
+ "graph_type": "LINE",
+ "time_range": "1"
+ }
+ }
+
+ ]
+ }
+ ]
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-server/src/main/resources/stacks/HDP/2.3/services/STORM/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/STORM/widgets.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/STORM/widgets.json
new file mode 100644
index 0000000..d22a1ed
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/STORM/widgets.json
@@ -0,0 +1,127 @@
+{
+ "layouts": [
+ {
+ "layout_name": "default_storm_dashboard",
+ "display_name": "Standard Storm Dashboard",
+ "section_name": "STORM_SUMMARY",
+ "widgetLayoutInfo": [
+ {
+ "widget_name": "Number of Slots",
+ "description": "Number of Slots",
+ "widget_type": "GRAPH",
+ "is_visible": true,
+ "metrics": [
+ {
+ "name": "Used Slots",
+ "metric_path": "metrics/storm/nimbus/usedslots",
+ "service_name": "STORM",
+ "component_name": "NIMBUS"
+ },
+ {
+ "name": "Free Slots",
+ "metric_path": "metrics/storm/nimbus/freeslots",
+ "service_name": "STORM",
+ "component_name": "NIMBUS"
+ },
+ {
+ "name": "Total Slots",
+ "metric_path": "metrics/storm/nimbus/totalslots",
+ "service_name": "STORM",
+ "component_name": "NIMBUS"
+ }
+ ],
+ "values": [
+ {
+ "name": "Used slots",
+ "value": "${Used Slots}"
+ },
+ {
+ "name": "Free slots",
+ "value": "${Free Slots}"
+ },
+ {
+ "name": "Total slots",
+ "value": "${Total Slots}"
+ }
+ ],
+ "properties": {
+ "graph_type": "LINE",
+ "time_range": "1"
+ }
+ },
+ {
+ "widget_name": "Number of executors",
+ "description": "Number of executors",
+ "widget_type": "GRAPH",
+ "is_visible": true,
+ "metrics": [
+ {
+ "name": "Total Executors",
+ "metric_path": "metrics/storm/nimbus/totalexecutors",
+ "service_name": "STORM",
+ "component_name": "NIMBUS"
+ }
+ ],
+ "values": [
+ {
+ "name": "Total executors",
+ "value": "${Total Executors}"
+ }
+ ],
+ "properties": {
+ "graph_type": "LINE",
+ "time_range": "1"
+ }
+ },
+ {
+ "widget_name": "Number of topologies",
+ "description": "Number of topologies",
+ "widget_type": "GRAPH",
+ "is_visible": true,
+ "metrics": [
+ {
+ "name": "Topologies",
+ "metric_path": "metrics/storm/nimbus/topologies",
+ "service_name": "STORM",
+ "component_name": "NIMBUS"
+ }
+ ],
+ "values": [
+ {
+ "name": "Total topologies",
+ "value": "${Topologies}"
+ }
+ ],
+ "properties": {
+ "graph_type": "LINE",
+ "time_range": "1"
+ }
+ },
+ {
+ "widget_name": "Number of tasks",
+ "description": "Number of tasks",
+ "widget_type": "GRAPH",
+ "is_visible": true,
+ "metrics": [
+ {
+ "name": "Total Tasks",
+ "metric_path": "metrics/storm/nimbus/totaltasks",
+ "service_name": "STORM",
+ "component_name": "NIMBUS"
+ }
+ ],
+ "values": [
+ {
+ "name": "Total tasks",
+ "value": "${Total Tasks}"
+ }
+ ],
+ "properties": {
+ "graph_type": "LINE",
+ "time_range": "1"
+ }
+ }
+ ]
+ }
+ ]
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index db29947..0cdfdec 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -142,7 +142,6 @@ var files = ['test/init_model_test',
'test/mixins/common/configs/enhanced_configs_test',
'test/mixins/common/configs/configs_saver_test',
'test/mixins/common/configs/toggle_isrequired_test',
- 'test/mixins/common/chart/storm_linear_time_test',
'test/mixins/common/widgets/export_metrics_mixin_test',
'test/mixins/common/widgets/time_range_mixin_test',
'test/mixins/common/widgets/widget_section_test',
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/data/service_graph_config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/service_graph_config.js b/ambari-web/app/data/service_graph_config.js
index 1ae4a03..0d9fa9d 100644
--- a/ambari-web/app/data/service_graph_config.js
+++ b/ambari-web/app/data/service_graph_config.js
@@ -44,20 +44,5 @@ module.exports = {
'Flume_IncommingSum',
'Flume_OutgoingMMA',
'Flume_OutgoingSum'
- ],
-
- 'storm': [
- 'STORM_SlotsNumber',
- 'STORM_Executors',
- 'STORM_Topologies',
- 'STORM_Tasks'
- ],
-
- 'kafka': [
- 'Kafka_BrokerTopicMetrics',
- 'Kafka_Controller',
- 'Kafka_ControllerStatus',
- 'Kafka_ReplicaManager',
- 'Kafka_ReplicaFetcher'
]
};
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index a44b26a..e885410 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -452,16 +452,6 @@ Em.I18n.translations = {
'services.tez.description':'Tez is the next generation Hadoop Query Processing framework written on top of YARN',
'services.falcon.description': 'Falcon mirroring engine',
'services.storm.description': 'Apache Hadoop Stream processing framework',
- 'services.storm.slots.metrics.title': 'Number of slots',
- 'services.storm.slots.metrics.free': 'Free slots',
- 'services.storm.slots.metrics.total': 'Total slots',
- 'services.storm.slots.metrics.used': 'Used slots',
- 'services.storm.executors.metrics.title': 'Number of executors',
- 'services.storm.executors.metrics.total': 'Total executors',
- 'services.storm.topology.metrics.title': 'Number of topologies',
- 'services.storm.topology.metrics.total': 'Total topologies',
- 'services.storm.tasks.metrics.title': 'Number of tasks',
- 'services.storm.tasks.metrics.total': 'Total tasks',
'services.storm.configs.range-plugin-enable.dialog.title': 'Enable Ranger for STORM',
'services.storm.configs.range-plugin-enable.dialog.message': 'Enabling Ranger plugin for STORM is effective only on a secured cluster.',
@@ -1837,24 +1827,6 @@ Em.I18n.translations = {
'services.service.info.metrics.yarn.apps.states.running': 'Running',
'services.service.info.metrics.yarn.apps.states.submitted': 'Submitted',
- 'services.service.info.metrics.kafka.server.brokerTopic.title': 'Broker Topics',
- 'services.service.info.metrics.kafka.server.brokerTopic.displayNames.AllTopicsBytesOutPerSec': 'Bytes Out',
- 'services.service.info.metrics.kafka.server.brokerTopic.displayNames.AllTopicsBytesInPerSec': 'Bytes In',
- 'services.service.info.metrics.kafka.server.brokerTopic.displayNames.AllTopicsMessagesInPerSec': 'Messages In',
- 'services.service.info.metrics.kafka.server.ReplicaManager.title': 'Replica Manager',
- 'services.service.info.metrics.kafka.server.ReplicaManager.displayNames.PartitionCount': 'Partitions count',
- 'services.service.info.metrics.kafka.server.ReplicaManager.displayNames.UnderReplicatedPartitions': 'Under Replicated Partitions',
- 'services.service.info.metrics.kafka.server.ReplicaManager.displayNames.LeaderCount': 'Leader Count',
- 'services.service.info.metrics.kafka.controller.ControllerStats.title': 'Controller Status',
- 'services.service.info.metrics.kafka.controller.ControllerStats.displayNames.LeaderElectionRateAndTimeMs': 'Leader Election Rate And Time',
- 'services.service.info.metrics.kafka.controller.ControllerStats.displayNames.UncleanLeaderElectionsPerSec': 'Unclean Leader Election',
- 'services.service.info.metrics.kafka.controller.KafkaController.title': 'Active Controller Count',
- 'services.service.info.metrics.kafka.controller.KafkaController.displayNames.ActiveControllerCount': 'Active Controller Count',
- 'services.service.info.metrics.kafka.log.LogFlushStats.title': 'Log Flush Status',
- 'services.service.info.metrics.kafka.log.LogFlushStats.displayNames.LogFlushRateAndTimeMs': 'Log Flush Rate amd Time',
- 'services.service.info.metrics.kafka.server.ReplicaFetcherManager.title': 'Replica MaxLag',
- 'services.service.info.metrics.kafka.server.ReplicaFetcherManager.displayNames.Replica-MaxLag': 'Replica MaxLag',
-
'services.service.info.menu.summary':'Summary',
'services.service.info.menu.configs':'Configs',
'services.service.info.menu.heatmaps':'Heatmaps',
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/mixins/common/chart/storm_linear_time.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/chart/storm_linear_time.js b/ambari-web/app/mixins/common/chart/storm_linear_time.js
deleted file mode 100644
index 616f470..0000000
--- a/ambari-web/app/mixins/common/chart/storm_linear_time.js
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-var App = require('app');
-
-App.StormLinearTimeChartMixin = Em.Mixin.create({
- ajaxIndex: 'service.metrics.storm.nimbus',
- metricsTemplate: 'metrics/storm/nimbus/{0}[{1},{2},{3}]',
-
- getDataForAjaxRequest: function() {
- var fromSeconds,
- toSeconds,
- index = this.get('isPopup') ? this.get('currentTimeIndex') : this.get('parentView.currentTimeRangeIndex'),
- customStartTime = this.get('isPopup') ? this.get('customStartTime') : this.get('parentView.customStartTime'),
- customEndTime = this.get('isPopup') ? this.get('customEndTime') : this.get('parentView.customEndTime');
- if (index === 8 && !Em.isNone(customStartTime) && !Em.isNone(customEndTime)) {
- // Custom start and end time is specified by user
- fromSeconds = customStartTime / 1000;
- toSeconds = customEndTime / 1000;
- } else {
- // Preset time range is specified by user
- toSeconds = Math.round(App.dateTime() / 1000);
- fromSeconds = toSeconds - this.get('timeUnitSeconds')
- }
- var metricTemplate = [];
- this.get('stormChartDefinition').forEach(function(chartInfo) {
- metricTemplate.push(
- this.get('metricsTemplate').format(chartInfo.field, fromSeconds, toSeconds, 15)
- );
- }, this);
- return {
- metricsTemplate: metricTemplate.join(',')
- };
- },
-
- getData: function (jsonData) {
- var dataArray = [],
- pathKeys = ['metrics','storm','nimbus'],
- validPath = true;
- pathKeys.forEach(function(key) {
- if (!jsonData[key]) {
- validPath = false;
- } else {
- jsonData = jsonData[key];
- }
- });
- if (!validPath) {
- return dataArray;
- }
- this.get('stormChartDefinition').forEach(function(chart){
- dataArray.push({
- name: chart.name,
- data: jsonData[chart.field]
- });
- }, this);
- return dataArray;
- }
-});
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/mixins/common/widgets/widget_mixin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/widgets/widget_mixin.js b/ambari-web/app/mixins/common/widgets/widget_mixin.js
index c639c77..ee1e6a5 100644
--- a/ambari-web/app/mixins/common/widgets/widget_mixin.js
+++ b/ambari-web/app/mixins/common/widgets/widget_mixin.js
@@ -42,7 +42,7 @@ App.WidgetMixin = Ember.Mixin.create({
* @type {RegExp}
* @const
*/
- VALUE_NAME_REGEX: /[\w\.\,\:\=\[\]]+/g,
+ VALUE_NAME_REGEX: /(\w+\s+\w+)?[\w\.\,\:\=\[\]]+/g,
/**
* @type {string}
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/views.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views.js b/ambari-web/app/views.js
index 859d602..66f6365 100644
--- a/ambari-web/app/views.js
+++ b/ambari-web/app/views.js
@@ -277,15 +277,6 @@ require('views/main/service/info/metrics/flume/jvm_threads_runnable');
require('views/main/service/info/metrics/flume/cpu_user');
require('views/main/service/info/metrics/flume/flume_metric_graph');
require('views/main/service/info/metrics/flume/flume_metric_graphs');
-require('views/main/service/info/metrics/storm/slots_number_metric');
-require('views/main/service/info/metrics/storm/executors_metric');
-require('views/main/service/info/metrics/storm/tasks_metric');
-require('views/main/service/info/metrics/storm/topologies_metric');
-require('views/main/service/info/metrics/kafka/broker_topic');
-require('views/main/service/info/metrics/kafka/kafka_controller');
-require('views/main/service/info/metrics/kafka/controller_status');
-require('views/main/service/info/metrics/kafka/replica_manager');
-require('views/main/service/info/metrics/kafka/replica_fetcher');
require('views/main/service/add_view');
require('views/main/service/reassign_view');
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/views/common/chart/linear_time.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/chart/linear_time.js b/ambari-web/app/views/common/chart/linear_time.js
index 2cda391..b405b7c 100644
--- a/ambari-web/app/views/common/chart/linear_time.js
+++ b/ambari-web/app/views/common/chart/linear_time.js
@@ -265,26 +265,6 @@ App.ChartLinearTimeView = Ember.View.extend(App.ExportMetricsMixin, {
},
/**
- * Maps server data for Kafka Broker Topic and Controller Status metrics
- * into series format ready for export to graph and JSON formats
- * @param jsonData
- * @returns {Array}
- */
- getKafkaData: function (jsonData) {
- var dataArray = [],
- template = this.get('seriesTemplate'),
- data = Em.get(jsonData, template.path);
- for (var name in data) {
- var displayName = template.displayName(name);
- dataArray.push({
- name: displayName,
- data: Em.get(data, name + '.1MinuteRate')
- });
- }
- return dataArray;
- },
-
- /**
* Function to map data into graph series
* @param jsonData
* @returns {Array}
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/views/main/service/info/metrics/kafka/broker_topic.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/metrics/kafka/broker_topic.js b/ambari-web/app/views/main/service/info/metrics/kafka/broker_topic.js
deleted file mode 100644
index d584181..0000000
--- a/ambari-web/app/views/main/service/info/metrics/kafka/broker_topic.js
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-
-/**
- * @class
- *
- * This is a view for showing Kafka_BrokerTopicMetrics
- *
- * @extends App.ChartLinearTimeView
- * @extends Ember.Object
- * @extends Ember.View
- */
-App.ChartServiceMetricsKafka_BrokerTopicMetrics = App.ChartLinearTimeView.extend({
- id: "service-metrics-kafka-broker-topic-metrics",
- title: Em.I18n.t('services.service.info.metrics.kafka.server.brokerTopic.title'),
- renderer: 'line',
- ajaxIndex: 'service.metrics.kafka.broker.topic',
-
- seriesTemplate: {
- path: 'metrics.kafka.server.BrokerTopicMetrics',
- displayName: function (name) {
- var displayNameMap = {
- AllTopicsBytesOutPerSec: Em.I18n.t('services.service.info.metrics.kafka.server.brokerTopic.displayNames.AllTopicsBytesOutPerSec'),
- AllTopicsBytesInPerSec: Em.I18n.t('services.service.info.metrics.kafka.server.brokerTopic.displayNames.AllTopicsBytesInPerSec'),
- AllTopicsMessagesInPerSec: Em.I18n.t('services.service.info.metrics.kafka.server.brokerTopic.displayNames.AllTopicsMessagesInPerSec')
- };
- return displayNameMap[name];
- }
- },
-
- getData: function (jsonData) {
- return this.getKafkaData(jsonData);
- }
-});
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/views/main/service/info/metrics/kafka/controller_status.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/metrics/kafka/controller_status.js b/ambari-web/app/views/main/service/info/metrics/kafka/controller_status.js
deleted file mode 100644
index 8610968..0000000
--- a/ambari-web/app/views/main/service/info/metrics/kafka/controller_status.js
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-
-/**
- * @class
- *
- * This is a view for showing Kafka_ControllerStatus
- *
- * @extends App.ChartLinearTimeView
- * @extends Ember.Object
- * @extends Ember.View
- */
-App.ChartServiceMetricsKafka_ControllerStatus = App.ChartLinearTimeView.extend({
- id: "service-metrics-kafka-controler-status-metrics",
- title: Em.I18n.t('services.service.info.metrics.kafka.controller.ControllerStats.title'),
- renderer: 'line',
- ajaxIndex: 'service.metrics.kafka.controller.ControllerStats',
-
- seriesTemplate: {
- path: 'metrics.kafka.controller.ControllerStats',
- displayName: function (name) {
- var displayNameMap = {
- LeaderElectionRateAndTimeMs: Em.I18n.t('services.service.info.metrics.kafka.controller.ControllerStats.displayNames.LeaderElectionRateAndTimeMs'),
- UncleanLeaderElectionsPerSec: Em.I18n.t('services.service.info.metrics.kafka.controller.ControllerStats.displayNames.UncleanLeaderElectionsPerSec')
- };
- return displayNameMap[name];
- }
- },
-
- getData: function (jsonData) {
- return this.getKafkaData(jsonData);
- }
-});
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/views/main/service/info/metrics/kafka/kafka_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/metrics/kafka/kafka_controller.js b/ambari-web/app/views/main/service/info/metrics/kafka/kafka_controller.js
deleted file mode 100644
index 2f4fdd1..0000000
--- a/ambari-web/app/views/main/service/info/metrics/kafka/kafka_controller.js
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-
-/**
- * @class
- *
- * This is a view for showing Kafka_Controller
- *
- * @extends App.ChartLinearTimeView
- * @extends Ember.Object
- * @extends Ember.View
- */
-App.ChartServiceMetricsKafka_Controller = App.ChartLinearTimeView.extend({
- id: "service-metrics-kafka-controller-metrics",
- title: Em.I18n.t('services.service.info.metrics.kafka.controller.KafkaController.title'),
- renderer: 'line',
- ajaxIndex: 'service.metrics.kafka.controller.KafkaController',
-
- seriesTemplate: {
- path: 'metrics.kafka.controller.KafkaController',
- displayName: function () {
- return Em.I18n.t('services.service.info.metrics.kafka.controller.KafkaController.displayNames.ActiveControllerCount');
- }
- }
-});
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/views/main/service/info/metrics/kafka/replica_fetcher.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/metrics/kafka/replica_fetcher.js b/ambari-web/app/views/main/service/info/metrics/kafka/replica_fetcher.js
deleted file mode 100644
index 7068c17..0000000
--- a/ambari-web/app/views/main/service/info/metrics/kafka/replica_fetcher.js
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-
-/**
- * @class
- *
- * This is a view for showing Kafka_ReplicaFetcher
- *
- * @extends App.ChartLinearTimeView
- * @extends Ember.Object
- * @extends Ember.View
- */
-App.ChartServiceMetricsKafka_ReplicaFetcher = App.ChartLinearTimeView.extend({
- id: "service-metrics-kafka-replica-fetcher-metrics",
- title: Em.I18n.t('services.service.info.metrics.kafka.server.ReplicaFetcherManager.title'),
- renderer: 'line',
- ajaxIndex: 'service.metrics.kafka.server.ReplicaFetcherManager',
-
- seriesTemplate: {
- path: 'metrics.kafka.server.ReplicaFetcherManager',
- displayName: function () {
- return Em.I18n.t('services.service.info.metrics.kafka.server.ReplicaFetcherManager.displayNames.Replica-MaxLag');
- }
- }
-});
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/views/main/service/info/metrics/kafka/replica_manager.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/metrics/kafka/replica_manager.js b/ambari-web/app/views/main/service/info/metrics/kafka/replica_manager.js
deleted file mode 100644
index 7452718..0000000
--- a/ambari-web/app/views/main/service/info/metrics/kafka/replica_manager.js
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-
-/**
- * @class
- *
- * This is a view for showing Kafka_BrokerTopicMetrics
- *
- * @extends App.ChartLinearTimeView
- * @extends Ember.Object
- * @extends Ember.View
- */
-App.ChartServiceMetricsKafka_ReplicaManager = App.ChartLinearTimeView.extend({
- id: "service-metrics-kafka-replica-manager-metrics",
- title: Em.I18n.t('services.service.info.metrics.kafka.server.ReplicaManager.title'),
- renderer: 'line',
- ajaxIndex: 'service.metrics.kafka.server.ReplicaManager',
-
- seriesTemplate: {
- path: 'metrics.kafka.server.ReplicaManager',
- displayName: function (name) {
- var displayNameMap = {
- LeaderCount: Em.I18n.t('services.service.info.metrics.kafka.server.ReplicaManager.displayNames.LeaderCount'),
- UnderReplicatedPartitions: Em.I18n.t('services.service.info.metrics.kafka.server.ReplicaManager.displayNames.UnderReplicatedPartitions'),
- PartitionCount: Em.I18n.t('services.service.info.metrics.kafka.server.ReplicaManager.displayNames.PartitionCount')
- };
- return displayNameMap[name];
- }
- }
-});
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/views/main/service/info/metrics/storm/executors_metric.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/metrics/storm/executors_metric.js b/ambari-web/app/views/main/service/info/metrics/storm/executors_metric.js
deleted file mode 100644
index 8eea33a..0000000
--- a/ambari-web/app/views/main/service/info/metrics/storm/executors_metric.js
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-require('mixins/common/chart/storm_linear_time');
-
-App.ChartServiceMetricsSTORM_Executors = App.ChartLinearTimeView.extend(App.StormLinearTimeChartMixin, {
- id: "service-metrics-storm-executors",
- title: Em.I18n.t('services.storm.executors.metrics.title'),
- renderer: 'line',
- yAxisFormatter: App.ChartLinearTimeView.DefaultFormatter,
-
- stormChartDefinition: [
- {
- name: Em.I18n.t('services.storm.executors.metrics.total'),
- field: 'totalexecutors'
- }
- ]
-
-});
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/views/main/service/info/metrics/storm/slots_number_metric.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/metrics/storm/slots_number_metric.js b/ambari-web/app/views/main/service/info/metrics/storm/slots_number_metric.js
deleted file mode 100644
index 543508b..0000000
--- a/ambari-web/app/views/main/service/info/metrics/storm/slots_number_metric.js
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-require('mixins/common/chart/storm_linear_time');
-
-App.ChartServiceMetricsSTORM_SlotsNumber = App.ChartLinearTimeView.extend(App.StormLinearTimeChartMixin, {
- id: "service-metrics-storm-supervisor-allocated",
- title: Em.I18n.t('services.storm.slots.metrics.title'),
- renderer: 'line',
- yAxisFormatter: App.ChartLinearTimeView.DefaultFormatter,
-
- stormChartDefinition: [
- {
- name: Em.I18n.t('services.storm.slots.metrics.total'),
- field: 'totalslots'
- },
- {
- name: Em.I18n.t('services.storm.slots.metrics.free'),
- field: 'freeslots'
- },
- {
- name: Em.I18n.t('services.storm.slots.metrics.used'),
- field: 'usedslots'
- }
- ]
-
-});
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/views/main/service/info/metrics/storm/tasks_metric.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/metrics/storm/tasks_metric.js b/ambari-web/app/views/main/service/info/metrics/storm/tasks_metric.js
deleted file mode 100644
index a70d358..0000000
--- a/ambari-web/app/views/main/service/info/metrics/storm/tasks_metric.js
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-require('mixins/common/chart/storm_linear_time');
-
-App.ChartServiceMetricsSTORM_Tasks = App.ChartLinearTimeView.extend(App.StormLinearTimeChartMixin, {
- id: "service-metrics-storm-tasks",
- title: Em.I18n.t('services.storm.tasks.metrics.title'),
- renderer: 'line',
- yAxisFormatter: App.ChartLinearTimeView.DefaultFormatter,
-
- stormChartDefinition: [
- {
- name: Em.I18n.t('services.storm.tasks.metrics.total'),
- field: 'totaltasks'
- }
- ]
-
-});
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/app/views/main/service/info/metrics/storm/topologies_metric.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/metrics/storm/topologies_metric.js b/ambari-web/app/views/main/service/info/metrics/storm/topologies_metric.js
deleted file mode 100644
index d831453..0000000
--- a/ambari-web/app/views/main/service/info/metrics/storm/topologies_metric.js
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-var App = require('app');
-require('mixins/common/chart/storm_linear_time');
-
-App.ChartServiceMetricsSTORM_Topologies = App.ChartLinearTimeView.extend(App.StormLinearTimeChartMixin, {
- id: "service-metrics-storm-topologies",
- title: Em.I18n.t('services.storm.topology.metrics.title'),
- renderer: 'line',
- yAxisFormatter: App.ChartLinearTimeView.DefaultFormatter,
-
- stormChartDefinition: [
- {
- name: Em.I18n.t('services.storm.topology.metrics.total'),
- field: 'topologies'
- }
- ]
-
-});
http://git-wip-us.apache.org/repos/asf/ambari/blob/66c35e01/ambari-web/test/mixins/common/chart/storm_linear_time_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mixins/common/chart/storm_linear_time_test.js b/ambari-web/test/mixins/common/chart/storm_linear_time_test.js
deleted file mode 100644
index 9a573a4..0000000
--- a/ambari-web/test/mixins/common/chart/storm_linear_time_test.js
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-var App = require('app');
-
-require('mixins/common/chart/storm_linear_time');
-
-var slt,
- template,
- series,
- jsonDataFalse = {
- metrics: {
- id: 'metrics'
- }
- },
- jsonDataTrue = {
- metrics: {
- storm: {
- nimbus: {
- name: 'nimbus'
- }
- }
- }
- };
-
-describe('App.StormLinearTimeChartMixin', function () {
-
- beforeEach(function () {
- slt = Em.Object.create(App.StormLinearTimeChartMixin, {
- stormChartDefinition: [
- {
- field: 'name',
- name: 'nimbus'
- }
- ]
- });
- });
-
- describe('#getDataForAjaxRequest', function () {
- it('should take data from stormChartDefinition', function () {
- template = slt.getDataForAjaxRequest().metricsTemplate;
- expect(template).to.contain('metrics');
- expect(template).to.contain('storm');
- expect(template).to.contain('nimbus');
- });
- });
-
- describe('#getData', function () {
- it('should be empty', function () {
- expect(slt.getData(jsonDataFalse)).to.be.empty;
- });
- it('should take one element from data', function () {
- slt.set('transformData', function (data, name) {
- return name + ': ' + JSON.stringify(data);
- });
- series = slt.getData(jsonDataTrue);
- expect(series).to.have.length(1);
- expect(series[0]).to.eql({
- name: 'nimbus',
- data: 'nimbus'
- });
- });
- });
-
-});
[17/19] ambari git commit: AMBARI-14842. ambari agent upstart support
(aonishuk)
Posted by yu...@apache.org.
AMBARI-14842. ambari agent upstart support (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fac9f36f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fac9f36f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fac9f36f
Branch: refs/heads/2.2.1-maint
Commit: fac9f36fc1e7822ee840bd35cc0820bd570a2af5
Parents: 116f016
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Fri Feb 12 11:44:32 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Fri Feb 12 11:45:09 2016 +0200
----------------------------------------------------------------------
ambari-agent/conf/unix/ambari-agent | 13 +++++++++++
ambari-agent/etc/init/ambari-agent.conf | 34 ++++++++++++++++++++++++++++
ambari-agent/pom.xml | 22 ++++++++++++++++++
3 files changed, 69 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/fac9f36f/ambari-agent/conf/unix/ambari-agent
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/ambari-agent b/ambari-agent/conf/unix/ambari-agent
index 50d13b1..27ade60 100755
--- a/ambari-agent/conf/unix/ambari-agent
+++ b/ambari-agent/conf/unix/ambari-agent
@@ -153,7 +153,14 @@ case "$1" in
change_files_permissions
echo "Starting ambari-agent"
+
+ if [ "$AMBARI_AGENT_RUN_IN_FOREGROUND" == true ] ; then
+ $PYTHON $AMBARI_AGENT_PY_SCRIPT "$@" > $OUTFILE 2>&1
+ exit $?
+ fi
+
nohup $PYTHON $AMBARI_AGENT_PY_SCRIPT "$@" > $OUTFILE 2>&1 &
+
sleep 2
PID=$!
echo "Verifying $AMBARI_AGENT process status..."
@@ -215,6 +222,12 @@ case "$1" in
echo "Stopping $AMBARI_AGENT"
change_files_permissions
$PYTHON $AGENT_SCRIPT stop
+
+ status ambari-agent 2>/dev/null | grep start 1>/dev/null
+ if [ "$?" -eq 0 ] ; then
+ echo "Stopping $AMBARI_AGENT upstart job"
+ stop ambari-agent > /dev/null
+ fi
fi
echo "Removing PID file at $PIDFILE"
ambari-sudo.sh rm -f $PIDFILE
http://git-wip-us.apache.org/repos/asf/ambari/blob/fac9f36f/ambari-agent/etc/init/ambari-agent.conf
----------------------------------------------------------------------
diff --git a/ambari-agent/etc/init/ambari-agent.conf b/ambari-agent/etc/init/ambari-agent.conf
new file mode 100644
index 0000000..021eb3b
--- /dev/null
+++ b/ambari-agent/etc/init/ambari-agent.conf
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific
+
+#ambari-agent
+description "ambari agent"
+
+stop on runlevel [06]
+
+env PIDFILE=/var/run/ambari-agent/ambari-agent.pid
+
+kill signal SIGKILL
+respawn
+
+script
+ . /etc/environment
+
+ export AMBARI_AGENT_RUN_IN_FOREGROUND=true
+ exec /etc/init.d/ambari-agent start
+end script
+
+post-stop script
+ rm -f $PIDFILE
+end script
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/fac9f36f/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index b8f0407..63d6044 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -409,6 +409,17 @@
<groupname>root</groupname>
</mapping>
<mapping>
+ <directory>/etc/init</directory>
+ <filemode>755</filemode>
+ <username>root</username>
+ <groupname>root</groupname>
+ <sources>
+ <source>
+ <location>etc/init/ambari-agent.conf</location>
+ </source>
+ </sources>
+ </mapping>
+ <mapping>
<directory>${init.d.dir}</directory>
<filemode>755</filemode>
<username>root</username>
@@ -620,6 +631,17 @@
</mapper>
</data>
<data>
+ <src>etc/init/ambari-agent.conf</src>
+ <type>file</type>
+ <mapper>
+ <type>perm</type>
+ <prefix>/etc/init</prefix>
+ <user>root</user>
+ <group>root</group>
+ <filemode>755</filemode>
+ </mapper>
+ </data>
+ <data>
<src>${basedir}/target/src/version</src>
<type>file</type>
<mapper>
[10/19] ambari git commit: AMBARI-14949: Ambaripreupload should skip
uploading oozie shared lib for upgrade (jluniya)
Posted by yu...@apache.org.
AMBARI-14949: Ambaripreupload should skip uploading oozie shared lib for upgrade (jluniya)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1f4e3338
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1f4e3338
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1f4e3338
Branch: refs/heads/2.2.1-maint
Commit: 1f4e3338815cb6fe91ac1ef0f4effb53b192348c
Parents: 66c35e0
Author: Jayush Luniya <jl...@hortonworks.com>
Authored: Mon Feb 8 14:36:00 2016 -0800
Committer: Jayush Luniya <jl...@hortonworks.com>
Committed: Mon Feb 8 16:10:28 2016 -0800
----------------------------------------------------------------------
.../main/resources/scripts/Ambaripreupload.py | 31 ++++++++++++--------
1 file changed, 18 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/1f4e3338/ambari-server/src/main/resources/scripts/Ambaripreupload.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index 591b7d2..5a20698 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -85,8 +85,10 @@ with Environment() as env:
parser = OptionParser()
parser.add_option("-v", "--hdp-version", dest="hdp_version", default="",
help="hdp-version used in path of tarballs")
-
+ parser.add_option("-u", "--upgrade", dest="upgrade", action="store_true",
+ help="flag to indicate script is being run for upgrade", default=False)
(options, args) = parser.parse_args()
+
# See if hdfs path prefix is provided on the command line. If yes, use that value, if no
# use empty string as default.
@@ -273,19 +275,22 @@ with Environment() as env:
oozie_hdfs_user_dir = format("{hdfs_path_prefix}/user/{oozie_user}")
kinit_if_needed = ''
- params.HdfsResource(format("{oozie_hdfs_user_dir}/share/"),
- action="delete_on_execute",
- type = 'directory'
- )
+ if options.upgrade:
+ Logger.info("Skipping uploading oozie shared lib during upgrade")
+ else:
+ params.HdfsResource(format("{oozie_hdfs_user_dir}/share/"),
+ action="delete_on_execute",
+ type = 'directory'
+ )
- params.HdfsResource(format("{oozie_hdfs_user_dir}/share"),
- action="create_on_execute",
- type = 'directory',
- mode=0755,
- recursive_chmod = True,
- owner=oozie_user,
- source = oozie_shared_lib,
- )
+ params.HdfsResource(format("{oozie_hdfs_user_dir}/share"),
+ action="create_on_execute",
+ type = 'directory',
+ mode=0755,
+ recursive_chmod = True,
+ owner=oozie_user,
+ source = oozie_shared_lib,
+ )
print "Copying tarballs..."
copy_tarballs_to_hdfs(format("/usr/hdp/{hdp_version}/hadoop/mapreduce.tar.gz"), hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/mapreduce/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
[15/19] ambari git commit: AMBARI-15004. RU/EU: Upgrading Oozie
database fails since new configs are not yet written to
/usr/hdp/current/oozie-server/conf (alejandro)
Posted by yu...@apache.org.
AMBARI-15004. RU/EU: Upgrading Oozie database fails since new configs are not yet written to /usr/hdp/current/oozie-server/conf (alejandro)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2ceacede
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2ceacede
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2ceacede
Branch: refs/heads/2.2.1-maint
Commit: 2ceacedeb1ebdfa5da138f6b1c2918dcb342696f
Parents: 9120682
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Thu Feb 11 17:31:32 2016 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Thu Feb 11 17:31:32 2016 -0800
----------------------------------------------------------------------
.../state/stack/upgrade/ClusterGrouping.java | 7 ++++
.../state/stack/upgrade/ExecuteHostType.java | 7 ++++
.../state/stack/upgrade/TaskWrapperBuilder.java | 15 +++++++-
.../4.0.0.2.0/package/scripts/oozie_server.py | 36 ++++++++++++++------
.../package/scripts/oozie_server_upgrade.py | 4 ++-
.../4.0.0.2.0/package/scripts/params_linux.py | 3 ++
.../HDP/2.1/upgrades/nonrolling-upgrade-2.3.xml | 3 +-
.../HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml | 5 ++-
.../HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml | 5 ++-
.../HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml | 5 ++-
.../stacks/HDP/2.2/upgrades/upgrade-2.2.xml | 5 ++-
.../stacks/HDP/2.2/upgrades/upgrade-2.3.xml | 5 ++-
.../stacks/HDP/2.2/upgrades/upgrade-2.4.xml | 5 ++-
.../HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml | 5 ++-
.../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml | 5 ++-
.../stacks/HDP/2.3/upgrades/upgrade-2.3.xml | 5 ++-
.../stacks/HDP/2.3/upgrades/upgrade-2.4.xml | 5 ++-
.../HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml | 5 ++-
.../stacks/HDP/2.4/upgrades/upgrade-2.4.xml | 5 ++-
19 files changed, 109 insertions(+), 26 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
index 5e21da5..8fb6ef5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
@@ -232,6 +232,13 @@ public class ClusterGrouping extends Grouping {
realHosts = Collections.singleton(hosts.hosts.iterator().next());
}
+ // Pick the first host sorted alphabetically (case insensitive)
+ if (ExecuteHostType.FIRST == et.hosts && !hosts.hosts.isEmpty()) {
+ List<String> sortedHosts = new ArrayList<>(hosts.hosts);
+ Collections.sort(sortedHosts, String.CASE_INSENSITIVE_ORDER);
+ realHosts = Collections.singleton(sortedHosts.get(0));
+ }
+
// !!! cannot execute against empty hosts (safety net)
if (realHosts.isEmpty()) {
return null;
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ExecuteHostType.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ExecuteHostType.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ExecuteHostType.java
index b36dca4..80deb60 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ExecuteHostType.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ExecuteHostType.java
@@ -42,6 +42,13 @@ public enum ExecuteHostType {
ANY,
/**
+ * Run on a single host that is picked by alphabetically sorting all hosts that satisfy the condition of the {@link ExecuteTask}
+ * .
+ */
+ @XmlEnumValue("first")
+ FIRST,
+
+ /**
* Run on all of the hosts.
*/
@XmlEnumValue("all")
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
index 81a3a4d..f2ef8f0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/TaskWrapperBuilder.java
@@ -75,7 +75,20 @@ public class TaskWrapperBuilder {
collection.add(new TaskWrapper(service, component, Collections.singleton(hostsType.hosts.iterator().next()), params, t));
continue;
} else {
- LOG.error(MessageFormat.format("Found an Execute task for {0} and {1} meant to run on a any host but could not find host to run on. Skipping this task.", service, component));
+ LOG.error(MessageFormat.format("Found an Execute task for {0} and {1} meant to run on any host but could not find host to run on. Skipping this task.", service, component));
+ continue;
+ }
+ }
+
+ // Pick the first host sorted alphabetically (case insensitive).
+ if (et.hosts == ExecuteHostType.FIRST) {
+ if (hostsType.hosts != null && !hostsType.hosts.isEmpty()) {
+ List<String> sortedHosts = new ArrayList<>(hostsType.hosts);
+ Collections.sort(sortedHosts, String.CASE_INSENSITIVE_ORDER);
+ collection.add(new TaskWrapper(service, component, Collections.singleton(sortedHosts.get(0)), params, t));
+ continue;
+ } else {
+ LOG.error(MessageFormat.format("Found an Execute task for {0} and {1} meant to run on the first host sorted alphabetically but could not find host to run on. Skipping this task.", service, component));
continue;
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
index 35975df..35a5281 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
@@ -24,6 +24,8 @@ from resource_management.libraries.functions import compare_versions
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import hdp_select
from resource_management.libraries.functions import format_hdp_stack_version
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions import default
from resource_management.libraries.functions.constants import Direction
from resource_management.libraries.functions.security_commons import build_expectations
from resource_management.libraries.functions.security_commons import cached_kinit_executor
@@ -33,6 +35,7 @@ from resource_management.libraries.functions.security_commons import FILE_TYPE_X
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyImpl
+from ambari_commons.constants import UPGRADE_TYPE_NON_ROLLING, UPGRADE_TYPE_ROLLING
from oozie import oozie
from oozie_service import oozie_service
@@ -51,19 +54,30 @@ class OozieServer(Script):
def configure(self, env, upgrade_type=None):
import params
- if upgrade_type == "nonrolling" and params.upgrade_direction == Direction.UPGRADE and \
- params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
- conf_select.select(params.stack_name, "oozie", params.version)
- # In order for the "/usr/hdp/current/oozie-<client/server>" point to the new version of
- # oozie, we need to create the symlinks both for server and client.
- # This is required as both need to be pointing to new installed oozie version.
+ # The configure command doesn't actually receive the upgrade_type from Script.py, so get it from the config dictionary
+ if upgrade_type is None:
+ restart_type = default("/commandParams/restart_type", "")
+ if restart_type.lower() == "rolling_upgrade":
+ upgrade_type = UPGRADE_TYPE_ROLLING
+ elif restart_type.lower() == "nonrolling_upgrade":
+ upgrade_type = UPGRADE_TYPE_NON_ROLLING
+
+ if upgrade_type is not None and params.upgrade_direction == Direction.UPGRADE and params.version is not None:
+ Logger.info(format("Configuring Oozie during upgrade type: {upgrade_type}, direction: {params.upgrade_direction}, and version {params.version}"))
+ if compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+ # In order for the "/usr/hdp/current/oozie-<client/server>" point to the new version of
+ # oozie, we need to create the symlinks both for server and client.
+ # This is required as both need to be pointing to new installed oozie version.
+
+ # Sets the symlink : eg: /usr/hdp/current/oozie-client -> /usr/hdp/2.3.x.y-<version>/oozie
+ hdp_select.select("oozie-client", params.version)
+ # Sets the symlink : eg: /usr/hdp/current/oozie-server -> /usr/hdp/2.3.x.y-<version>/oozie
+ hdp_select.select("oozie-server", params.version)
+
+ if compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') >= 0:
+ conf_select.select(params.stack_name, "oozie", params.version)
- # Sets the symlink : eg: /usr/hdp/current/oozie-client -> /usr/hdp/2.3.x.y-<version>/oozie
- hdp_select.select("oozie-client", params.version)
- # Sets the symlink : eg: /usr/hdp/current/oozie-server -> /usr/hdp/2.3.x.y-<version>/oozie
- hdp_select.select("oozie-server", params.version)
env.set_params(params)
-
oozie(is_server=True)
def start(self, env, upgrade_type=None):
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
index 326e76c..bbc1ee4 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
@@ -234,6 +234,8 @@ class OozieUpgrade(Script):
import params
env.set_params(params)
+ Logger.info("Will upgrade the Oozie database")
+
# get the kerberos token if necessary to execute commands as oozie
if params.security_enabled:
oozie_principal_with_host = params.oozie_principal.replace("_HOST", params.hostname)
@@ -247,7 +249,7 @@ class OozieUpgrade(Script):
stack_version = upgrade_stack[1]
# upgrade oozie DB
- Logger.info('Upgrading the Oozie database...')
+ Logger.info(format('Upgrading the Oozie database, using version {stack_version}'))
# the database upgrade requires the db driver JAR, but since we have
# not yet run hdp-select to upgrade the current points, we have to use
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 89cf8e7..3c34254 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -106,7 +106,10 @@ execute_path = oozie_bin_dir + os.pathsep + hadoop_bin_dir
oozie_user = config['configurations']['oozie-env']['oozie_user']
smokeuser = config['configurations']['cluster-env']['smokeuser']
smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
+
+# This config actually contains {oozie_user}
oozie_admin_users = format(config['configurations']['oozie-env']['oozie_admin_users'])
+
user_group = config['configurations']['cluster-env']['user_group']
jdk_location = config['hostLevelParams']['jdk_location']
check_db_connection_jar_name = "DBConnectionVerification.jar"
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.1/upgrades/nonrolling-upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/upgrades/nonrolling-upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/upgrades/nonrolling-upgrade-2.3.xml
index e581506..0951a43 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/upgrades/nonrolling-upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/upgrades/nonrolling-upgrade-2.3.xml
@@ -791,10 +791,11 @@
<!-- We need to set up the "/etc/oozie/conf" symlink before upgrading, as the HDP 2.1 directory
pertaining to oozie has been deleted as part of HDP 2.1 removal in Express Upgrade
from HDP 2.1->2.3.
+ This configure task should run on all Oozie Servers.
-->
<task xsi:type="configure_function"/>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml
index 098682b..688c890 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.2.xml
@@ -749,7 +749,10 @@
<service name="OOZIE">
<component name="OOZIE_SERVER">
<pre-upgrade>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml
index fe9d97c..d7b5b18 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml
@@ -1043,7 +1043,10 @@
<service name="OOZIE">
<component name="OOZIE_SERVER">
<pre-upgrade>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml
index b756deb..b5a2462 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml
@@ -1134,7 +1134,10 @@
<service name="OOZIE">
<component name="OOZIE_SERVER">
<pre-upgrade>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
index d2f6f48..c79a71c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
@@ -599,7 +599,10 @@
<function>stop</function>
</task>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
index c058536..94b19d5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
@@ -754,7 +754,10 @@
<function>stop</function>
</task>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.4.xml
index d279ec1..c80fa67 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.4.xml
@@ -761,7 +761,10 @@
<function>stop</function>
</task>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
index e3b8546..77c54f3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml
@@ -860,7 +860,10 @@
<service name="OOZIE">
<component name="OOZIE_SERVER">
<pre-upgrade>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
index 9b36ec9..f56be53 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
@@ -890,7 +890,10 @@
<service name="OOZIE">
<component name="OOZIE_SERVER">
<pre-upgrade>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
index 5851119..03a7bd1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.3.xml
@@ -699,7 +699,10 @@
<function>stop</function>
</task>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
index 4ce6dc3..9bbb6e0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
@@ -684,7 +684,10 @@
<function>stop</function>
</task>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
index 2600eae..025b966 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml
@@ -855,7 +855,10 @@
<service name="OOZIE">
<component name="OOZIE_SERVER">
<pre-upgrade>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
http://git-wip-us.apache.org/repos/asf/ambari/blob/2ceacede/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
index b91e27a..b8bb32c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.4.xml
@@ -692,7 +692,10 @@
<function>stop</function>
</task>
- <task xsi:type="execute" hosts="any" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
+ <!-- It is extremely important that both of these tasks run on the exact same host. Hence, pick the first alphabetically. -->
+ <task xsi:type="configure_function" hosts="first" />
+
+ <task xsi:type="execute" hosts="first" sequential="true" summary="Upgrading the Oozie database and creating a new sharelib">
<script>scripts/oozie_server_upgrade.py</script>
<function>upgrade_oozie_database_and_sharelib</function>
</task>
[08/19] ambari git commit: AMBARI-14745. Ambari server throws error
when unused configs are present in blueprint. (Oliver Szabo via rnettleton)
Posted by yu...@apache.org.
AMBARI-14745. Ambari server throws error when unused configs are present in blueprint. (Oliver Szabo via rnettleton)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/88dab3b4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/88dab3b4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/88dab3b4
Branch: refs/heads/2.2.1-maint
Commit: 88dab3b48e8067613e12a1972742d820eb7b48ed
Parents: 4c31108
Author: Bob Nettleton <rn...@hortonworks.com>
Authored: Fri Feb 5 11:44:23 2016 -0500
Committer: Bob Nettleton <rn...@hortonworks.com>
Committed: Fri Feb 5 11:44:23 2016 -0500
----------------------------------------------------------------------
.../topology/ClusterConfigurationRequest.java | 28 ++++++++
.../ambari/server/topology/Configuration.java | 15 ++++
.../ClusterConfigurationRequestTest.java | 75 +++++++++++++++++---
.../ClusterInstallWithoutStartTest.java | 2 +
.../server/topology/ConfigurationTest.java | 14 ++++
.../server/topology/TopologyManagerTest.java | 2 +
6 files changed, 128 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/88dab3b4/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
index 464aee7..a995a3b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
@@ -30,6 +30,7 @@ import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurati
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.SecurityType;
import org.apache.ambari.server.utils.StageUtils;
+import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -74,11 +75,38 @@ public class ClusterConfigurationRequest {
// set initial configuration (not topology resolved)
this.configurationProcessor = new BlueprintConfigurationProcessor(clusterTopology);
this.stackAdvisorBlueprintProcessor = stackAdvisorBlueprintProcessor;
+ removeOrphanConfigTypes(clusterTopology);
if (setInitial) {
setConfigurationsOnCluster(clusterTopology, TopologyManager.INITIAL_CONFIG_TAG, Collections.<String>emptySet());
}
}
+ /**
+ * Remove config-types, if there is no any services related to them (except cluster-env and global).
+ */
+ private void removeOrphanConfigTypes(ClusterTopology clusterTopology) {
+ Configuration configuration = clusterTopology.getConfiguration();
+ Collection<String> configTypes = configuration.getAllConfigTypes();
+ for (String configType : configTypes) {
+ if (!configType.equals("cluster-env") && !configType.equals("global")) {
+ String service = clusterTopology.getBlueprint().getStack().getServiceForConfigType(configType);
+ if (!clusterTopology.getBlueprint().getServices().contains(service)) {
+ configuration.removeConfigType(configType);
+ LOG.info("Not found any service for config type '{}'. It will be removed from configuration.", configType);
+ Map<String, HostGroupInfo> hostGroupInfoMap = clusterTopology.getHostGroupInfo();
+ if (MapUtils.isNotEmpty(hostGroupInfoMap)) {
+ for (Map.Entry<String, HostGroupInfo> hostGroupInfo : hostGroupInfoMap.entrySet()) {
+ if (hostGroupInfo.getValue().getConfiguration() != null) {
+ hostGroupInfo.getValue().getConfiguration().removeConfigType(configType);
+ LOG.info("Not found any service for config type '{}'. It will be removed from host group scoped configuration.", configType);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
public ClusterConfigurationRequest(AmbariContext ambariContext, ClusterTopology topology, boolean setInitial, StackAdvisorBlueprintProcessor stackAdvisorBlueprintProcessor, boolean configureSecurity) {
this(ambariContext, topology, setInitial, stackAdvisorBlueprintProcessor);
this.configureSecurity = configureSecurity;
http://git-wip-us.apache.org/repos/asf/ambari/blob/88dab3b4/ambari-server/src/main/java/org/apache/ambari/server/topology/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/Configuration.java
index 108ff74..79281b4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/Configuration.java
@@ -344,4 +344,19 @@ public class Configuration {
public void setParentConfiguration(Configuration parent) {
parentConfiguration = parent;
}
+
+ /**
+ * Remove all occurrences of a config type
+ */
+ public void removeConfigType(String configType) {
+ if (properties != null && properties.containsKey(configType)) {
+ properties.remove(configType);
+ }
+ if (attributes != null && attributes.containsKey(configType)) {
+ attributes.remove(configType);
+ }
+ if (parentConfiguration != null) {
+ parentConfiguration.removeConfigType(configType);
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/88dab3b4/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java
index 5967a64..3ed8ed5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java
@@ -28,6 +28,17 @@ import org.apache.ambari.server.controller.internal.Stack;
import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurationException;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import com.google.common.collect.Maps;
+import org.easymock.EasyMock;
import org.easymock.Capture;
import org.easymock.CaptureType;
import org.easymock.EasyMockRule;
@@ -40,14 +51,6 @@ import org.powermock.api.easymock.PowerMock;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
import static org.easymock.EasyMock.anyBoolean;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
@@ -55,6 +58,8 @@ import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.newCapture;
import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import static org.easymock.EasyMock.capture;
import static org.junit.Assert.assertEquals;
@@ -183,6 +188,7 @@ public class ClusterConfigurationRequestTest {
expect(clusters.getCluster("testCluster")).andReturn(cluster).anyTimes();
expect(blueprint.getStack()).andReturn(stack).anyTimes();
+ expect(stack.getServiceForConfigType(anyString())).andReturn("KERBEROS").anyTimes();
expect(stack.getAllConfigurationTypes(anyString())).andReturn(Collections.<String>singletonList("testConfigType")
).anyTimes();
expect(stack.getExcludedConfigurationTypes(anyString())).andReturn(Collections.<String>emptySet()).anyTimes();
@@ -307,4 +313,57 @@ public class ClusterConfigurationRequestTest {
}
+ @Test
+ public void testProcessClusterConfigRequestRemoveUnusedConfigTypes() {
+ // GIVEN
+ Configuration configuration = createConfigurations();
+ Set<String> services = new HashSet<String>();
+ services.add("HDFS");
+ services.add("RANGER");
+ Map<String, HostGroupInfo> hostGroupInfoMap = Maps.newHashMap();
+ HostGroupInfo hg1 = new HostGroupInfo("hg1");
+ hg1.setConfiguration(createConfigurations());
+ hostGroupInfoMap.put("hg1", hg1);
+
+ expect(topology.getConfiguration()).andReturn(configuration).anyTimes();
+ expect(topology.getBlueprint()).andReturn(blueprint).anyTimes();
+ expect(topology.getHostGroupInfo()).andReturn(hostGroupInfoMap);
+ expect(blueprint.getStack()).andReturn(stack).anyTimes();
+ expect(blueprint.getServices()).andReturn(services).anyTimes();
+ expect(stack.getServiceForConfigType("hdfs-site")).andReturn("HDFS").anyTimes();
+ expect(stack.getServiceForConfigType("admin-properties")).andReturn("RANGER").anyTimes();
+ expect(stack.getServiceForConfigType("yarn-site")).andReturn("YARN").anyTimes();
+
+ EasyMock.replay(stack, blueprint, topology);
+ // WHEN
+ new ClusterConfigurationRequest(ambariContext, topology, false, stackAdvisorBlueprintProcessor);
+ // THEN
+ assertFalse(configuration.getFullProperties().containsKey("yarn-site"));
+ assertFalse(configuration.getFullAttributes().containsKey("yarn-site"));
+ assertTrue(configuration.getFullAttributes().containsKey("hdfs-site"));
+ assertTrue(configuration.getFullProperties().containsKey("cluster-env"));
+ assertTrue(configuration.getFullProperties().containsKey("global"));
+ assertFalse(hg1.getConfiguration().getFullAttributes().containsKey("yarn-site"));
+ verify(stack, blueprint, topology);
+ }
+
+ private Configuration createConfigurations() {
+ Map<String, Map<String, String>> firstLevelConfig = Maps.newHashMap();
+ firstLevelConfig.put("hdfs-site", new HashMap<String, String>());
+ firstLevelConfig.put("yarn-site", new HashMap<String, String>());
+ firstLevelConfig.put("cluster-env", new HashMap<String, String>());
+ firstLevelConfig.put("global", new HashMap<String, String>());
+
+ Map<String, Map<String, Map<String, String>>> firstLevelAttributes = Maps.newHashMap();
+ firstLevelAttributes.put("hdfs-site", new HashMap<String, Map<String, String>>());
+
+ Map<String, Map<String, String>> secondLevelConfig = Maps.newHashMap();
+ secondLevelConfig.put("admin-properties", new HashMap<String, String>());
+ Map<String, Map<String, Map<String, String>>> secondLevelAttributes = Maps.newHashMap();
+ secondLevelAttributes.put("admin-properties", new HashMap<String, Map<String, String>>());
+ secondLevelAttributes.put("yarn-site", new HashMap<String, Map<String, String>>());
+
+ Configuration secondLevelConf = new Configuration(secondLevelConfig, secondLevelAttributes);
+ return new Configuration(firstLevelConfig, firstLevelAttributes, secondLevelConf);
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/88dab3b4/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
index dd66b1b..156580a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
@@ -251,6 +251,8 @@ public class ClusterInstallWithoutStartTest {
expect(stack.getConfiguration()).andReturn(stackConfig).anyTimes();
expect(stack.getName()).andReturn(STACK_NAME).anyTimes();
expect(stack.getVersion()).andReturn(STACK_VERSION).anyTimes();
+ expect(stack.getServiceForConfigType("service1-site")).andReturn("service1");
+ expect(stack.getServiceForConfigType("service2-site")).andReturn("service2");
expect(stack.getExcludedConfigurationTypes("service1")).andReturn(Collections.<String>emptySet()).anyTimes();
expect(stack.getExcludedConfigurationTypes("service2")).andReturn(Collections.<String>emptySet()).anyTimes();
http://git-wip-us.apache.org/repos/asf/ambari/blob/88dab3b4/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigurationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigurationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigurationTest.java
index e971e03..c4d70b5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigurationTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigurationTest.java
@@ -346,6 +346,20 @@ public class ConfigurationTest {
assertNull(configuration.getPropertyValue("type1", "XXXXX"));
}
+ @Test
+ public void testRemoveConfigTypes() {
+ Configuration configuration = createConfigurationWithParents_PropsOnly();
+ configuration.removeConfigType("type1");
+ assertNull(configuration.getProperties().get("type1"));
+ }
+
+ @Test
+ public void testRemoveConfigTypesForAttributes() {
+ Configuration configuration = createConfigurationWithParents_PropsOnly();
+ configuration.removeConfigType("type1");
+ assertNull(configuration.getAttributes().get("type1"));
+ }
+
private Configuration createConfigurationWithParents_PropsOnly() {
// parents parent config properties
Map<String, Map<String, String>> parentParentProperties = new HashMap<String, Map<String, String>>();
http://git-wip-us.apache.org/repos/asf/ambari/blob/88dab3b4/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
index 7810f92..69c1935 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
@@ -228,6 +228,8 @@ public class TopologyManagerTest {
expect(stack.getComponents()).andReturn(serviceComponents).anyTimes();
expect(stack.getComponents("service1")).andReturn(serviceComponents.get("service1")).anyTimes();
expect(stack.getComponents("service2")).andReturn(serviceComponents.get("service2")).anyTimes();
+ expect(stack.getServiceForConfigType("service1-site")).andReturn("service1");
+ expect(stack.getServiceForConfigType("service2-site")).andReturn("service2");
expect(stack.getConfiguration()).andReturn(stackConfig).anyTimes();
expect(stack.getName()).andReturn(STACK_NAME).anyTimes();
expect(stack.getVersion()).andReturn(STACK_VERSION).anyTimes();
[06/19] ambari git commit: AMBARI-14810 When there are multiple Job
History Servers in the cluster,
QuickLinks should show the URL for all instances. (atkach)
Posted by yu...@apache.org.
AMBARI-14810 When there are multiple Job History Servers in the cluster, QuickLinks should show the URL for all instances. (atkach)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c579388e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c579388e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c579388e
Branch: refs/heads/2.2.1-maint
Commit: c579388e64aa32ea4b3df1126ebd7beb1759c1cb
Parents: 81c58bb
Author: Andrii Tkach <at...@hortonworks.com>
Authored: Wed Jan 27 17:40:21 2016 +0200
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Feb 4 11:36:39 2016 -0800
----------------------------------------------------------------------
.../app/views/common/quick_view_link_view.js | 18 +++++++++++++++++-
1 file changed, 17 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c579388e/ambari-web/app/views/common/quick_view_link_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/quick_view_link_view.js b/ambari-web/app/views/common/quick_view_link_view.js
index 7825c09..ece034c 100644
--- a/ambari-web/app/views/common/quick_view_link_view.js
+++ b/ambari-web/app/views/common/quick_view_link_view.js
@@ -228,6 +228,9 @@ App.QuickViewLinks = Em.View.extend({
}
if (item.get('service_id')==='OOZIE') {
newItem.url = item.get('template').fmt(protocol, host.publicHostName, port, App.router.get('loginName'));
+ } else if (item.get('service_id')==='MAPREDUCE2') {
+ var hostPortConfigValue = "%@:%@".fmt(host.publicHostName, port);
+ newItem.url = item.get('template').fmt(protocol, hostPortConfigValue);
} else {
newItem.url = item.get('template').fmt(protocol, host.publicHostName, port);
}
@@ -259,10 +262,11 @@ App.QuickViewLinks = Em.View.extend({
return [App.get('singleNodeAlias')];
}
var hosts = [];
+ var components;
switch (serviceName) {
case 'OOZIE':
// active OOZIE components
- var components = this.get('content.hostComponents').filterProperty('componentName','OOZIE_SERVER').filterProperty('workStatus', 'STARTED');
+ components = this.get('content.hostComponents').filterProperty('componentName','OOZIE_SERVER').filterProperty('workStatus', 'STARTED');
if (components && components.length > 1) {
components.forEach(function (component) {
hosts.push({
@@ -361,6 +365,18 @@ App.QuickViewLinks = Em.View.extend({
case "ATLAS":
hosts[0] = this.findComponentHost(response.items, "ATLAS_SERVER");
break;
+ case "MAPREDUCE2":
+ components = this.get('content.hostComponents').filterProperty('componentName', 'HISTORYSERVER');
+ if (components && components.length > 1) {
+ components.forEach(function (component) {
+ hosts.push({
+ 'publicHostName': response.items.findProperty('Hosts.host_name', component.get('hostName')).Hosts.public_host_name
+ });
+ });
+ } else if (components && components.length === 1) {
+ hosts[0] = this.findComponentHost(response.items, 'HISTORYSERVER');
+ }
+ break;
default:
var service = App.StackService.find().findProperty('serviceName', serviceName);
if (service && service.get('hasMaster')) {
[19/19] ambari git commit: AMBARI-15029. Adding a Service results in
deleting Config Group mappings (more than 1 CG present) (akovalenko)
Posted by yu...@apache.org.
AMBARI-15029. Adding a Service results in deleting Config Group mappings (more than 1 CG present) (akovalenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7bd0a877
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7bd0a877
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7bd0a877
Branch: refs/heads/2.2.1-maint
Commit: 7bd0a87775bfd32e32257c2ceab4c78110c7c0df
Parents: 6893b5a
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Fri Feb 12 16:55:58 2016 +0200
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Fri Feb 12 10:30:31 2016 -0800
----------------------------------------------------------------------
.../controllers/main/service/info/configs.js | 2 +-
ambari-web/app/controllers/wizard.js | 4 +++-
.../app/controllers/wizard/step7_controller.js | 21 ++++++++++++++------
ambari-web/app/routes/add_service_routes.js | 1 +
ambari-web/app/utils/config.js | 18 +++++++++--------
ambari-web/test/controllers/wizard_test.js | 5 ++---
6 files changed, 32 insertions(+), 19 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/7bd0a877/ambari-web/app/controllers/main/service/info/configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/configs.js b/ambari-web/app/controllers/main/service/info/configs.js
index 0447392..f265c66 100644
--- a/ambari-web/app/controllers/main/service/info/configs.js
+++ b/ambari-web/app/controllers/main/service/info/configs.js
@@ -485,7 +485,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ConfigsLoader, A
}
} else {
var isEditable = self.get('canEdit') && configGroup.get('name') == self.get('selectedConfigGroup.name');
- allConfigs.push(App.config.createCustomGroupConfig(prop, config, configGroup, isEditable));
+ allConfigs.push(App.config.createCustomGroupConfig(prop, config.type, config.properties[prop], configGroup, isEditable));
}
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/7bd0a877/ambari-web/app/controllers/wizard.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard.js b/ambari-web/app/controllers/wizard.js
index ad78d8c..aa82234 100644
--- a/ambari-web/app/controllers/wizard.js
+++ b/ambari-web/app/controllers/wizard.js
@@ -870,11 +870,13 @@ App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingM
installedServiceNamesMap[name] = true;
});
stepController.get('stepConfigs').forEach(function (_content) {
-
if (_content.serviceName === 'YARN') {
_content.set('configs', App.config.textareaIntoFileConfigs(_content.get('configs'), 'capacity-scheduler.xml'));
}
_content.get('configs').forEach(function (_configProperties) {
+ if (!Em.isNone(_configProperties.get('group'))) {
+ return false;
+ }
var configProperty = App.config.createDefaultConfig(
_configProperties.get('name'),
_configProperties.get('serviceName'),
http://git-wip-us.apache.org/repos/asf/ambari/blob/7bd0a877/ambari-web/app/controllers/wizard/step7_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step7_controller.js b/ambari-web/app/controllers/wizard/step7_controller.js
index 4b27c76..2515219 100644
--- a/ambari-web/app/controllers/wizard/step7_controller.js
+++ b/ambari-web/app/controllers/wizard/step7_controller.js
@@ -531,6 +531,9 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
if (Em.isNone(serviceConfigProperty.get('isOverridable'))) {
serviceConfigProperty.set('isOverridable', true);
}
+ if (!Em.isNone(serviceConfigProperty.get('group'))) {
+ serviceConfigProperty.get('group.properties').pushObject(serviceConfigProperty);
+ }
this._updateOverridesForConfig(serviceConfigProperty, component);
this._updateIsEditableFlagForConfig(serviceConfigProperty, defaultGroupSelected);
@@ -1268,23 +1271,28 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
var readyGroup = App.ConfigGroup.create(configGroup);
var wrappedProperties = [];
readyGroup.get('properties').forEach(function (propertyData) {
- var parentSCP = service.configs.filterProperty('filename', propertyData.filename).findProperty('name', propertyData.name);
- var overriddenSCP = App.ServiceConfigProperty.create(parentSCP);
+ var overriddenSCP, parentSCP = service.configs.filterProperty('filename', propertyData.filename).findProperty('name', propertyData.name);
+ if (parentSCP) {
+ overriddenSCP = App.ServiceConfigProperty.create(parentSCP);
+ overriddenSCP.set('parentSCP', parentSCP);
+ } else {
+ overriddenSCP = App.config.createCustomGroupConfig(propertyData.name, propertyData.filename, propertyData.value, readyGroup, true, false);
+ this.get('stepConfigs').findProperty('serviceName', service.serviceName).get('configs').pushObject(overriddenSCP);
+ }
overriddenSCP.set('isOriginalSCP', false);
- overriddenSCP.set('parentSCP', parentSCP);
overriddenSCP.set('group', readyGroup);
overriddenSCP.setProperties(propertyData);
wrappedProperties.pushObject(App.ServiceConfigProperty.create(overriddenSCP));
- });
+ }, this);
wrappedProperties.setEach('group', readyGroup);
readyGroup.set('properties', wrappedProperties);
readyGroup.set('parentConfigGroup', defaultGroup);
serviceGroups.pushObject(readyGroup);
- });
+ }, this);
defaultGroup.set('childConfigGroups', serviceGroups);
serviceGroups.pushObject(defaultGroup);
}
- });
+ }, this);
},
/**
@@ -1365,6 +1373,7 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
* @method _setOverrides
*/
_setOverrides: function (config, overrides) {
+ if (config.get('group')) return config;
var selectedGroup = this.get('selectedConfigGroup'),
overrideToAdd = this.get('overrideToAdd'),
configOverrides = overrides.filterProperty('name', config.get('name'));
http://git-wip-us.apache.org/repos/asf/ambari/blob/7bd0a877/ambari-web/app/routes/add_service_routes.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/add_service_routes.js b/ambari-web/app/routes/add_service_routes.js
index 85be52b..ea3baa1 100644
--- a/ambari-web/app/routes/add_service_routes.js
+++ b/ambari-web/app/routes/add_service_routes.js
@@ -205,6 +205,7 @@ module.exports = App.WizardRoute.extend({
recommendationsConfigs: null
});
router.get('wizardStep7Controller').set('recommendationsConfigs', null);
+ addServiceController.setDBProperty('serviceConfigGroups', undefined);
router.transitionTo('step4');
});
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/7bd0a877/ambari-web/app/utils/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js
index f478fd3..c6620a3 100644
--- a/ambari-web/app/utils/config.js
+++ b/ambari-web/app/utils/config.js
@@ -858,19 +858,20 @@ App.config = Em.Object.create({
loadServiceConfigGroupOverridesSuccess: function (data, opt, params) {
data.items.forEach(function (config) {
+ var hostOverrideValue, hostOverrideIsFinal;
var group = params.typeTagToGroupMap[config.type + "///" + config.tag];
var properties = config.properties;
for (var prop in properties) {
var fileName = this.getOriginalFileName(config.type);
var serviceConfig = !!params.configKeyToConfigMap[fileName] ? params.configKeyToConfigMap[fileName][prop] : false;
- var hostOverrideValue = this.formatPropertyValue(serviceConfig, properties[prop]);
- var hostOverrideIsFinal = !!(config.properties_attributes && config.properties_attributes.final && config.properties_attributes.final[prop]);
if (serviceConfig) {
// Value of this property is different for this host.
+ hostOverrideValue = this.formatPropertyValue(serviceConfig, properties[prop]);
+ hostOverrideIsFinal = !!(config.properties_attributes && config.properties_attributes.final && config.properties_attributes.final[prop]);
if (!Em.get(serviceConfig, 'overrides')) Em.set(serviceConfig, 'overrides', []);
serviceConfig.overrides.pushObject({value: hostOverrideValue, group: group, isFinal: hostOverrideIsFinal});
} else {
- params.serviceConfigs.push(this.createCustomGroupConfig(prop, config, group));
+ params.serviceConfigs.push(this.createCustomGroupConfig(prop, config.type, config.properties[prop], group));
}
}
}, this);
@@ -882,15 +883,16 @@ App.config = Em.Object.create({
* can be created and assigned to non-default config group.
*
* @param {String} propertyName - name of the property
- * @param {Object} config - config info
+ * @param {String} filename - config filename
+ * @param {String} value - property value
* @param {Em.Object} group - config group to set
* @param {Boolean} isEditable
* @return {Object}
**/
- createCustomGroupConfig: function (propertyName, config, group, isEditable) {
- var propertyObject = this.createDefaultConfig(propertyName, group.get('service.serviceName'), this.getOriginalFileName(config.type), false, {
- savedValue: config.properties[propertyName],
- value: config.properties[propertyName],
+ createCustomGroupConfig: function (propertyName, filename, value, group, isEditable) {
+ var propertyObject = this.createDefaultConfig(propertyName, group.get('service.serviceName'), this.getOriginalFileName(filename), false, {
+ savedValue: value,
+ value: value,
group: group,
isEditable: isEditable !== false,
isOverridable: false
http://git-wip-us.apache.org/repos/asf/ambari/blob/7bd0a877/ambari-web/test/controllers/wizard_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/wizard_test.js b/ambari-web/test/controllers/wizard_test.js
index 035e0ab..ea329d7 100644
--- a/ambari-web/test/controllers/wizard_test.js
+++ b/ambari-web/test/controllers/wizard_test.js
@@ -1004,7 +1004,6 @@ describe('App.WizardController', function () {
isRequiredByAgent: true,
hasInitialValue: true,
isRequired: true,
- group: {name: 'group'},
showLabel: true,
category: 'some_category'
})
@@ -1038,10 +1037,10 @@ describe('App.WizardController', function () {
})
]});
- it('should save configs to content.serviceConfigProperties', function () {
+ it('should save configs from default config group to content.serviceConfigProperties', function () {
c.saveServiceConfigProperties(stepController);
var saved = c.get('content.serviceConfigProperties');
- expect(saved.length).to.equal(2);
+ expect(saved.length).to.equal(1);
expect(saved[0].category).to.equal('some_category');
});
[04/19] ambari git commit: AMBARI-378. Missing values in “Advanced hive-site” section.(vbrodetskyi)
Posted by yu...@apache.org.
AMBARI-378. Missing values in “Advanced hive-site” section.(vbrodetskyi)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/90c0293b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/90c0293b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/90c0293b
Branch: refs/heads/2.2.1-maint
Commit: 90c0293bac7c4633e90ebd8a1b7962e0a31d53e4
Parents: a17c7a6
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Feb 1 12:46:12 2016 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Mon Feb 1 12:46:12 2016 +0200
----------------------------------------------------------------------
.../stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml | 2 --
1 file changed, 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/90c0293b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
index fb10b45..cc3e75f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
@@ -1781,7 +1781,6 @@ limitations under the License.
</property>
<property>
<name>hive.server2.authentication.pam.services</name>
- <value></value>
<property-type>DONT_ADD_ON_UPGRADE</property-type>
<depends-on>
<property>
@@ -1792,7 +1791,6 @@ limitations under the License.
</property>
<property>
<name>hive.server2.custom.authentication.class</name>
- <value></value>
<property-type>DONT_ADD_ON_UPGRADE</property-type>
<depends-on>
<property>