Posted to commits@ambari.apache.org by nc...@apache.org on 2017/01/25 18:56:50 UTC

[01/50] [abbrv] ambari git commit: AMBARI-19647. Issue while submitting workflow as it's not able to register as a project (Madhan Mohan Reddy via pallavkul)

Repository: ambari
Updated Branches:
  refs/heads/branch-dev-patch-upgrade eb2c904e1 -> 551f17b42


AMBARI-19647. Issue while submitting workflow as it's not able to register as a project (Madhan Mohan Reddy via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9abe8da6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9abe8da6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9abe8da6

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 9abe8da6e582f8a011bec1889d87f2186a79550f
Parents: f4a3dbc
Author: pallavkul <pa...@gmail.com>
Authored: Mon Jan 23 12:25:22 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Mon Jan 23 12:25:22 2017 +0530

----------------------------------------------------------------------
 contrib/views/wfmanager/pom.xml                     | 1 +
 contrib/views/wfmanager/src/main/resources/view.xml | 6 ++----
 2 files changed, 3 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9abe8da6/contrib/views/wfmanager/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/pom.xml b/contrib/views/wfmanager/pom.xml
index ae01a4f..fff793c 100644
--- a/contrib/views/wfmanager/pom.xml
+++ b/contrib/views/wfmanager/pom.xml
@@ -31,6 +31,7 @@
 		<dependency>
 			<groupId>org.apache.ambari</groupId>
 			<artifactId>ambari-views</artifactId>
+			<scope>provided</scope>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.ambari.contrib.views</groupId>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9abe8da6/contrib/views/wfmanager/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/view.xml b/contrib/views/wfmanager/src/main/resources/view.xml
index f2f520d..85cf3e5 100644
--- a/contrib/views/wfmanager/src/main/resources/view.xml
+++ b/contrib/views/wfmanager/src/main/resources/view.xml
@@ -104,14 +104,13 @@
         <required>false</required>
         <cluster-config>core-site/hadoop.security.auth_to_local</cluster-config>
     </parameter>
-<!--
     <parameter>
         <name>webhdfs.username</name>
         <description>doAs for proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description>
         <label>WebHDFS Username</label>
         <default-value>${username}</default-value>
         <required>false</required>
-    </parameter> -->
+    </parameter>
     <parameter>
         <name>webhdfs.auth</name>
         <description>Semicolon-separated authentication configs.</description>
@@ -146,11 +145,10 @@
   </parameter>
 
     <persistence>
-        <!--
         <entity>
              <class>org.apache.oozie.ambari.view.workflowmanager.model.Workflow</class>
             <id-property>id</id-property>
-        </entity> -->
+        </entity>
         <entity>
             <class>org.apache.oozie.ambari.view.assets.model.ActionAssetDefinition</class>
             <id-property>id</id-property>


[11/50] [abbrv] ambari git commit: AMBARI-19672. Alert Popup doesn't execute callback if it's closed without clicking "Primary" (onechiporenko)

Posted by nc...@apache.org.
AMBARI-19672. Alert Popup doesn't execute callback if it's closed without clicking "Primary" (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/aaa99316
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/aaa99316
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/aaa99316

Branch: refs/heads/branch-dev-patch-upgrade
Commit: aaa99316f42b5f7633bb8b2ace3d2557c73a5500
Parents: cacb1cc
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Mon Jan 23 13:31:45 2017 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Mon Jan 23 15:07:04 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/views/common/modal_popups/alert_popup.js | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/aaa99316/ambari-web/app/views/common/modal_popups/alert_popup.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/modal_popups/alert_popup.js b/ambari-web/app/views/common/modal_popups/alert_popup.js
index 413d33c..3364fcb 100644
--- a/ambari-web/app/views/common/modal_popups/alert_popup.js
+++ b/ambari-web/app/views/common/modal_popups/alert_popup.js
@@ -23,19 +23,19 @@ var App = require('app');
  *
  * @param {String} header - header of the popup
  * @param {String} body - body of the popup
- * @param {Function} primary - function to call upon clicking the OK button
+ * @param {Function} callback - function to call upon clicking the OK button, clicking "x" or pressing "Esc"
  * @return {*}
  */
-App.showAlertPopup = function (header, body, primary) {
+App.showAlertPopup = function (header, body, callback) {
   return App.ModalPopup.show({
     primary: Em.I18n.t('ok'),
     secondary: null,
     header: header,
     body: body,
-    onPrimary: function () {
-      this.hide();
-      if (primary) {
-        primary();
+    hide: function () {
+      this._super();
+      if (callback) {
+        callback();
       }
     }
   });
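
The fix moves the callback out of onPrimary and into an overridden hide(), so it now runs however the popup is dismissed (OK button, the "x", or Esc), since all of those paths end in hide(). A minimal Python sketch of the same pattern (hypothetical ModalPopup stand-in; the real code is Ember.js):

class ModalPopup:
    """Hypothetical stand-in for App.ModalPopup."""
    def hide(self):
        print("popup hidden")

class AlertPopup(ModalPopup):
    def __init__(self, callback=None):
        self.callback = callback

    def hide(self):
        # Run the base teardown first (the JS calls this._super()),
        # then fire the callback regardless of how the popup was closed.
        super().hide()
        if self.callback:
            self.callback()

# OK, "x" and Esc all funnel into hide(), so the callback runs on every
# dismissal path -- previously it ran only from onPrimary.
AlertPopup(callback=lambda: print("callback ran")).hide()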


[30/50] [abbrv] ambari git commit: AMBARI-13324. Automate creating Flume keytab and principal (Shi Wang via dili)

Posted by nc...@apache.org.
AMBARI-13324. Automate creating Flume keytab and principal (Shi Wang via dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d77f3a54
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d77f3a54
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d77f3a54

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d77f3a54fcbb79e9a2518a56bb78b0468a8a8b4f
Parents: ad0f4ec
Author: Di Li <di...@apache.org>
Authored: Tue Jan 24 15:19:41 2017 -0500
Committer: Di Li <di...@apache.org>
Committed: Tue Jan 24 15:19:41 2017 -0500

----------------------------------------------------------------------
 .../FLUME/1.4.0.2.0/kerberos.json               | 44 ++++++++++++++++++++
 .../1.4.0.2.0/package/scripts/flume_check.py    |  6 +--
 .../FLUME/1.4.0.2.0/package/scripts/params.py   | 12 +++++-
 .../stacks/2.0.6/FLUME/test_service_check.py    |  1 +
 4 files changed, 59 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d77f3a54/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/kerberos.json b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/kerberos.json
new file mode 100644
index 0000000..ab46912
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/kerberos.json
@@ -0,0 +1,44 @@
+{
+  "services": [
+    {
+      "name": "FLUME",
+      "components": [
+        {
+          "name": "FLUME_HANDLER",
+          "identities": [
+            {
+              "name": "flume_principal",
+              "principal": {
+                "value": "${flume-env/flume_user}/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "flume-env/flume_principal_name",
+                "local_username": "${flume-env/flume_user}"
+
+              },
+              "keytab": {
+                "file": "${keytab_dir}/flume.service.keytab",
+                "owner": {
+                  "name": "${flume-env/flume_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "flume-env/flume_keytab_path"
+              }
+            }
+          ],
+          "configurations": [
+            {
+              "core-site": {
+                "hadoop.proxyuser.flume.groups": "${hadoop-env/proxyuser_group}",
+                "hadoop.proxyuser.flume.hosts": "*"
+              }
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
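
The descriptor relies on ${config-type/property} placeholders (${flume-env/flume_user}, ${realm}, ${keytab_dir}) that Ambari resolves against cluster configuration before creating the principal and keytab; Hadoop later swaps _HOST for the local hostname. A rough sketch of that substitution over a plain nested dict (an illustration, not Ambari's actual resolver):

import re

def resolve(template, configurations, extras):
    """Replace ${type/property} and bare ${name} placeholders (sketch only)."""
    def lookup(match):
        ref = match.group(1)
        if "/" in ref:
            config_type, prop = ref.split("/", 1)
            return configurations[config_type][prop]
        return extras[ref]
    return re.sub(r"\$\{([^}]+)\}", lookup, template)

configs = {"flume-env": {"flume_user": "flume"}}
print(resolve("${flume-env/flume_user}/_HOST@${realm}", configs, {"realm": "EXAMPLE.COM"}))
# flume/_HOST@EXAMPLE.COM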

http://git-wip-us.apache.org/repos/asf/ambari/blob/d77f3a54/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_check.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_check.py
index c5450bb..80f4de2 100644
--- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_check.py
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_check.py
@@ -38,11 +38,11 @@ class FlumeServiceCheck(Script):
     import params
     env.set_params(params)
     if params.security_enabled:
-      principal_replaced = params.http_principal.replace("_HOST", params.hostname)
-      Execute(format("{kinit_path_local} -kt {http_keytab} {principal_replaced}"),
-              user=params.smoke_user)
+      Execute(format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal}"),
+              user=params.smokeuser)
 
     Execute(format('env JAVA_HOME={java_home} {flume_bin} version'),
+            user=params.smokeuser,
             logoutput=True,
             tries = 3,
             try_sleep = 20)
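
format() here is the resource_management helper, which interpolates {name} references from variables visible in the caller's scope, so the kinit command line is assembled from the smoke-user params added below in params.py. A plain-Python approximation with explicit keywords instead of scope capture (values are examples, not pulled from a real cluster):

kinit_cmd = "{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal}".format(
    kinit_path_local="/usr/bin/kinit",  # located via get_kinit_path()
    smoke_user_keytab="/etc/security/keytabs/smokeuser.headless.keytab",
    smokeuser_principal="ambari-qa@EXAMPLE.COM",
)
print(kinit_cmd)
# /usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM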

http://git-wip-us.apache.org/repos/asf/ambari/blob/d77f3a54/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
index a44b461..b143941 100644
--- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py
@@ -22,6 +22,7 @@ from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions import get_kinit_path
 from ambari_commons.ambari_metrics_helper import select_metric_collector_hosts_from_hostnames
 
 if OSCheck.is_windows_family():
@@ -40,7 +41,11 @@ version = default("/commandParams/version", None)
 user_group = config['configurations']['cluster-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
 
-security_enabled = False
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+if security_enabled:
+    _hostname_lowercase = config['hostname'].lower()
+    flume_jaas_princ = config['configurations']['flume-env']['flume_principal_name']
+    flume_keytab_path = config['configurations']['flume-env']['flume_keytab_path']
 
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
@@ -125,3 +130,8 @@ if not len(default("/clusterHostInfo/zookeeper_hosts", [])) == 0:
   # last port config
   zookeeper_quorum += ':' + zookeeper_clientPort
 
+# smokeuser
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
+smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
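
The new params mix two access styles: direct indexing for settings that must exist (config['configurations']['cluster-env']['smokeuser']) and default('/path', fallback) for optional ones such as the kinit search paths. A toy version of that path-based lookup over nested dicts (a simplification of the real helper, which reads the command JSON):

def default(config, path, fallback=None):
    """Walk a '/'-separated path through nested dicts; return fallback if absent."""
    node = config
    for key in path.strip("/").split("/"):
        if not isinstance(node, dict) or key not in node:
            return fallback
        node = node[key]
    return node

config = {"configurations": {"kerberos-env": {}}}
# A missing key yields the fallback instead of raising KeyError:
print(default(config, "/configurations/kerberos-env/executable_search_paths"))  # None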

http://git-wip-us.apache.org/repos/asf/ambari/blob/d77f3a54/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_service_check.py
index 152d00c..8f59174 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_service_check.py
@@ -35,6 +35,7 @@ class TestFlumeCheck(RMFTestCase):
     )
 
     self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/bin/flume-ng version',
+                              user = 'ambari-qa',
                               logoutput = True,
                               tries = 3,
                               try_sleep = 20)


[47/50] [abbrv] ambari git commit: AMBARI-19710. RU: NFS Gateway restart fails during RU to 2.4.3.0 with portmap error (aonishuk)

Posted by nc...@apache.org.
AMBARI-19710. RU: NFS Gateway restart fails during RU to 2.4.3.0 with portmap error (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1c115bc1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1c115bc1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1c115bc1

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1c115bc184d386f588f601d8f7f517c1fd09658d
Parents: b60faba
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Wed Jan 25 16:52:11 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Wed Jan 25 16:52:11 2017 +0200

----------------------------------------------------------------------
 .../src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml  | 3 +++
 .../src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml  | 3 +++
 2 files changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1c115bc1/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
index 07a9ef8..8c2ec8b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml
@@ -55,6 +55,9 @@
             <package>
               <name>hadoop_${stack_version}-libhdfs</name>
             </package>
+            <package>
+              <name>libtirpc-devel</name>
+            </package>
           </packages>
         </osSpecific>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1c115bc1/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml
index 63e85e3..ccf9a4e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml
@@ -87,6 +87,9 @@
             <package>
               <name>hadoop_${stack_version}-libhdfs</name>
             </package>
+            <package>
+              <name>libtirpc-devel</name>
+            </package>
           </packages>
         </osSpecific>
 


[27/50] [abbrv] ambari git commit: AMBARI-19668. Supporting zookeeper security only from HDP 2.6. (Attila Magyar via stoader)

Posted by nc...@apache.org.
AMBARI-19668. Supporting zookeeper security only from HDP 2.6. (Attila Magyar via stoader)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d0dc19e0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d0dc19e0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d0dc19e0

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d0dc19e0c95000a87884029ddb6c7ab4f3f32fb7
Parents: 75b30a4
Author: Attila Magyar <am...@hortonworks.com>
Authored: Tue Jan 24 16:04:08 2017 +0100
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Tue Jan 24 16:04:24 2017 +0100

----------------------------------------------------------------------
 .../libraries/functions/constants.py            |   1 +
 .../HDFS/2.1.0.2.0/configuration/hadoop-env.xml |   5 -
 .../HDFS/2.1.0.2.0/kerberos.json                |   3 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |   5 +
 .../2.1.0.2.0/package/scripts/zkfc_slave.py     |  18 +-
 .../3.0.0.3.0/package/scripts/params_linux.py   |   7 +-
 .../3.0.0.3.0/package/scripts/zkfc_slave.py     |  18 +-
 .../4.0.0.2.0/package/scripts/oozie_server.py   |   4 +
 .../4.0.0.2.0/package/scripts/params_linux.py   |   3 +
 .../OOZIE/4.2.0.2.3/kerberos.json               |   3 +-
 .../YARN/2.1.0.2.0/kerberos.json                |   1 -
 .../2.1.0.2.0/package/scripts/params_linux.py   |   1 +
 .../package/scripts/resourcemanager.py          |   5 +-
 .../YARN/3.0.0.3.0/kerberos.json                |   3 +-
 .../2.0.6/hooks/before-ANY/scripts/params.py    |   9 +-
 .../HDP/2.0.6/properties/stack_features.json    |   5 +
 .../services/HDFS/configuration/hadoop-env.xml  |   5 -
 .../stacks/HDP/2.2/services/YARN/kerberos.json  |   3 +-
 .../HDP/2.3.ECS/services/YARN/kerberos.json     |   3 +-
 .../services/HDFS/configuration/hadoop-env.xml  |   5 -
 .../stacks/HDP/2.3/services/YARN/kerberos.json  |   3 +-
 .../services/HDFS/configuration/hadoop-env.xml  |   5 -
 .../stacks/HDP/2.5/services/HDFS/kerberos.json  |   3 +-
 .../stacks/HDP/2.5/services/YARN/kerberos.json  |   3 +-
 .../services/HDFS/configuration/hadoop-env.xml  | 181 ++++++++++++
 .../stacks/HDP/2.6/services/HDFS/kerberos.json  | 247 ++++++++++++++++
 .../stacks/HDP/2.6/services/OOZIE/kerberos.json |  70 +++++
 .../stacks/HDP/2.6/services/YARN/kerberos.json  | 278 +++++++++++++++++++
 .../HDP/3.0/hooks/before-ANY/scripts/params.py  |   8 +-
 .../HDP/3.0/properties/stack_features.json      |   5 +
 .../services/HDFS/configuration/hadoop-env.xml  |   2 +-
 .../PERF/1.0/properties/stack_features.json     |   5 +
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  |   7 -
 .../HDF/2.0/properties/stack_features.json      |   5 +
 34 files changed, 874 insertions(+), 55 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
index 02ce194..8fd5c8d 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
@@ -49,6 +49,7 @@ class StackFeature:
   CONFIG_VERSIONING = "config_versioning"
   FALCON_EXTENSIONS = "falcon_extensions"
   DATANODE_NON_ROOT = "datanode_non_root"
+  SECURE_ZOOKEEPER = "secure_zookeeper"
   REMOVE_RANGER_HDFS_PLUGIN_ENV = "remove_ranger_hdfs_plugin_env"
   RANGER = "ranger"
   RANGER_TAGSYNC_COMPONENT = "ranger_tagsync_component"

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
index bc64d1f..89d5001 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
@@ -377,11 +377,6 @@ if [ "$command" == "datanode" ] &amp;&amp; [ "$EUID" -eq 0 ] &amp;&amp; [ -n "$H
   ulimit -l {{datanode_max_locked_memory}}
 fi
 {% endif %}
-
-# Enable ACLs on zookeper znodes if required
-{% if hadoop_zkfc_opts is defined %}
-      export HADOOP_ZKFC_OPTS="{{hadoop_zkfc_opts}} $HADOOP_ZKFC_OPTS"
-{% endif %}
     </value>
     <value-attributes>
       <type>content</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/kerberos.json b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/kerberos.json
index ac3b782..1cf1603 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/kerberos.json
@@ -24,8 +24,7 @@
           "core-site": {
             "hadoop.security.authentication": "kerberos",
             "hadoop.security.authorization": "true",
-            "hadoop.proxyuser.HTTP.groups": "${hadoop-env/proxyuser_group}",
-            "ha.zookeeper.acl":"sasl:nn:rwcda"
+            "hadoop.proxyuser.HTTP.groups": "${hadoop-env/proxyuser_group}"
           }
         }
       ],

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 31431b9..41f44c3 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -75,6 +75,7 @@ version_for_stack_feature_checks = get_stack_feature_version(config)
 
 stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
 stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
+stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user = status_params.hdfs_user
@@ -280,6 +281,9 @@ dfs_ha_automatic_failover_enabled = default("/configurations/hdfs-site/dfs.ha.au
 dfs_ha_namenode_active = default("/configurations/hadoop-env/dfs_ha_initial_namenode_active", None)
 # hostname of the standby HDFS HA Namenode (only used when HA is enabled)
 dfs_ha_namenode_standby = default("/configurations/hadoop-env/dfs_ha_initial_namenode_standby", None)
+ha_zookeeper_quorum = config['configurations']['core-site']['ha.zookeeper.quorum']
+jaas_file = os.path.join(hadoop_conf_secure_dir, 'hdfs_jaas.conf')
+zk_namespace = default('/configurations/hdfs-site/ha.zookeeper.parent-znode', '/hadoop-ha')
 
 # Values for the current Host
 namenode_id = None
@@ -375,6 +379,7 @@ name_node_params = default("/commandParams/namenode", None)
 
 java_home = config['hostLevelParams']['java_home']
 java_version = expect("/hostLevelParams/java_version", int)
+java_exec = format("{java_home}/bin/java")
 
 hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
 namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
index 69cd2a5..bfc9429 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
@@ -37,6 +37,7 @@ from resource_management.libraries.functions.security_commons import validate_se
 from resource_management.libraries.functions.security_commons import FILE_TYPE_XML
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.script import Script
+from resource_management.core.resources.zkmigrator import ZkMigrator
 
 
 
@@ -62,8 +63,10 @@ class ZkfcSlave(Script):
     env.set_params(params)
     hdfs("zkfc_slave")
 
-    # set up failover /  zookeper ACLs
-    utils.set_up_zkfc_security(params)
+    # set up failover / ZooKeeper ACLs; this feature is supported from HDP 2.6 onwards
+    if params.stack_supports_zk_security:
+      utils.set_up_zkfc_security(params)
+
     pass
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
@@ -164,7 +167,16 @@ class ZkfcSlaveDefault(ZkfcSlave):
         self.put_structured_out({"securityState": "UNSECURED"})
     else:
       self.put_structured_out({"securityState": "UNSECURED"})
-      
+
+  def disable_security(self, env):
+    import params
+
+    if not params.stack_supports_zk_security:
+      return
+
+    zkmigrator = ZkMigrator(params.ha_zookeeper_quorum, params.java_exec, params.java_home, params.jaas_file, params.hdfs_user)
+    zkmigrator.set_acls(params.zk_namespace if params.zk_namespace.startswith('/') else '/' + params.zk_namespace, 'world:anyone:crdwa')
+
   def get_log_folder(self):
     import params
     return params.hdfs_log_dir
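
The new disable_security() reverts the HA parent znode's ACL to world:anyone:crdwa (fully open) when Kerberos is being disabled; ZkMigrator does this by launching a Java helper against the quorum with the given JAAS file. Sketched below with the kazoo client instead of the Java tool (an assumption for illustration, not what Ambari ships):

from kazoo.client import KazooClient
from kazoo.security import OPEN_ACL_UNSAFE  # world:anyone with all perms (crdwa)

def open_up_znode(quorum, znode):
    # Normalize the namespace the same way the patch does.
    path = znode if znode.startswith("/") else "/" + znode
    zk = KazooClient(hosts=quorum)
    zk.start()
    try:
        zk.set_acls(path, OPEN_ACL_UNSAFE)
    finally:
        zk.stop()

# open_up_znode("zk1:2181,zk2:2181,zk3:2181", "hadoop-ha")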

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
index 59ae815..f7aa4c9 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
@@ -72,6 +72,7 @@ version_for_stack_feature_checks = get_stack_feature_version(config)
 
 stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
 stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
+stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
 
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user = status_params.hdfs_user
@@ -277,6 +278,9 @@ dfs_ha_automatic_failover_enabled = default("/configurations/hdfs-site/dfs.ha.au
 dfs_ha_namenode_active = default("/configurations/hadoop-env/dfs_ha_initial_namenode_active", None)
 # hostname of the standby HDFS HA Namenode (only used when HA is enabled)
 dfs_ha_namenode_standby = default("/configurations/hadoop-env/dfs_ha_initial_namenode_standby", None)
+ha_zookeeper_quorum = config['configurations']['core-site']['ha.zookeeper.quorum']
+jaas_file = os.path.join(hadoop_conf_secure_dir, 'hdfs_jaas.conf')
+zk_namespace = default('/configurations/hdfs-site/ha.zookeeper.parent-znode', '/hadoop-ha')
 
 # Values for the current Host
 namenode_id = None
@@ -372,6 +376,7 @@ name_node_params = default("/commandParams/namenode", None)
 
 java_home = config['hostLevelParams']['java_home']
 java_version = expect("/hostLevelParams/java_version", int)
+java_exec = format("{java_home}/bin/java")
 
 hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
 namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
@@ -534,4 +539,4 @@ if enable_ranger_hdfs:
   if has_ranger_admin and stack_supports_ranger_audit_db and xa_audit_db_flavor.lower() == 'sqla':
     xa_audit_db_is_enabled = False
 
-# ranger hdfs plugin section end
\ No newline at end of file
+# ranger hdfs plugin section end

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
index 92e4182..f2ea6ad 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
@@ -39,6 +39,7 @@ from resource_management.libraries.functions.stack_features import check_stack_f
 from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions.version_select_util import get_component_version
+from resource_management.core.resources.zkmigrator import ZkMigrator
 
 class ZkfcSlave(Script):
   def get_component_name(self):
@@ -62,8 +63,9 @@ class ZkfcSlave(Script):
     env.set_params(params)
     hdfs("zkfc_slave")
 
-    # set up failover /  zookeper ACLs
-    utils.set_up_zkfc_security(params)
+    # set up failover / ZooKeeper ACLs; this feature is supported from HDP 2.6 onwards
+    if params.stack_supports_zk_security:
+      utils.set_up_zkfc_security(params)
 
     pass
 
@@ -165,7 +167,17 @@ class ZkfcSlaveDefault(ZkfcSlave):
         self.put_structured_out({"securityState": "UNSECURED"})
     else:
       self.put_structured_out({"securityState": "UNSECURED"})
-      
+
+  def disable_security(self, env):
+    import params
+
+    if not params.stack_supports_zk_security:
+      return
+
+    zkmigrator = ZkMigrator(params.ha_zookeeper_quorum, params.java_exec, params.java_home, params.jaas_file, params.hdfs_user)
+    zkmigrator.set_acls(params.zk_namespace if params.zk_namespace.startswith('/') else '/' + params.zk_namespace, 'world:anyone:crdwa')
+
+
   def get_log_folder(self):
     import params
     return params.hdfs_log_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
index 1a34b87..a8b2cf4 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
@@ -196,7 +196,11 @@ class OozieServerDefault(OozieServer):
 
   def disable_security(self, env):
     import params
+    if not params.stack_supports_zk_security:
+      Logger.info("Stack doesn't support zookeeper security")
+      return
     if not params.zk_connection_string:
+      Logger.info("No zookeeper connection string. Skipping reverting ACL")
       return
     zkmigrator = ZkMigrator(params.zk_connection_string, params.java_exec, params.java64_home, params.jaas_file, params.oozie_user)
     zkmigrator.set_acls(params.zk_namespace if params.zk_namespace.startswith('/') else '/' + params.zk_namespace, 'world:anyone:crdwa')

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 48c8ef0..eb438e7 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -32,6 +32,7 @@ from resource_management.libraries.functions.get_lzo_packages import get_lzo_pac
 from resource_management.libraries.functions.expect import expect
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions.get_architecture import get_architecture
+from resource_management.libraries.functions.stack_features import get_stack_feature_version
 
 from urlparse import urlparse
 
@@ -63,6 +64,7 @@ agent_stack_retry_count = expect("/hostLevelParams/agent_stack_retry_count", int
 stack_root = status_params.stack_root
 stack_version_unformatted =  status_params.stack_version_unformatted
 stack_version_formatted =  status_params.stack_version_formatted
+version_for_stack_feature_checks = get_stack_feature_version(config)
 
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
@@ -162,6 +164,7 @@ yarn_resourcemanager_address = config['configurations']['yarn-site']['yarn.resou
 zk_namespace = default('/configurations/oozie-site/oozie.zookeeper.namespace', 'oozie')
 zk_connection_string = default('/configurations/oozie-site/oozie.zookeeper.connection.string', None)
 jaas_file = os.path.join(conf_dir, 'zkmigrator_jaas.conf')
+stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
 
 if security_enabled:
   oozie_site = dict(config['configurations']['oozie-site'])

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/kerberos.json b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/kerberos.json
index f1092f5..d2e2ab8 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/kerberos.json
@@ -20,8 +20,7 @@
             "oozie.service.AuthorizationService.authorization.enabled": "true",
             "oozie.service.HadoopAccessorService.kerberos.enabled": "true",
             "local.realm": "${realm}",
-            "oozie.credentials.credentialclasses": "hcat=org.apache.oozie.action.hadoop.HCatCredentials,hive2=org.apache.oozie.action.hadoop.Hive2Credentials",
-            "oozie.zookeeper.secure" : "true"
+            "oozie.credentials.credentialclasses": "hcat=org.apache.oozie.action.hadoop.HCatCredentials,hive2=org.apache.oozie.action.hadoop.Hive2Credentials"
           }
         }
       ],

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json
index c8b5989..7315c09 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/kerberos.json
@@ -31,7 +31,6 @@
             "yarn.resourcemanager.proxyuser.*.hosts": "",
             "yarn.resourcemanager.proxyuser.*.users": "",
             "yarn.resourcemanager.proxy-user-privileges.enabled": "true",
-            "yarn.resourcemanager.zk-acl" : "sasl:rm:rwcda",
             "hadoop.registry.secure" : "true",
             "hadoop.registry.system.accounts" : "sasl:yarn,sasl:mapred,sasl:hadoop,sasl:hdfs,sasl:rm"
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 7df82bf..aed8abc 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -256,6 +256,7 @@ nodemanager_kinit_cmd = ""
 rm_zk_address = config['configurations']['yarn-site']['yarn.resourcemanager.zk-address']
 rm_zk_znode = config['configurations']['yarn-site']['yarn.resourcemanager.zk-state-store.parent-path']
 rm_zk_store_class = config['configurations']['yarn-site']['yarn.resourcemanager.store.class']
+stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
 
 if security_enabled:
   rm_principal_name = config['configurations']['yarn-site']['yarn.resourcemanager.principal']

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
index 77bd363..a659dd1 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
@@ -228,8 +228,11 @@ class ResourcemanagerDefault(Resourcemanager):
 
   def disable_security(self, env):
     import params
+    if not params.stack_supports_zk_security:
+      Logger.info("Stack doesn't support zookeeper security")
+      return
     if not params.rm_zk_address:
-      Logger.info("Skipping reverting ACL")
+      Logger.info("No zookeeper connection string. Skipping reverting ACL")
       return
     zkmigrator = ZkMigrator(
       params.rm_zk_address, \

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/kerberos.json b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/kerberos.json
index eaffec6..29cc00a 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/kerberos.json
@@ -31,8 +31,7 @@
             "yarn.resourcemanager.proxyuser.*.groups": "",
             "yarn.resourcemanager.proxyuser.*.hosts": "",
             "yarn.resourcemanager.proxyuser.*.users": "",
-            "yarn.resourcemanager.proxy-user-privileges.enabled": "true",
-            "yarn.resourcemanager.zk-acl" : "sasl:rm:rwcda"
+            "yarn.resourcemanager.proxy-user-privileges.enabled": "true"
           }
         },
         {

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index d4e505a..8e0e783 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -32,6 +32,9 @@ from resource_management.libraries.functions import format_jvm_option
 from resource_management.libraries.functions.is_empty import is_empty
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.expect import expect
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 
 
@@ -181,6 +184,9 @@ ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
 zeppelin_master_hosts = default("/clusterHostInfo/zeppelin_master_hosts", [])
 zkfc_hosts = default("/clusterHostInfo/zkfc_hosts", [])
 
+# get the correct version to use for checking stack features
+version_for_stack_feature_checks = get_stack_feature_version(config)
+
 
 has_namenode = not len(namenode_host) == 0
 has_ganglia_server = not len(ganglia_server_hosts) == 0
@@ -191,6 +197,7 @@ has_falcon_server_hosts = not len(falcon_server_hosts) == 0
 has_ranger_admin = not len(ranger_admin_hosts) == 0
 has_zeppelin_master = not len(zeppelin_master_hosts) == 0
 has_zkfc_hosts = not len(zkfc_hosts)== 0
+stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
 
 if has_namenode or dfs_type == 'HCFS':
     hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
@@ -239,5 +246,5 @@ tez_am_view_acls = config['configurations']['tez-site']["tez.am.view-acls"]
 override_uid = str(default("/configurations/cluster-env/override_uid", "true")).lower()
 
 # if NN HA on secure clutser, access Zookeper securely
-if has_zkfc_hosts and security_enabled:
+if stack_supports_zk_security and has_zkfc_hosts and security_enabled:
     hadoop_zkfc_opts=format("-Dzookeeper.sasl.client=true -Dzookeeper.sasl.client.username=zookeeper -Djava.security.auth.login.config={hadoop_conf_secure_dir}/hdfs_jaas.conf -Dzookeeper.sasl.clientconfig=Client")

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
index e4a499b..0fd1766 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
@@ -22,6 +22,11 @@
       "min_version": "2.2.0.0"
     },
     {
+      "name": "secure_zookeeper",
+      "description": "Protect ZNodes with SASL acl in secure clusters",
+      "min_version": "2.6.0.0"
+    },
+    {
       "name": "config_versioning",
       "description": "Configurable versions support",
       "min_version": "2.3.0.0"

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index ef111e0..5be2b74 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -180,11 +180,6 @@ if [ "$command" == "datanode" ] &amp;&amp; [ "$EUID" -eq 0 ] &amp;&amp; [ -n "$H
   ulimit -l {{datanode_max_locked_memory}}
 fi
 {% endif %}
-
-# Enable ACLs on zookeper znodes if required
-{% if hadoop_zkfc_opts is defined %}
-  export HADOOP_ZKFC_OPTS="{{hadoop_zkfc_opts}}"
-{% endif %}
     </value>
     <value-attributes>
       <type>content</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json
index a8ef83c..8618804 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/kerberos.json
@@ -31,8 +31,7 @@
             "yarn.resourcemanager.proxyuser.*.hosts": "",
             "yarn.resourcemanager.proxyuser.*.users": "",
             "yarn.resourcemanager.proxy-user-privileges.enabled": "true",
-            "yarn.resourcemanager.zk-state-store.parent-path": "/rmstore-secure",
-            "yarn.resourcemanager.zk-acl" : "sasl:rm:rwcda"
+            "yarn.resourcemanager.zk-state-store.parent-path": "/rmstore-secure"
           }
         },
         {

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/YARN/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/YARN/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/YARN/kerberos.json
index 3059f14..4c5bcdb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/YARN/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/YARN/kerberos.json
@@ -33,8 +33,7 @@
             "yarn.resourcemanager.proxyuser.*.groups": "",
             "yarn.resourcemanager.proxyuser.*.hosts": "",
             "yarn.resourcemanager.proxyuser.*.users": "",
-            "yarn.resourcemanager.proxy-user-privileges.enabled": "true",
-            "yarn.resourcemanager.zk-acl" : "sasl:rm:rwcda"
+            "yarn.resourcemanager.proxy-user-privileges.enabled": "true"
           }
         },
         {

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
index 0212ba0..24e0193 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
@@ -156,11 +156,6 @@ if [ "$command" == "datanode" ] &amp;&amp; [ "$EUID" -eq 0 ] &amp;&amp; [ -n "$H
   {% endif %}
   ulimit -n {{hdfs_user_nofile_limit}}
 fi
-
-# Enable ACLs on zookeper znodes if required
-{% if hadoop_zkfc_opts is defined %}
-  export HADOOP_ZKFC_OPTS="{{hadoop_zkfc_opts}}"
-{% endif %}
     </value>
     <value-attributes>
       <type>content</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json
index 5fff05c..0e7a5de 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/kerberos.json
@@ -31,8 +31,7 @@
             "yarn.resourcemanager.proxyuser.*.groups": "",
             "yarn.resourcemanager.proxyuser.*.hosts": "",
             "yarn.resourcemanager.proxyuser.*.users": "",
-            "yarn.resourcemanager.proxy-user-privileges.enabled": "true",
-            "yarn.resourcemanager.zk-acl" : "sasl:rm:rwcda"
+            "yarn.resourcemanager.proxy-user-privileges.enabled": "true"
           }
         },
         {

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
index 0212ba0..24e0193 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
@@ -156,11 +156,6 @@ if [ "$command" == "datanode" ] &amp;&amp; [ "$EUID" -eq 0 ] &amp;&amp; [ -n "$H
   {% endif %}
   ulimit -n {{hdfs_user_nofile_limit}}
 fi
-
-# Enable ACLs on zookeper znodes if required
-{% if hadoop_zkfc_opts is defined %}
-  export HADOOP_ZKFC_OPTS="{{hadoop_zkfc_opts}}"
-{% endif %}
     </value>
     <value-attributes>
       <type>content</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.5/services/HDFS/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HDFS/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HDFS/kerberos.json
index 58942aa..766a014 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HDFS/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HDFS/kerberos.json
@@ -24,8 +24,7 @@
           "core-site": {
             "hadoop.security.authentication": "kerberos",
             "hadoop.security.authorization": "true",
-            "hadoop.proxyuser.HTTP.groups": "${hadoop-env/proxyuser_group}",
-            "ha.zookeeper.acl":"sasl:nn:rwcda"
+            "hadoop.proxyuser.HTTP.groups": "${hadoop-env/proxyuser_group}"
           }
         },
         {

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/kerberos.json
index eaffec6..29cc00a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/kerberos.json
@@ -31,8 +31,7 @@
             "yarn.resourcemanager.proxyuser.*.groups": "",
             "yarn.resourcemanager.proxyuser.*.hosts": "",
             "yarn.resourcemanager.proxyuser.*.users": "",
-            "yarn.resourcemanager.proxy-user-privileges.enabled": "true",
-            "yarn.resourcemanager.zk-acl" : "sasl:rm:rwcda"
+            "yarn.resourcemanager.proxy-user-privileges.enabled": "true"
           }
         },
         {

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml
new file mode 100644
index 0000000..768ca82
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/configuration/hadoop-env.xml
@@ -0,0 +1,181 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_adding_forbidden="true">
+  <!-- hadoop-env.sh -->
+  <property>
+    <name>content</name>
+    <display-name>hadoop-env template</display-name>
+    <description>This is the jinja template for hadoop-env.sh file</description>
+    <value>
+      # Set Hadoop-specific environment variables here.
+
+      # The only required environment variable is JAVA_HOME.  All others are
+      # optional.  When running a distributed configuration it is best to
+      # set JAVA_HOME in this file, so that it is correctly defined on
+      # remote nodes.
+
+      # The java implementation to use.  Required.
+      export JAVA_HOME={{java_home}}
+      export HADOOP_HOME_WARN_SUPPRESS=1
+
+      # Hadoop home directory
+      export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
+
+      # Hadoop Configuration Directory
+
+      {# this is different for HDP1 #}
+      # Path to jsvc required by secure HDP 2.0 datanode
+      export JSVC_HOME={{jsvc_path}}
+
+
+      # The maximum amount of heap to use, in MB. Default is 1000.
+      export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"
+
+      export HADOOP_NAMENODE_INIT_HEAPSIZE="-Xms{{namenode_heapsize}}"
+
+      # Extra Java runtime options.  Empty by default.
+      export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}"
+
+      # Command specific options appended to HADOOP_OPTS when specified
+      HADOOP_JOBTRACKER_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}"
+
+      HADOOP_TASKTRACKER_OPTS="-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}"
+
+      {% if java_version &lt; 8 %}
+      SHARED_HADOOP_NAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -XX:PermSize={{namenode_opt_permsize}} -XX:MaxPermSize={{namenode_opt_maxpermsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT"
+      export HADOOP_NAMENODE_OPTS="${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\"/usr/hdp/current/hadoop-hdfs-namenode/bin/kill-name-node\" -Dorg.mortbay.jetty.Request.maxFormContentSize=-1 ${HADOOP_NAMENODE_OPTS}"
+      export HADOOP_DATANODE_OPTS="-server -XX:ParallelGCThreads=4 -XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log -XX:NewSize=200m -XX:MaxNewSize=200m -XX:PermSize=128m -XX:MaxPermSize=256m -Xloggc:/var/log/hadoop/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{dtnode_heapsize}} -Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_DATANODE_OPTS} -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly"
+
+      export HADOOP_SECONDARYNAMENODE_OPTS="${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\"/usr/hdp/current/hadoop-hdfs-secondarynamenode/bin/kill-secondary-name-node\" ${HADOOP_SECONDARYNAMENODE_OPTS}"
+
+      # The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+      export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m -XX:MaxPermSize=512m $HADOOP_CLIENT_OPTS"
+
+      {% else %}
+      SHARED_HADOOP_NAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT"
+      export HADOOP_NAMENODE_OPTS="${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\"/usr/hdp/current/hadoop-hdfs-namenode/bin/kill-name-node\" -Dorg.mortbay.jetty.Request.maxFormContentSize=-1 ${HADOOP_NAMENODE_OPTS}"
+      export HADOOP_DATANODE_OPTS="-server -XX:ParallelGCThreads=4 -XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log -XX:NewSize=200m -XX:MaxNewSize=200m -Xloggc:/var/log/hadoop/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{dtnode_heapsize}} -Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_DATANODE_OPTS} -XX:CMSInitiatingOccupancyFraction=70 -XX:+UseCMSInitiatingOccupancyOnly"
+
+      export HADOOP_SECONDARYNAMENODE_OPTS="${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOfMemoryError=\"/usr/hdp/current/hadoop-hdfs-secondarynamenode/bin/kill-secondary-name-node\" ${HADOOP_SECONDARYNAMENODE_OPTS}"
+
+      # The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+      export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
+      {% endif %}
+
+      HADOOP_NFS3_OPTS="-Xmx{{nfsgateway_heapsize}}m -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_NFS3_OPTS}"
+      HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"
+
+
+      # On secure datanodes, user to run the datanode as after dropping privileges
+      export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER:-{{hadoop_secure_dn_user}}}
+
+      # Extra ssh options.  Empty by default.
+      export HADOOP_SSH_OPTS="-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR"
+
+      # Where log files are stored.  $HADOOP_HOME/logs by default.
+      export HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER
+
+      # History server logs
+      export HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER
+
+      # Where log files are stored in the secure data environment.
+      export HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER
+
+      # File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
+      # export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
+
+      # host:path where hadoop code should be rsync'd from.  Unset by default.
+      # export HADOOP_MASTER=master:/home/$USER/src/hadoop
+
+      # Seconds to sleep between slave commands.  Unset by default.  This
+      # can be useful in large clusters, where, e.g., slave rsyncs can
+      # otherwise arrive faster than the master can service them.
+      # export HADOOP_SLAVE_SLEEP=0.1
+
+      # The directory where pid files are stored. /tmp by default.
+      export HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER
+      export HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER
+
+      # History server pid
+      export HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER
+
+      YARN_RESOURCEMANAGER_OPTS="-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY"
+
+      # A string representing this instance of hadoop. $USER by default.
+      export HADOOP_IDENT_STRING=$USER
+
+      # The scheduling priority for daemon processes.  See 'man nice'.
+
+      # export HADOOP_NICENESS=10
+
+      # Add database libraries
+      JAVA_JDBC_LIBS=""
+      if [ -d "/usr/share/java" ]; then
+      for jarFile in `ls /usr/share/java | grep -E "(mysql|ojdbc|postgresql|sqljdbc)" 2&gt;/dev/null`
+      do
+      JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
+      done
+      fi
+
+      # Add libraries to the hadoop classpath - some may not need a colon as they already include it
+      export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}
+
+      # Setting path to hdfs command line
+      export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
+
+      # Mostly required for hadoop 2.0
+      export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}
+
+      export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
+
+
+      # Fix temporary bug, when ulimit from conf files is not picked up, without full relogin.
+      # Makes sense to fix only when running DN as root
+      if [ "$command" == "datanode" ] &amp;&amp; [ "$EUID" -eq 0 ] &amp;&amp; [ -n "$HADOOP_SECURE_DN_USER" ]; then
+      {% if is_datanode_max_locked_memory_set %}
+      ulimit -l {{datanode_max_locked_memory}}
+      {% endif %}
+      ulimit -n {{hdfs_user_nofile_limit}}
+      fi
+
+      # Enable ACLs on zookeeper znodes if required
+      {% if hadoop_zkfc_opts is defined %}
+      export HADOOP_ZKFC_OPTS="{{hadoop_zkfc_opts}} $HADOOP_ZKFC_OPTS"
+      {% endif %}
+    </value>
+    <value-attributes>
+      <type>content</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>nfsgateway_heapsize</name>
+    <display-name>NFSGateway maximum Java heap size</display-name>
+    <value>1024</value>
+    <description>Maximum Java heap size for NFSGateway (Java option -Xmx)</description>
+    <value-attributes>
+      <type>int</type>
+      <unit>MB</unit>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>
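
The value above is a Jinja template: before writing hadoop-env.sh, Ambari substitutes
the {{...}} placeholders (java_home, hadoop_heapsize, ...) from the merged cluster
configuration and evaluates the {% if %} blocks, such as the java_version branch that
drops -XX:MaxPermSize on JDK 8+. A minimal sketch of that rendering step, using jinja2
directly with made-up parameter values (the real values come from hadoop-env, hdfs-site
and friends):

    # Sketch only: renders a fragment of the hadoop-env template with
    # hypothetical parameters; the Ambari agent does this with the full template.
    from jinja2 import Template

    template_source = """
    export JAVA_HOME={{java_home}}
    export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"
    {% if java_version < 8 %}
    export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m -XX:MaxPermSize=512m $HADOOP_CLIENT_OPTS"
    {% else %}
    export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
    {% endif %}
    """

    params = {"java_home": "/usr/jdk64/jdk1.8.0_112",  # hypothetical values
              "hadoop_heapsize": "1024",
              "java_version": 8}

    # With java_version >= 8 the -XX:MaxPermSize flag is dropped, matching
    # the {% if java_version < 8 %} branch of the template above.
    print(Template(template_source).render(**params))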

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/kerberos.json
new file mode 100644
index 0000000..b5acf92
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HDFS/kerberos.json
@@ -0,0 +1,247 @@
+{
+  "services": [
+    {
+      "name": "HDFS",
+      "identities": [
+        {
+          "name": "/spnego",
+          "principal": {
+            "configuration": "hdfs-site/dfs.web.authentication.kerberos.principal"
+          },
+          "keytab": {
+            "configuration": "hdfs-site/dfs.web.authentication.kerberos.keytab"
+          }
+        },
+        {
+          "name": "/smokeuser"
+        }
+      ],
+      "auth_to_local_properties" : [
+        "core-site/hadoop.security.auth_to_local"
+      ],
+      "configurations": [
+        {
+          "core-site": {
+            "hadoop.security.authentication": "kerberos",
+            "hadoop.security.authorization": "true",
+            "hadoop.proxyuser.HTTP.groups": "${hadoop-env/proxyuser_group}",
+            "ha.zookeeper.acl":"sasl:nn:rwcda"
+          }
+        },
+        {
+          "ranger-hdfs-audit": {
+            "xasecure.audit.jaas.Client.loginModuleName": "com.sun.security.auth.module.Krb5LoginModule",
+            "xasecure.audit.jaas.Client.loginModuleControlFlag": "required",
+            "xasecure.audit.jaas.Client.option.useKeyTab": "true",
+            "xasecure.audit.jaas.Client.option.storeKey": "false",
+            "xasecure.audit.jaas.Client.option.serviceName": "solr",
+            "xasecure.audit.destination.solr.force.use.inmemory.jaas.config": "true"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name":  "HDFS_CLIENT",
+          "identities": [
+            {
+              "name": "/HDFS/NAMENODE/hdfs"
+            }
+          ]
+        },
+        {
+          "name": "NAMENODE",
+          "identities": [
+            {
+              "name": "hdfs",
+              "principal": {
+                "value": "${hadoop-env/hdfs_user}${principal_suffix}@${realm}",
+                "type" : "user" ,
+                "configuration": "hadoop-env/hdfs_principal_name",
+                "local_username" : "${hadoop-env/hdfs_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/hdfs.headless.keytab",
+                "owner": {
+                  "name": "${hadoop-env/hdfs_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "hadoop-env/hdfs_user_keytab"
+              }
+            },
+            {
+              "name": "namenode_nn",
+              "principal": {
+                "value": "nn/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "hdfs-site/dfs.namenode.kerberos.principal",
+                "local_username" : "${hadoop-env/hdfs_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/nn.service.keytab",
+                "owner": {
+                  "name": "${hadoop-env/hdfs_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "hdfs-site/dfs.namenode.keytab.file"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "hdfs-site/dfs.namenode.kerberos.internal.spnego.principal"
+              }
+            },
+            {
+              "name": "/HDFS/NAMENODE/namenode_nn",
+              "principal": {
+                "configuration": "ranger-hdfs-audit/xasecure.audit.jaas.Client.option.principal"
+              },
+              "keytab": {
+                "configuration": "ranger-hdfs-audit/xasecure.audit.jaas.Client.option.keyTab"
+              }
+            }
+          ],
+          "configurations": [
+            {
+              "hdfs-site": {
+                "dfs.block.access.token.enable": "true"
+              }
+            }
+          ]
+        },
+        {
+          "name": "DATANODE",
+          "identities": [
+            {
+              "name": "datanode_dn",
+              "principal": {
+                "value": "dn/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "hdfs-site/dfs.datanode.kerberos.principal",
+                "local_username" : "${hadoop-env/hdfs_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/dn.service.keytab",
+                "owner": {
+                  "name": "${hadoop-env/hdfs_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "hdfs-site/dfs.datanode.keytab.file"
+              }
+            }
+          ],
+          "configurations" : [
+            {
+              "hdfs-site" : {
+                "dfs.datanode.address" : "0.0.0.0:1019",
+                "dfs.datanode.http.address": "0.0.0.0:1022"
+              }
+            }
+          ]
+        },
+        {
+          "name": "SECONDARY_NAMENODE",
+          "identities": [
+            {
+              "name": "secondary_namenode_nn",
+              "principal": {
+                "value": "nn/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "hdfs-site/dfs.secondary.namenode.kerberos.principal",
+                "local_username" : "${hadoop-env/hdfs_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/nn.service.keytab",
+                "owner": {
+                  "name": "${hadoop-env/hdfs_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "hdfs-site/dfs.secondary.namenode.keytab.file"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "hdfs-site/dfs.secondary.namenode.kerberos.internal.spnego.principal"
+              }
+            }
+          ]
+        },
+        {
+          "name": "NFS_GATEWAY",
+          "identities": [
+            {
+              "name": "nfsgateway",
+              "principal": {
+                "value": "nfs/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "hdfs-site/nfs.kerberos.principal",
+                "local_username" : "${hadoop-env/hdfs_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/nfs.service.keytab",
+                "owner": {
+                  "name": "${hadoop-env/hdfs_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "hdfs-site/nfs.keytab.file"
+              }
+            }
+          ]
+        },
+        {
+          "name": "JOURNALNODE",
+          "identities": [
+            {
+              "name": "journalnode_jn",
+              "principal": {
+                "value": "jn/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "hdfs-site/dfs.journalnode.kerberos.principal",
+                "local_username" : "${hadoop-env/hdfs_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/jn.service.keytab",
+                "owner": {
+                  "name": "${hadoop-env/hdfs_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "hdfs-site/dfs.journalnode.keytab.file"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "hdfs-site/dfs.journalnode.kerberos.internal.spnego.principal"
+              }
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
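
The descriptor above (and the OOZIE and YARN ones that follow) uses variable references
such as ${realm}, ${keytab_dir} and ${hadoop-env/hdfs_user}, plus the _HOST placeholder,
which Ambari resolves against the KDC realm, the component's host and the cluster
configuration before creating principals and keytabs. A rough sketch of that
substitution with a hand-rolled resolver and made-up values (the server's real resolver
also handles references like ${clusterHostInfo/...} and ${principal_suffix}):

    import re

    # Made-up lookup table standing in for the merged cluster configuration.
    context = {
        "realm": "EXAMPLE.COM",
        "hadoop-env/hdfs_user": "hdfs",
        "cluster-env/user_group": "hadoop",
        "keytab_dir": "/etc/security/keytabs",
    }

    def resolve(value, context, host="nn1.example.com"):
        """Expand ${...} references and _HOST in a descriptor value."""
        value = re.sub(r"\$\{([^}]+)\}", lambda m: context[m.group(1)], value)
        return value.replace("_HOST", host)

    print(resolve("nn/_HOST@${realm}", context))
    # -> nn/nn1.example.com@EXAMPLE.COM
    print(resolve("${keytab_dir}/nn.service.keytab", context))
    # -> /etc/security/keytabs/nn.service.keytab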

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.6/services/OOZIE/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/OOZIE/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/OOZIE/kerberos.json
new file mode 100644
index 0000000..f1092f5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/OOZIE/kerberos.json
@@ -0,0 +1,70 @@
+{
+  "services": [
+    {
+      "name": "OOZIE",
+      "identities": [
+        {
+          "name": "/spnego"
+        },
+        {
+          "name": "/smokeuser"
+        }
+      ],
+      "auth_to_local_properties" : [
+        "oozie-site/oozie.authentication.kerberos.name.rules"
+      ],
+      "configurations": [
+        {
+          "oozie-site": {
+            "oozie.authentication.type": "kerberos",
+            "oozie.service.AuthorizationService.authorization.enabled": "true",
+            "oozie.service.HadoopAccessorService.kerberos.enabled": "true",
+            "local.realm": "${realm}",
+            "oozie.credentials.credentialclasses": "hcat=org.apache.oozie.action.hadoop.HCatCredentials,hive2=org.apache.oozie.action.hadoop.Hive2Credentials",
+            "oozie.zookeeper.secure" : "true"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name": "OOZIE_SERVER",
+          "identities": [
+            {
+              "name": "/HDFS/NAMENODE/hdfs"
+            },
+            {
+              "name": "oozie_server",
+              "principal": {
+                "value": "oozie/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "oozie-site/oozie.service.HadoopAccessorService.kerberos.principal",
+                "local_username" : "${oozie-env/oozie_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/oozie.service.keytab",
+                "owner": {
+                  "name": "${oozie-env/oozie_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "oozie-site/oozie.service.HadoopAccessorService.keytab.file"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "oozie-site/oozie.authentication.kerberos.principal"
+              },
+              "keytab": {
+                "configuration": "oozie-site/oozie.authentication.kerberos.keytab"
+              }
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json
new file mode 100644
index 0000000..eaffec6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/kerberos.json
@@ -0,0 +1,278 @@
+{
+  "services": [
+    {
+      "name": "YARN",
+      "identities": [
+        {
+          "name": "/spnego"
+        },
+        {
+          "name": "/smokeuser"
+        }
+      ],
+      "configurations": [
+        {
+          "yarn-site": {
+            "yarn.timeline-service.enabled": "true",
+            "yarn.timeline-service.http-authentication.type": "kerberos",
+            "yarn.acl.enable": "true",
+            "yarn.admin.acl": "${yarn-env/yarn_user},dr.who",
+            "yarn.timeline-service.http-authentication.signature.secret": "",
+            "yarn.timeline-service.http-authentication.signature.secret.file": "",
+            "yarn.timeline-service.http-authentication.signer.secret.provider": "",
+            "yarn.timeline-service.http-authentication.signer.secret.provider.object": "",
+            "yarn.timeline-service.http-authentication.token.validity": "",
+            "yarn.timeline-service.http-authentication.cookie.domain": "",
+            "yarn.timeline-service.http-authentication.cookie.path": "",
+            "yarn.timeline-service.http-authentication.proxyuser.*.hosts": "",
+            "yarn.timeline-service.http-authentication.proxyuser.*.users": "",
+            "yarn.timeline-service.http-authentication.proxyuser.*.groups": "",
+            "yarn.timeline-service.http-authentication.kerberos.name.rules": "",
+            "yarn.resourcemanager.proxyuser.*.groups": "",
+            "yarn.resourcemanager.proxyuser.*.hosts": "",
+            "yarn.resourcemanager.proxyuser.*.users": "",
+            "yarn.resourcemanager.proxy-user-privileges.enabled": "true",
+            "yarn.resourcemanager.zk-acl" : "sasl:rm:rwcda"
+          }
+        },
+        {
+          "core-site": {
+            "hadoop.proxyuser.${yarn-env/yarn_user}.groups": "*",
+            "hadoop.proxyuser.${yarn-env/yarn_user}.hosts": "${clusterHostInfo/rm_host}"
+          }
+        },
+        {
+          "capacity-scheduler": {
+            "yarn.scheduler.capacity.root.acl_administer_queue": "${yarn-env/yarn_user}",
+            "yarn.scheduler.capacity.root.default.acl_administer_queue": "${yarn-env/yarn_user}",
+            "yarn.scheduler.capacity.root.acl_administer_jobs": "${yarn-env/yarn_user}",
+            "yarn.scheduler.capacity.root.default.acl_administer_jobs": "${yarn-env/yarn_user}",
+            "yarn.scheduler.capacity.root.default.acl_submit_applications": "${yarn-env/yarn_user}"
+          }
+        },
+        {
+          "ranger-yarn-audit": {
+            "xasecure.audit.jaas.Client.loginModuleName": "com.sun.security.auth.module.Krb5LoginModule",
+            "xasecure.audit.jaas.Client.loginModuleControlFlag": "required",
+            "xasecure.audit.jaas.Client.option.useKeyTab": "true",
+            "xasecure.audit.jaas.Client.option.storeKey": "false",
+            "xasecure.audit.jaas.Client.option.serviceName": "solr",
+            "xasecure.audit.destination.solr.force.use.inmemory.jaas.config": "true"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name": "NODEMANAGER",
+          "identities": [
+            {
+              "name": "nodemanager_nm",
+              "principal": {
+                "value": "nm/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "yarn-site/yarn.nodemanager.principal",
+                "local_username": "${yarn-env/yarn_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/nm.service.keytab",
+                "owner": {
+                  "name": "${yarn-env/yarn_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "yarn-site/yarn.nodemanager.keytab"
+              }
+            },
+            {
+              "name": "/HIVE/HIVE_SERVER/hive_server_hive",
+              "principal": {
+                "configuration": "hive-interactive-site/hive.llap.daemon.service.principal"
+              },
+              "keytab": {
+                "configuration": "hive-interactive-site/hive.llap.daemon.keytab.file"
+              },
+              "when" : {
+                "contains" : ["services", "HIVE"]
+              }
+            },
+            {
+              "name": "llap_zk_hive",
+              "principal": {
+                "value": "hive/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "hive-interactive-site/hive.llap.zk.sm.principal"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/hive.llap.zk.sm.keytab",
+                "owner": {
+                  "name": "${yarn-env/yarn_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": "r"
+                },
+                "configuration": "hive-interactive-site/hive.llap.zk.sm.keytab.file"
+              },
+              "when" : {
+                "contains" : ["services", "HIVE"]
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "yarn-site/yarn.nodemanager.webapp.spnego-principal"
+              },
+              "keytab": {
+                "configuration": "yarn-site/yarn.nodemanager.webapp.spnego-keytab-file"
+              }
+            }
+          ],
+          "configurations": [
+            {
+              "yarn-site": {
+                "yarn.nodemanager.container-executor.class": "org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor"
+              }
+            }
+          ]
+        },
+        {
+          "name": "RESOURCEMANAGER",
+          "identities": [
+            {
+              "name": "resource_manager_rm",
+              "principal": {
+                "value": "rm/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "yarn-site/yarn.resourcemanager.principal",
+                "local_username": "${yarn-env/yarn_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/rm.service.keytab",
+                "owner": {
+                  "name": "${yarn-env/yarn_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "yarn-site/yarn.resourcemanager.keytab"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "yarn-site/yarn.resourcemanager.webapp.spnego-principal"
+              },
+              "keytab": {
+                "configuration": "yarn-site/yarn.resourcemanager.webapp.spnego-keytab-file"
+              }
+            },
+            {
+              "name": "/YARN/RESOURCEMANAGER/resource_manager_rm",
+              "principal": {
+                "configuration": "ranger-yarn-audit/xasecure.audit.jaas.Client.option.principal"
+              },
+              "keytab": {
+                "configuration": "ranger-yarn-audit/xasecure.audit.jaas.Client.option.keyTab"
+              }
+            }
+          ]
+        },
+        {
+          "name": "APP_TIMELINE_SERVER",
+          "identities": [
+            {
+              "name": "app_timeline_server_yarn",
+              "principal": {
+                "value": "yarn/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "yarn-site/yarn.timeline-service.principal",
+                "local_username": "${yarn-env/yarn_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/yarn.service.keytab",
+                "owner": {
+                  "name": "${yarn-env/yarn_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "yarn-site/yarn.timeline-service.keytab"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "yarn-site/yarn.timeline-service.http-authentication.kerberos.principal"
+              },
+              "keytab": {
+                "configuration": "yarn-site/yarn.timeline-service.http-authentication.kerberos.keytab"
+              }
+            },
+            {
+              "name": "/HDFS/NAMENODE/hdfs"
+            }
+          ]
+        }
+      ]
+    },
+    {
+      "name": "MAPREDUCE2",
+      "identities": [
+        {
+          "name": "/spnego"
+        },
+        {
+          "name": "/smokeuser"
+        }
+      ],
+      "components": [
+        {
+          "name": "HISTORYSERVER",
+          "identities": [
+            {
+              "name": "/HDFS/NAMENODE/hdfs"
+            },
+            {
+              "name": "history_server_jhs",
+              "principal": {
+                "value": "jhs/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "mapred-site/mapreduce.jobhistory.principal",
+                "local_username": "${mapred-env/mapred_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/jhs.service.keytab",
+                "owner": {
+                  "name": "${mapred-env/mapred_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "mapred-site/mapreduce.jobhistory.keytab"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "mapred-site/mapreduce.jobhistory.webapp.spnego-principal"
+              },
+              "keytab": {
+                "configuration": "mapred-site/mapreduce.jobhistory.webapp.spnego-keytab-file"
+              }
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file
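
The "when" blocks in the YARN descriptor above make an identity conditional on cluster
state: the LLAP-related hive principals are only materialized when the HIVE service is
present. A toy evaluator for that predicate shape, assuming a plain dict of cluster
facts (only the "contains" operator seen in the JSON is implemented):

    def when_matches(when, cluster_facts):
        """Evaluate a kerberos.json 'when' predicate against cluster facts."""
        if when is None:
            return True  # unconditional identity
        if "contains" in when:
            key, needle = when["contains"]
            return needle in cluster_facts.get(key, [])
        raise ValueError("unsupported predicate: %r" % (when,))

    cluster_facts = {"services": ["HDFS", "YARN", "HIVE"]}
    print(when_matches({"contains": ["services", "HIVE"]}, cluster_facts))   # True
    print(when_matches({"contains": ["services", "STORM"]}, cluster_facts))  # False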

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
index 74f56a8..8e5d210 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
@@ -33,6 +33,9 @@ from resource_management.libraries.functions.is_empty import is_empty
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.expect import expect
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from ambari_commons.os_check import OSCheck
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 
@@ -177,6 +180,8 @@ ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
 zeppelin_master_hosts = default("/clusterHostInfo/zeppelin_master_hosts", [])
 zkfc_hosts = default("/clusterHostInfo/zkfc_hosts", [])
 
+# get the correct version to use for checking stack features
+version_for_stack_feature_checks = get_stack_feature_version(config)
 
 has_namenode = not len(namenode_host) == 0
 has_ganglia_server = not len(ganglia_server_hosts) == 0
@@ -187,6 +192,7 @@ has_falcon_server_hosts = not len(falcon_server_hosts) == 0
 has_ranger_admin = not len(ranger_admin_hosts) == 0
 has_zeppelin_master = not len(zeppelin_master_hosts) == 0
 has_zkfc_hosts = not len(zkfc_hosts)== 0
+stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
 
 if has_namenode or dfs_type == 'HCFS':
     hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
@@ -235,5 +241,5 @@ tez_am_view_acls = config['configurations']['tez-site']["tez.am.view-acls"]
 override_uid = str(default("/configurations/cluster-env/override_uid", "true")).lower()
 
 # if NN HA on secure cluster, access ZooKeeper securely
-if has_zkfc_hosts and security_enabled:
+if stack_supports_zk_security and has_zkfc_hosts and security_enabled:
     hadoop_zkfc_opts=format("-Dzookeeper.sasl.client=true -Dzookeeper.sasl.client.username=zookeeper -Djava.security.auth.login.config={hadoop_conf_secure_dir}/hdfs_jaas.conf -Dzookeeper.sasl.clientconfig=Client")

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_features.json
index ddf8348..d64598a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_features.json
@@ -22,6 +22,11 @@
       "min_version": "2.2.0.0"
     },
     {
+      "name": "secure_zookeeper",
+      "description": "Protect ZNodes with SASL acl in secure clusters",
+      "min_version": "2.6.0.0"
+    },
+    {
       "name": "config_versioning",
       "description": "Configurable versions support",
       "min_version": "2.3.0.0"

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
index 13ef4ba..fbda35a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
@@ -159,7 +159,7 @@
       fi
       # Enable ACLs on zookeeper znodes if required
       {% if hadoop_zkfc_opts is defined %}
-      export HADOOP_ZKFC_OPTS={{hadoop_zkfc_opts}}
+      export HADOOP_ZKFC_OPTS="{{hadoop_zkfc_opts}} $HADOOP_ZKFC_OPTS"
       {% endif %}
     </value>
     <value-attributes>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/main/resources/stacks/PERF/1.0/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/properties/stack_features.json b/ambari-server/src/main/resources/stacks/PERF/1.0/properties/stack_features.json
index 81640b6..e9e0ed2 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/properties/stack_features.json
@@ -6,6 +6,11 @@
       "min_version": "1.0.0.0"
     },
     {
+      "name": "secure_zookeeper",
+      "description": "Protect ZNodes with SASL acl in secure clusters",
+      "min_version": "2.6.0.0"
+    },
+    {
       "name": "config_versioning",
       "description": "Configurable versions support",
       "min_version": "1.0.0.0"

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
index aa9e9bc..e952108 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
@@ -174,13 +174,6 @@ class TestZkfc(RMFTestCase):
                               owner = 'root',
                               )
 
-    self.assertResourceCalled('File', '/etc/hadoop/conf/secure/hdfs_jaas.conf',
-                              owner='root',
-                              group='root',
-                              mode=0644,
-                              content=Template("hdfs_jaas.conf.j2")
-                              )
-
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/d0dc19e0/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/properties/stack_features.json b/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/properties/stack_features.json
index 0b6b3ab..ed066cf 100644
--- a/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/properties/stack_features.json
+++ b/contrib/management-packs/hdf-ambari-mpack/src/main/resources/stacks/HDF/2.0/properties/stack_features.json
@@ -22,6 +22,11 @@
       "min_version": "0.2.0.0"
     },
     {
+      "name": "secure_zookeeper",
+      "description": "Protect ZNodes with SASL acl in secure clusters",
+      "min_version": "2.6.0.0"
+    },
+    {
       "name": "config_versioning",
       "description": "Configurable versions support",
       "min_version": "0.3.0.0"


[23/50] [abbrv] ambari git commit: AMBARI-19645. Log Search: support credential store api - part 1 (oleewere)

Posted by nc...@apache.org.
AMBARI-19645. Log Search: support credential store api - part 1 (oleewere)

Change-Id: I00e5229da73b78dd0da998f947c208cbc631b81b


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9c952c30
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9c952c30
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9c952c30

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 9c952c300881623de5911dab06fa24f2a934b1a7
Parents: 7b0ee28
Author: oleewere <ol...@gmail.com>
Authored: Tue Jan 24 00:15:09 2017 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Tue Jan 24 00:30:25 2017 +0100

----------------------------------------------------------------------
 .../apache/ambari/logfeeder/util/SSLUtil.java   | 52 +++++++++++--
 .../src/main/scripts/run.sh                     | 78 ++++++++++----------
 .../apache/ambari/logsearch/util/SSLUtil.java   | 65 ++++++++++++----
 3 files changed, 135 insertions(+), 60 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9c952c30/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SSLUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SSLUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SSLUtil.java
index ea9f45d..80b34e0 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SSLUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SSLUtil.java
@@ -21,19 +21,27 @@ package org.apache.ambari.logfeeder.util;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
 
 import java.io.File;
 
 public class SSLUtil {
+  private static final Logger LOG = Logger.getLogger(SSLUtil.class);
+
   private static final String KEYSTORE_LOCATION_ARG = "javax.net.ssl.keyStore";
   private static final String TRUSTSTORE_LOCATION_ARG = "javax.net.ssl.trustStore";
   private static final String KEYSTORE_TYPE_ARG = "javax.net.ssl.keyStoreType";
   private static final String TRUSTSTORE_TYPE_ARG = "javax.net.ssl.trustStoreType";
   private static final String KEYSTORE_PASSWORD_ARG = "javax.net.ssl.keyStorePassword";
   private static final String TRUSTSTORE_PASSWORD_ARG = "javax.net.ssl.trustStorePassword";
+  private static final String KEYSTORE_PASSWORD_PROPERTY_NAME = "logfeeder_keystore_password";
+  private static final String TRUSTSTORE_PASSWORD_PROPERTY_NAME = "logfeeder_truststore_password";
   private static final String KEYSTORE_PASSWORD_FILE = "ks_pass.txt";
   private static final String TRUSTSTORE_PASSWORD_FILE = "ts_pass.txt";
-  
+
+  private static final String CREDENTIAL_STORE_PROVIDER_PATH = "hadoop.security.credential.provider.path";
   private static final String LOGFEEDER_CERT_DEFAULT_FOLDER = "/etc/ambari-logsearch-portal/conf/keys";
   private static final String LOGFEEDER_STORE_DEFAULT_PASSWORD = "bigdata";
   
@@ -66,17 +74,48 @@ public class SSLUtil {
   }
   
   public static void ensureStorePasswords() {
-    ensureStorePassword(KEYSTORE_LOCATION_ARG, KEYSTORE_PASSWORD_ARG, KEYSTORE_PASSWORD_FILE);
-    ensureStorePassword(TRUSTSTORE_LOCATION_ARG, TRUSTSTORE_PASSWORD_ARG, TRUSTSTORE_PASSWORD_FILE);
+    ensureStorePassword(KEYSTORE_LOCATION_ARG, KEYSTORE_PASSWORD_ARG, KEYSTORE_PASSWORD_PROPERTY_NAME, KEYSTORE_PASSWORD_FILE);
+    ensureStorePassword(TRUSTSTORE_LOCATION_ARG, TRUSTSTORE_PASSWORD_ARG, TRUSTSTORE_PASSWORD_PROPERTY_NAME, TRUSTSTORE_PASSWORD_FILE);
   }
   
-  private static void ensureStorePassword(String locationArg, String pwdArg, String pwdFile) {
+  private static void ensureStorePassword(String locationArg, String pwdArg, String propertyName, String fileName) {
     if (StringUtils.isNotEmpty(System.getProperty(locationArg)) && StringUtils.isEmpty(System.getProperty(pwdArg))) {
-      String password = getPasswordFromFile(pwdFile);
+      String password = getPassword(propertyName, fileName);
       System.setProperty(pwdArg, password);
     }
   }
 
+  private static String getPassword(String propertyName, String fileName) {
+    String credentialStorePassword = getPasswordFromCredentialStore(propertyName);
+    if (credentialStorePassword != null) {
+      return credentialStorePassword;
+    }
+    
+    String filePassword = getPasswordFromFile(fileName);
+    if (filePassword != null) {
+      return filePassword;
+    }
+    
+    return LOGFEEDER_STORE_DEFAULT_PASSWORD;
+  }
+  
+  private static String getPasswordFromCredentialStore(String propertyName) {
+    try {
+      String providerPath = LogFeederUtil.getStringProperty(CREDENTIAL_STORE_PROVIDER_PATH);
+      if (providerPath == null) {
+        return null;
+      }
+      
+      Configuration config = new Configuration();
+      config.set(CREDENTIAL_STORE_PROVIDER_PATH, providerPath);
+      char[] passwordChars = config.getPassword(propertyName);
+      return (ArrayUtils.isNotEmpty(passwordChars)) ? new String(passwordChars) : null;
+    } catch (Exception e) {
+      LOG.warn(String.format("Could not load password %s from credential store, using default password", propertyName));
+      return null;
+    }
+  }
+
   private static String getPasswordFromFile(String fileName) {
     try {
       File pwdFile = new File(LOGFEEDER_CERT_DEFAULT_FOLDER, fileName);
@@ -87,7 +126,8 @@ public class SSLUtil {
         return FileUtils.readFileToString(pwdFile);
       }
     } catch (Exception e) {
-      throw new RuntimeException("Exception occurred during read/write password file for keystore/truststore.", e);
+      LOG.warn("Exception occurred during read/write password file for keystore/truststore.", e);
+      return null;
     }
   }
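
The new getPasswordFromCredentialStore() path goes through the standard Hadoop
CredentialProvider API: Configuration.getPassword() looks the alias up in whatever
providers hadoop.security.credential.provider.path points to. For that lookup to
succeed, the alias has to be provisioned first; a sketch of doing so from Python via
the stock "hadoop credential" CLI (the JCEKS path and password here are illustrative;
only the alias name logfeeder_keystore_password comes from the code above):

    import subprocess

    # Illustrative provider URI; any CredentialProvider scheme Hadoop
    # understands (jceks, localjceks, ...) would do.
    PROVIDER = "jceks://file/etc/security/creds/logfeeder.jceks"

    def store_password(alias, password, provider=PROVIDER):
        """Create a credential alias with the `hadoop credential` CLI.

        Passing -value is convenient for a sketch but exposes the secret in
        the process table; interactive entry is safer in practice.
        """
        subprocess.run(
            ["hadoop", "credential", "create", alias,
             "-provider", provider, "-value", password],
            check=True,
        )

    store_password("logfeeder_keystore_password", "example-secret")
    # Log Feeder then needs hadoop.security.credential.provider.path set to
    # PROVIDER in its properties so SSLUtil can read the password back.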
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c952c30/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh
index 645c5f0..53cd17f 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh
@@ -19,49 +19,48 @@ cd `dirname $0`; script_dir=`pwd`; cd $curr_dir
 
 foreground=0
 if [ "$1" = "-foreground" ]; then
-    foreground=1
-    shift
+  foreground=1
+  shift
 fi
 
 if [ ! -z "$LOGFEEDER_INCLUDE" ]; then
-   source $LOGFEEDER_INCLUDE
+  source $LOGFEEDER_INCLUDE
 fi
 
 if [ ! -z "$LOGSEARCH_SOLR_CLIENT_SSL_INCLUDE" ]; then
-   source $LOGSEARCH_SOLR_CLIENT_SSL_INCLUDE
+  source $LOGSEARCH_SOLR_CLIENT_SSL_INCLUDE
 fi
 
 JAVA=java
 if [ -x $JAVA_HOME/bin/java ]; then
-    JAVA=$JAVA_HOME/bin/java
+  JAVA=$JAVA_HOME/bin/java
 fi
 
 if [ "$LOGFEEDER_JAVA_MEM" = "" ]; then
-    LOGFEEDER_JAVA_MEM="-Xmx512m"
+  LOGFEEDER_JAVA_MEM="-Xmx512m"
 fi
 
 if [ "$LOGFILE" = "" ]; then
-    LOGFILE="/var/log/logfeeder/logfeeder.out"
+  LOGFILE="/var/log/logfeeder/logfeeder.out"
 fi
 
 if [ "$PID_FILE" = "" ]; then
-    LOGFEEDER_PID_DIR=$HOME
-    PID_FILE=$LOGFEEDER_PID_DIR/logsearch-logfeeder-$USER.pid
+  LOGFEEDER_PID_DIR=$HOME
+  PID_FILE=$LOGFEEDER_PID_DIR/logsearch-logfeeder-$USER.pid
 fi
 
 if [ "$LOGFEEDER_CONF_DIR" = "" ]; then
-    LOGFEEDER_CONF_DIR="/etc/logfeeder/conf"
-    if [ ! -d $LOGFEEDER_CONF_DIR ]; then
-      if [ -d $script_dir/classes ]; then
-	  LOGFEEDER_CONF_DIR=$script_dir/classes
-      fi
+  LOGFEEDER_CONF_DIR="/etc/logfeeder/conf"
+  if [ ! -d $LOGFEEDER_CONF_DIR ]; then
+    if [ -d $script_dir/classes ]; then
+      LOGFEEDER_CONF_DIR=$script_dir/classes
+    fi
   fi
-
 fi
 
 LOGFEEDER_DEBUG_SUSPEND=${LOGFEEDER_DEBUG_SUSPEND:-n}
 if [ "$LOGFEEDER_DEBUG" = "true" ] && [ ! -z "$LOGFEEDER_DEBUG_PORT" ]; then
-  LOGFEEDER_JAVA_OPTS="$LOGSEARCH_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$LOGFEEDER_DEBUG_PORT,server=y,suspend=$LOGFEEDER_DEBUG_SUSPEND "
+  LOGFEEDER_JAVA_OPTS="$LOGFEEDER_JAVA_OPTS -Xdebug -Xrunjdwp:transport=dt_socket,address=$LOGFEEDER_DEBUG_PORT,server=y,suspend=$LOGFEEDER_DEBUG_SUSPEND "
 fi
 
 LOGFEEDER_GC_LOGFILE=`dirname $LOGFILE`/logfeeder_gc.log
@@ -74,32 +73,31 @@ if [ "$LOGFEEDER_SSL" = "true" ]; then
 fi
 
 if [ $foreground -eq 0 ]; then
-    if [ -f ${PID_FILE} ]; then
-	PID=`cat ${PID_FILE}`
-	if kill -0 $PID 2>/dev/null; then
-	    echo "logfeeder already running (${PID}) killing..."
-	    kill $PID 2>/dev/null
-	    sleep 5
-	    if kill -0 $PID 2>/dev/null; then
-		echo "logfeeder still running. Will kill process forcefully in another 10 seconds..."
-		sleep 10
-		kill -9 $PID 2>/dev/null
-		sleep 2
-	    fi
-	fi
-
-	if kill -0 $PID 2>/dev/null; then
-	    echo "ERROR: Even after all efforts to stop logfeeder, it is still running. pid=$PID. Please manually kill the service and try again."
-	    exit 1
-	fi
+  if [ -f ${PID_FILE} ]; then
+  PID=`cat ${PID_FILE}`
+    if kill -0 $PID 2>/dev/null; then
+      echo "logfeeder already running (${PID}) killing..."
+      kill $PID 2>/dev/null
+      sleep 5
+      if kill -0 $PID 2>/dev/null; then
+        echo "logfeeder still running. Will kill process forcefully in another 10 seconds..."
+        sleep 10
+        kill -9 $PID 2>/dev/null
+        sleep 2
+      fi
     fi
 
-    echo "Starting logfeeder. Output file=$LOGFILE pid_file=$PID_FILE"
-    #LOGFEEDER_CLI_CLASSPATH=
-    #set -x
-    nohup $JAVA -cp "$LOGFEEDER_CLI_CLASSPATH:$LOGFEEDER_CONF_DIR:$script_dir/libs/*:$script_dir/classes" $LOGFEEDER_GC_OPTS $LOGFEEDER_JAVA_MEM $LOGFEEDER_JAVA_OPTS $JMX org.apache.ambari.logfeeder.LogFeeder $* > $LOGFILE 2>&1 &
-    echo $! > $PID_FILE
+    if kill -0 $PID 2>/dev/null; then
+      echo "ERROR: Even after all efforts to stop logfeeder, it is still running. pid=$PID. Please manually kill the service and try again."
+      exit 1
+    fi
+  fi
+
+  echo "Starting logfeeder. Output file=$LOGFILE pid_file=$PID_FILE"
+  #LOGFEEDER_CLI_CLASSPATH=set -x
+  nohup $JAVA -cp "$LOGFEEDER_CLI_CLASSPATH:$LOGFEEDER_CONF_DIR:$script_dir/libs/*:$script_dir/classes" $LOGFEEDER_GC_OPTS $LOGFEEDER_JAVA_MEM $LOGFEEDER_JAVA_OPTS $JMX org.apache.ambari.logfeeder.LogFeeder $* > $LOGFILE 2>&1 &
+  echo $! > $PID_FILE
 else
-    $JAVA -cp "$LOGFEEDER_CLI_CLASSPATH:$LOGFEEDER_CONF_DIR:$script_dir/libs/*:$script_dir/classes" $LOGFEEDER_JAVA_MEM $LOGFEEDER_JAVA_OPTS $JMX org.apache.ambari.logfeeder.LogFeeder $*
+  $JAVA -cp "$LOGFEEDER_CLI_CLASSPATH:$LOGFEEDER_CONF_DIR:$script_dir/libs/*:$script_dir/classes" $LOGFEEDER_JAVA_MEM $LOGFEEDER_JAVA_OPTS $JMX org.apache.ambari.logfeeder.LogFeeder $*
 fi
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c952c30/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SSLUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SSLUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SSLUtil.java
index 2fb4ff3..e0111e7 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SSLUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SSLUtil.java
@@ -21,8 +21,12 @@ package org.apache.ambari.logsearch.util;
 
 import javax.net.ssl.SSLContext;
 
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.bouncycastle.jce.X509Principal;
 import org.bouncycastle.jce.provider.BouncyCastleProvider;
 import org.bouncycastle.x509.X509V3CertificateGenerator;
@@ -64,9 +68,12 @@ public class SSLUtil {
   private static final String TRUSTSTORE_PASSWORD_ARG = "javax.net.ssl.trustStorePassword";
   private static final String TRUSTSTORE_TYPE_ARG = "javax.net.ssl.trustStoreType";
   private static final String DEFAULT_TRUSTSTORE_TYPE = "JKS";
+  private static final String KEYSTORE_PASSWORD_PROPERTY_NAME = "logsearch_keystore_password";
+  private static final String TRUSTSTORE_PASSWORD_PROPERTY_NAME = "logsearch_truststore_password";
   private static final String KEYSTORE_PASSWORD_FILE = "ks_pass.txt";
   private static final String TRUSTSTORE_PASSWORD_FILE = "ts_pass.txt";
-  
+  private static final String CREDENTIAL_STORE_PROVIDER_PATH = "hadoop.security.credential.provider.path";
+
   private SSLUtil() {
     throw new UnsupportedOperationException();
   }
@@ -104,8 +111,8 @@ public class SSLUtil {
   }
   
   public static SslContextFactory getSslContextFactory() {
-    setPasswordIfSysPropIsEmpty(KEYSTORE_PASSWORD_ARG, KEYSTORE_PASSWORD_FILE);
-    setPasswordIfSysPropIsEmpty(TRUSTSTORE_PASSWORD_ARG, TRUSTSTORE_PASSWORD_FILE);
+    setPasswordIfSysPropIsEmpty(KEYSTORE_PASSWORD_ARG, KEYSTORE_PASSWORD_PROPERTY_NAME, KEYSTORE_PASSWORD_FILE);
+    setPasswordIfSysPropIsEmpty(TRUSTSTORE_PASSWORD_ARG, TRUSTSTORE_PASSWORD_PROPERTY_NAME, TRUSTSTORE_PASSWORD_FILE);
     SslContextFactory sslContextFactory = new SslContextFactory();
     sslContextFactory.setKeyStorePath(getKeyStoreLocation());
     sslContextFactory.setKeyStorePassword(getKeyStorePassword());
@@ -137,20 +144,50 @@ public class SSLUtil {
     }
   }
 
-  private static String getPasswordFromFile(String certFolder, String fileName, String defaultPassword) {
+  private static String getPasswordFromFile(String fileName) {
     try {
-      String pwdFileName = String.format("%s/%s", certFolder, fileName);
-      File pwdFile = new File(pwdFileName);
+      File pwdFile = new File(LOGSEARCH_CERT_DEFAULT_FOLDER, fileName);
       if (!pwdFile.exists()) {
-        FileUtils.writeStringToFile(pwdFile, defaultPassword);
-        return defaultPassword;
+        FileUtils.writeStringToFile(pwdFile, LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD);
+        return LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD;
       } else {
         return FileUtils.readFileToString(pwdFile);
       }
     } catch (Exception e) {
-      String errMsg = "Exception occurred during read/write password file for keystore.";
-      throw new RuntimeException(errMsg, e);
+      LOG.warn("Exception occurred during read/write password file for keystore/truststore.", e);
+      return null;
+    }
+  }
+
+  private static String getPasswordFromCredentialStore(String propertyName) {
+    try {
+      String providerPath = PropertiesHelper.getProperty(CREDENTIAL_STORE_PROVIDER_PATH);
+      if (providerPath == null) {
+        return null;
+      }
+      
+      Configuration config = new Configuration();
+      config.set(CREDENTIAL_STORE_PROVIDER_PATH, providerPath);
+      char[] passwordChars = config.getPassword(propertyName);
+      return (ArrayUtils.isNotEmpty(passwordChars)) ? new String(passwordChars) : null;
+    } catch (Exception e) {
+      LOG.warn(String.format("Could not load password %s from credential store, using default password", propertyName));
+      return null;
+    }
+  }
+
+  private static String getPassword(String propertyName, String fileName) {
+    String credentialStorePassword = getPasswordFromCredentialStore(propertyName);
+    if (credentialStorePassword != null) {
+      return credentialStorePassword;
+    }
+    
+    String filePassword = getPasswordFromFile(fileName);
+    if (filePassword != null) {
+      return filePassword;
     }
+    
+    return LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD;
   }
 
   /**
@@ -200,10 +237,10 @@ public class SSLUtil {
     }
   }
 
-  private static void setPasswordIfSysPropIsEmpty(String prop, String pwdFile) {
-    if (StringUtils.isEmpty(System.getProperty(prop))) {
-      String password = getPasswordFromFile(LOGSEARCH_CERT_DEFAULT_FOLDER, pwdFile, LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD);
-      System.setProperty(prop, password);
+  private static void setPasswordIfSysPropIsEmpty(String pwdArg, String propertyName, String fileName) {
+    if (StringUtils.isEmpty(System.getProperty(pwdArg))) {
+      String password = getPassword(propertyName, fileName);
+      System.setProperty(pwdArg, password);
     }
   }
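
Both SSLUtil variants now share the same three-step password resolution: credential
store first, then the ks_pass.txt/ts_pass.txt file, then the built-in default.
Schematically (a restatement of the getPassword() chain above, not additional
behavior):

    # Lookup order shared by both SSLUtil classes after this change.
    def get_password(from_credential_store, from_file, default):
        for source in (from_credential_store, from_file):
            password = source()
            if password is not None:
                return password
        return default

    pw = get_password(lambda: None,           # no credential store configured
                      lambda: "file-secret",  # ks_pass.txt present
                      "bigdata")              # logfeeder-side default
    assert pw == "file-secret"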
 


[14/50] [abbrv] ambari git commit: AMBARI-18739. Perf: Create Rolling and Express Upgrade Packs. Another fix of concurrent cp (dlysnichenko)

Posted by nc...@apache.org.
AMBARI-18739. Perf: Create Rolling and Express Upgrade Packs. Another fix of concurrent cp (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9bb27b42
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9bb27b42
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9bb27b42

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 9bb27b42b85643b4fae89e54801171abbc3ec2ad
Parents: 203e9fa
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Mon Jan 23 18:00:45 2017 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Mon Jan 23 18:00:45 2017 +0200

----------------------------------------------------------------------
 .../PERF/1.0/hooks/before-INSTALL/scripts/hook.py    | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9bb27b42/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
index 0865ef5..f030cfc 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
@@ -17,6 +17,8 @@ limitations under the License.
 
 """
 import os
+
+from resource_management import ExecutionFailed
 from resource_management.core.resources.system import Directory, File, Execute
 from resource_management.libraries.script import Hook
 
@@ -37,12 +39,17 @@ class BeforeInstallHook(Hook):
     cache_dir = self.extrakt_var_from_pythonpath(AMBARI_AGENT_CACHE_DIR)
     conf_select = os.path.join(cache_dir, CONF_SELECT_PY)
     dist_select = os.path.join(cache_dir, DISTRO_SELECT_PY)
-    if not os.path.exists(CONF_SELECT_DEST):
-      Execute("cp -f %s %s" % (conf_select, CONF_SELECT_DEST), user="root")
+    try:
+      Execute("cp -n %s %s" % (conf_select, CONF_SELECT_DEST), user="root")
       Execute("chmod a+x %s" % (CONF_SELECT_DEST), user="root")
-    if not os.path.exists(DISTRO_SELECT_DEST):
-      Execute("cp -f %s %s" % (dist_select, DISTRO_SELECT_DEST), user="root")
+    except ExecutionFailed:
+      pass   # Due to concurrent execution, may produce error
+
+    try:
+      Execute("cp -n %s %s" % (dist_select, DISTRO_SELECT_DEST), user="root")
       Execute("chmod a+x %s" % (DISTRO_SELECT_DEST), user="root")
+    except ExecutionFailed:
+      pass   # Due to concurrent execution, may produce error
 
   def extrakt_var_from_pythonpath(self, name):
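
The hook change above replaces an "if not os.path.exists(...): cp -f" pattern with an
unconditional "cp -n" plus a tolerated failure, which removes a check-then-act race:
with many PERF agents installing at once, the destination can appear between the
exists() check and the copy. A condensed sketch of the new pattern (paths are
illustrative; the real destinations come from the CONF_SELECT_DEST and
DISTRO_SELECT_DEST constants, whose values lie outside the hunk):

    import subprocess

    def copy_once(src, dest):
        """Copy src to dest without clobbering, tolerating concurrent copies.

        `cp -n` never overwrites an existing file; if another process won the
        race the command may exit nonzero, which is the benign case swallowed
        here, mirroring the ExecutionFailed handling in the hook.
        """
        try:
            subprocess.run(["cp", "-n", src, dest], check=True)
            subprocess.run(["chmod", "a+x", dest], check=True)
        except subprocess.CalledProcessError:
            pass  # destination already present

    copy_once("/var/lib/ambari-agent/cache/conf-select.py",  # illustrative
              "/usr/bin/conf-select")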
 


[34/50] [abbrv] ambari git commit: AMBARI-19660. Add log rotation settings - handle upgrade scenario - Storm, Hive (Madhuvanthi Radhakrishnan via smohanty)

Posted by nc...@apache.org.
AMBARI-19660. Add log rotation settings - handle upgrade scenario - Storm, Hive (Madhuvanthi Radhakrishnan via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/78fefdd4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/78fefdd4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/78fefdd4

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 78fefdd4c327390062d16a15594b32bb816f65cf
Parents: 1b630eb
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Tue Jan 24 14:05:34 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Tue Jan 24 14:08:16 2017 -0800

----------------------------------------------------------------------
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  | 46 +++++++++++++++++++
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml | 29 +++++++++++-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml | 29 ++++++++++++
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml | 30 ++++++++++++-
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |  6 ++-
 .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml     |  5 +++
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     | 45 ++++++++++++++++++-
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  | 44 ++++++++++++++++++
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml | 28 ++++++++++++
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml | 27 +++++++++++
 .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml     |  9 +++-
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |  8 ++++
 .../stacks/HDP/2.5/upgrades/config-upgrade.xml  | 47 ++++++++++++++++++++
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml | 30 ++++++++++++-
 .../stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |  8 ++++
 15 files changed, 384 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
index c9adb8c..efa6e70 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
@@ -177,6 +177,21 @@
             <transfer operation="delete" delete-key="atlas.hook.hive.maxThreads" />
           </definition>
 
+          <definition xsi:type="configure" id="hive_log4j_parameterize" summary="Parameterizing Hive Log4J Properties">
+            <type>hive-log4j</type>
+            <set key="hive_log_maxfilesize" value="256"/>
+            <set key = "hive_log_maxbackupindex" value="30"/>
+            <regex-replace key="content" find="#log4j.appender.DRFA.MaxBackupIndex=([0-9]+)" replace-with="#log4j.appender.DRFA.MaxBackupIndex={{hive_log_maxbackupindex}}"/>
+            <replace key="content" find="log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.DRFA.MaxFileSize = {{hive_log_maxfilesize}}MB"/>
+          </definition>
+          <definition xsi:type="configure" id="hive_llap_log4j_parameterize" summary="Parameterizing Hive llap Log4J Properties">
+            <type>llap-daemon-log4j</type>
+            <set key="hive_llap_log_maxfilesize" value="256"/>
+            <set key = "hive_llap_log_maxbackupindex" value="240"/>
+            <regex-replace key="content" find="property.llap.daemon.log.maxfilesize = ([0-9]+)MB" replace-with="property.llap.daemon.log.maxfilesize = {{hive_llap_log_maxfilesize}}MB"/>
+            <regex-replace key="content" find="property.llap.daemon.log.maxbackupindex = ([0-9]+)" replace-with="property.llap.daemon.log.maxbackupindex = {{hive_llap_log_maxbackupindex}}"/>
+          </definition>
+
         </changes>
       </component>
       <component name="WEBHCAT_SERVER">
@@ -198,6 +213,14 @@
             <replace key="templeton.hcat" find="/usr/hdp/current/hive-client" replace-with="/usr/hdp/${hdp.version}/hive"/>
             <set key="templeton.hive.extra.files" value="/usr/hdp/${hdp.version}/tez/conf/tez-site.xml,/usr/hdp/${hdp.version}/tez,/usr/hdp/${hdp.version}/tez/lib"/>
           </definition>
+
+          <definition xsi:type="configure" id="webhcat_log4j_parameterize" summary="Parameterizing Webhcat Log4J Properties">
+            <type>webhcat-log4j</type>
+            <set key="webhcat_log_maxfilesize" value="256"/>
+            <set key = "webhcat_log_maxbackupindex" value="20"/>
+            <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxFileSize = {{webhcat_log_maxfilesize}}MB"/>
+            <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxBackupIndex = {{webhcat_log_maxbackupindex}}"/>
+          </definition>
         </changes>
       </component>
     </service>
@@ -547,6 +570,29 @@
                  if-type="storm-site"
                  if-value="15000" />
           </definition>
+
+          <definition xsi:type="configure" id="storm_worker_log4j_parameterize" summary="Parameterizing Storm Worker Log4J Properties">
+            <type>storm-worker-log4j</type>
+            <set key="storm_wrkr_a1_maxfilesize" value="100"/>
+            <set key="storm_wrkr_a1_maxbackupindex" value="9"/>
+            <set key="storm_wrkr_out_maxfilesize" value="100"/>
+            <set key="storm_wrkr_out_maxbackupindex" value="4"/>
+            <set key="storm_wrkr_err_maxfilesize" value="100"/>
+            <set key="storm_wrkr_err_maxbackupindex" value="4"/>
+            <regex-replace key="content" find="A1&quot;&#xA;&#009;&#009;fileName=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}&quot;&#xA;&#009;&#009;filePattern=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                           replace-with="A1&quot;&#xA;&#009;&#009;fileName=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}&quot;&#xA;&#009;&#009;filePattern=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_wrkr_a1_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_wrkr_a1_maxbackupindex}}"/>
+            <regex-replace key="content" find="STDOUT&quot;&#xA;&#009;&#009;fileName=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.out&quot;&#xA;&#009;&#009;filePattern=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.out.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                           replace-with="STDOUT&quot;&#xA;&#009;&#009;fileName=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.out&quot;&#xA;&#009;&#009;filePattern=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.out.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_wrkr_out_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_wrkr_out_maxbackupindex}}"/>
+            <regex-replace key="content" find="STDERR&quot;&#xA;&#009;&#009;fileName=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.err&quot;&#xA;&#009;&#009;filePattern=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.err.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                           replace-with="STDERR&quot;&#xA;&#009;&#009;fileName=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.err&quot;&#xA;&#009;&#009;filePattern=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.err.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_wrkr_err_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_wrkr_err_maxbackupindex}}"/>
+          </definition>
+          <definition xsi:type="configure" id="storm_cluster_log4j_parameterize" summary="Parameterizing Storm Cluster Log4J Properties">
+            <type>storm-cluster-log4j</type>
+            <set key="storm_a1_maxfilesize" value="100"/>
+            <set key="storm_a1_maxbackupindex" value="9"/>
+            <regex-replace key="content" find="A1&quot; immediateFlush=&quot;false&quot;&#xA;                 fileName=&quot;\$\{sys:storm.log.dir}/\$\{sys:logfile.name}&quot;&#xA;                 filePattern=&quot;\$\{sys:storm.log.dir}/\$\{sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                           replace-with="A1&quot; immediateFlush=&quot;false&quot;&#xA;                 fileName=&quot;${sys:storm.log.dir}/${sys:logfile.name}&quot;&#xA;                 filePattern=&quot;${sys:storm.log.dir}/${sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_a1_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_a1_maxbackupindex}}"/>
+          </definition>
         </changes>
       </component>
     </service>
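
Each <set> above introduces a new stack property, and each <regex-replace>/<replace> rewrites the stored log4j "content" blob so that a previously hard-coded size or backup index becomes a {{property}} template reference. The transformation amounts to roughly the following (a sketch using Python's re.sub, not Ambari's actual implementation); note that the captured number is discarded and the new variable takes the default supplied by <set>:

    import re

    content = "#log4j.appender.DRFA.MaxBackupIndex=30"

    # Swap the literal backup index for the template variable added by
    # <set key="hive_log_maxbackupindex" value="30"/> above.
    content = re.sub(r"#log4j\.appender\.DRFA\.MaxBackupIndex=([0-9]+)",
                     "#log4j.appender.DRFA.MaxBackupIndex={{hive_log_maxbackupindex}}",
                     content)
    print(content)  # #log4j.appender.DRFA.MaxBackupIndex={{hive_log_maxbackupindex}}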

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
index d674af7..619fa5d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
@@ -318,6 +318,22 @@
         <task xsi:type="configure" id="hdp_2_4_0_0_webhcat_server_update_configuration_paths"/>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive Log4J Properties">
+        <task xsi:type="configure" id="hive_log4j_parameterize">
+          <summary>Updating the Hive Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive LLAP Log4J Properties">
+        <task xsi:type="configure" id="hive_llap_log4j_parameterize">
+          <summary>Updating the Hive LLAP Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Parameterizing WebHCat Log4J Properties">
+        <task xsi:type="configure" id="webhcat_log4j_parameterize">
+          <summary>Updating the WebHCat Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HIVE" component="HIVE_SERVER" title="Apply config changes for Hive Server">
         <task xsi:type="configure" id="hdp_2_4_0_0_hive_server_configure_authentication"/>
       </execute-stage>
@@ -368,7 +384,18 @@
         <task xsi:type="configure" id="increase_storm_zookeeper_timeouts"/>
       </execute-stage>
 
-     <!--ZOOKEEPER-->
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm worker Log4J Properties">
+        <task xsi:type="configure" id="storm_worker_log4j_parameterize">
+          <summary>Updating the Storm worker Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm cluster Log4J Properties">
+        <task xsi:type="configure" id="storm_cluster_log4j_parameterize">
+          <summary>Updating the Storm cluster Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
+      <!--ZOOKEEPER-->
       <execute-stage service="ZOOKEEPER" component="ZOOKEEPER_SERVER" title="Parameterizing Zookeeper Log4J Properties">
         <task xsi:type="configure" id="zookeeper_log4j_parameterize">
           <summary>Updating the Zookeeper Log4J properties to include parameterizations</summary>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
index 7a05c99..b6d98aa 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
@@ -362,6 +362,24 @@
         <task xsi:type="configure" id="hdp_2_5_0_0_webhcat_server_update_configuration_paths"/>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive Log4J Properties">
+        <task xsi:type="configure" id="hive_log4j_parameterize">
+          <summary>Updating the Hive Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive LLAP Log4J Properties">
+        <task xsi:type="configure" id="hive_llap_log4j_parameterize">
+          <summary>Updating the Hive LLAP Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
+      <execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Parameterizing WebHCat Log4J Properties">
+        <task xsi:type="configure" id="webhcat_log4j_parameterize">
+          <summary>Updating the WebHCat Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HIVE" component="HIVE_SERVER" title="Apply config changes for Hive Server">
         <task xsi:type="configure" id="hdp_2_4_0_0_hive_server_configure_authentication"/>
       </execute-stage>
@@ -552,6 +570,17 @@
         <task xsi:type="configure" id="increase_storm_zookeeper_timeouts"/>
       </execute-stage>
 
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm worker Log4J Properties">
+        <task xsi:type="configure" id="storm_worker_log4j_parameterize">
+          <summary>Updating the Storm worker Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm cluster Log4J Properties">
+        <task xsi:type="configure" id="storm_cluster_log4j_parameterize">
+          <summary>Updating the Storm cluster Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
       <!--ATLAS-->
       <execute-stage service="ATLAS" component="ATLAS_SERVER" title="Parameterizing Atlas Log4J Properties">
         <task xsi:type="configure" id="atlas_log4j_parameterize">

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
index 2c2049b..03fd683 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
@@ -364,6 +364,12 @@
         <task xsi:type="configure" id="hdp_2_5_0_0_webhcat_server_update_configuration_paths"/>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Parameterizing WebHCat Log4J Properties">
+        <task xsi:type="configure" id="webhcat_log4j_parameterize">
+          <summary>Updating the WebHCat Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HIVE" component="HIVE_SERVER" title="Apply config changes for Hive Server">
         <task xsi:type="configure" id="hdp_2_4_0_0_hive_server_configure_authentication"/>
       </execute-stage>
@@ -383,6 +389,17 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive Log4J Properties">
+        <task xsi:type="configure" id="hive_log4j_parameterize">
+          <summary>Updating the Hive Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive LLAP Log4J Properties">
+        <task xsi:type="configure" id="hive_llap_log4j_parameterize">
+          <summary>Updating the Hive LLAP Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
       <!--OOZIE-->
       <execute-stage service="OOZIE" component="OOZIE_SERVER" title="Apply config changes for Oozie Server">
         <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.OozieConfigCalculation">
@@ -560,7 +577,18 @@
         <task xsi:type="configure" id="increase_storm_zookeeper_timeouts"/>
       </execute-stage>
 
-     <!--ATLAS-->
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm worker Log4J Properties">
+        <task xsi:type="configure" id="storm_worker_log4j_parameterize">
+          <summary>Updating the Storm worker Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm cluster Log4J Properties">
+        <task xsi:type="configure" id="storm_cluster_log4j_parameterize">
+          <summary>Updating the Storm cluster Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
+      <!--ATLAS-->
       <execute-stage service="ATLAS" component="ATLAS_SERVER" title="Parameterizing Atlas Log4J Properties">
         <task xsi:type="configure" id="atlas_log4j_parameterize">
           <summary>Updating the Atlas Log4J properties to include parameterizations</summary>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
index a37d171..743dd1b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
@@ -754,6 +754,8 @@
       <component name="HIVE_SERVER">
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_4_0_0_hive_server_configure_authentication"/>
+          <task xsi:type="configure" id="hive_log4j_parameterize" />
+          <task xsi:type="configure" id="hive_llap_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade/>
@@ -766,6 +768,7 @@
       <component name="WEBHCAT_SERVER">
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_4_0_0_webhcat_server_update_configuration_paths"/>
+          <task xsi:type="configure" id="webhcat_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade/>
@@ -933,7 +936,8 @@
       <component name="NIMBUS">
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_3_0_0_remove_empty_storm_topology_submission_notifier_plugin_class"/>
-
+          <task xsi:type="configure" id="storm_worker_log4j_parameterize" />
+          <task xsi:type="configure" id="storm_cluster_log4j_parameterize" />
           <task xsi:type="configure" id="increase_storm_zookeeper_timeouts"/>
         </pre-upgrade>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
index 47f58bd..8db7669 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
@@ -874,6 +874,8 @@
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_4_0_0_hive_server_configure_authentication"/>
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hive_audit_db" />
+          <task xsi:type="configure" id="hive_log4j_parameterize" />
+          <task xsi:type="configure" id="hive_llap_log4j_parameterize" />
 
           <!-- Remove Atlas configs that were incorrectly added to hive-site instead of Atlas' application.properties. -->
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_hive_atlas_configs" />
@@ -893,6 +895,7 @@
       <component name="WEBHCAT_SERVER">
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_5_0_0_webhcat_server_update_configuration_paths"/>
+          <task xsi:type="configure" id="webhcat_log4j_parameterize"/>
         </pre-upgrade>
         
         <pre-downgrade />
@@ -1091,6 +1094,8 @@
             <function>delete_storm_local_data</function>
           </task>
           <task xsi:type="configure" id="hdp_2_5_0_0_add_storm_security_configs" />
+          <task xsi:type="configure" id="storm_worker_log4j_parameterize" />
+          <task xsi:type="configure" id="storm_cluster_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
index 8101682..981947f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
@@ -559,6 +559,10 @@
   <processing>
     <service name="ZOOKEEPER">
       <component name="ZOOKEEPER_SERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="zookeeper_log4j_parameterize" />
+        </pre-upgrade>
+        <pre-downgrade />
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -612,6 +616,7 @@
             <script>scripts/ranger_admin.py</script>
             <function>setup_ranger_java_patches</function>
           </task>
+          <task xsi:type="configure" id="admin_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade/> <!--  no-op to prevent config changes on downgrade -->
@@ -623,6 +628,19 @@
       </component>
 
       <component name="RANGER_USERSYNC">
+        <pre-upgrade>
+          <task xsi:type="configure" id="usersync_log4j_parameterize" />
+        </pre-upgrade>
+        <pre-downgrade />
+        <upgrade>
+          <task xsi:type="restart-task" />
+        </upgrade>
+      </component>
+      <component name="RANGER_TAGSYNC">
+        <pre-upgrade>
+          <task xsi:type="configure" id="tagsync_log4j_parameterize" />
+        </pre-upgrade>
+        <pre-downgrade />
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -633,7 +651,7 @@
       <component name="RANGER_KMS_SERVER">
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kms_audit_db" />
-
+          <task xsi:type="configure" id="kms_log4j_parameterize" />
           <task xsi:type="execute" hosts="any" sequential="true">
             <summary>Upgrading Ranger KMS database schema</summary>
             <script>scripts/kms_server.py</script>
@@ -660,6 +678,7 @@
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_4_0_0_namenode_ha_adjustments"/>
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hdfs_audit_db" />
+          <task xsi:type="configure" id="hdfs_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
@@ -739,6 +758,7 @@
             <summary>Calculating Yarn Properties for Spark Shuffle</summary>
           </task>
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_yarn_audit_db" />
+          <task xsi:type="configure" id="yarn_log4j_parameterize" />
         </pre-upgrade>
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
 
@@ -772,6 +792,7 @@
           <!-- These HBASE configs changed in HDP 2.3.4.0, but Ambari can't distinguish HDP 2.3.2.0 vs HDP 2.3.4.0, so easier to always do them. -->
           <task xsi:type="configure" id="hdp_2_4_0_0_hbase_remove_local_indexing"/>
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hbase_audit_db" />
+          <task xsi:type="configure" id="hbase_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
@@ -861,6 +882,8 @@
           <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.HiveEnvClasspathAction">
             <summary>Update hive-env content</summary>
           </task>
+          <task xsi:type="configure" id="hive_log4j_parameterize" />
+          <task xsi:type="configure" id="hive_llap_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade/>
@@ -873,6 +896,7 @@
       <component name="WEBHCAT_SERVER">
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_5_0_0_webhcat_server_update_configuration_paths"/>
+          <task xsi:type="configure" id="webhcat_log4j_parameterize" />
         </pre-upgrade>
         
         <pre-downgrade />
@@ -946,7 +970,7 @@
       <component name="OOZIE_SERVER">
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_4_0_0_oozie_remove_service_classes" />
-
+          <task xsi:type="configure" id="oozie_log4j_parameterize" />
           <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.OozieConfigCalculation">
             <summary>Adjusting Oozie properties</summary>
           </task>
@@ -1000,6 +1024,7 @@
       <component name="FALCON_SERVER">
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_5_0_0_falcon_server_adjust_services_property"/>
+          <task xsi:type="configure" id="falcon_log4j_parameterize" />
         </pre-upgrade>
         <pre-downgrade/>
         <upgrade>
@@ -1018,6 +1043,7 @@
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kafka_audit_db" />
           <task xsi:type="configure" id="hdp_2_5_0_0_add_protocol_compatibility" />
+          <task xsi:type="configure" id="kafka_log4j_parameterize" />
         </pre-upgrade>
         
         <pre-downgrade/>
@@ -1032,6 +1058,8 @@
       <component name="KNOX_GATEWAY">
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_knox_audit_db" />
+          <task xsi:type="configure" id="knox_ldap_log4j_parameterize" />
+          <task xsi:type="configure" id="knox_gateway_log4j_parameterize" />
         </pre-upgrade>
         
         <pre-downgrade /> <!--  no-op to prevent config changes on downgrade -->
@@ -1067,6 +1095,8 @@
             <function>delete_storm_local_data</function>
           </task>
           <task xsi:type="configure" id="hdp_2_5_0_0_add_storm_security_configs" />
+          <task xsi:type="configure" id="storm_worker_log4j_parameterize" />
+          <task xsi:type="configure" id="storm_cluster_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade>
@@ -1192,5 +1222,16 @@
         </upgrade>
       </component>
     </service>
+    <service name="ATLAS">
+      <component name="ATLAS_SERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="atlas_log4j_parameterize" />
+        </pre-upgrade>
+        <pre-downgrade />
+        <upgrade>
+          <task xsi:type="restart-task" />
+        </upgrade>
+      </component>
+    </service>
   </processing>
 </upgrade>
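
A recurring shape in this hunk: each component gains a <pre-upgrade> holding its new configure task, paired with an empty <pre-downgrade /> (the file's own comments call this a "no-op to prevent config changes on downgrade"), so the log4j rewrite is applied only while moving forward. As a toy model of that ordering, not Ambari's actual upgrade engine:

    def process_component(direction, component):
        # direction is "upgrade" or "downgrade"; the matching pre-* tasks
        # run first, then the main tasks (typically a restart).
        for task in component.get("pre-" + direction, []):
            task()
        for task in component.get(direction, []):
            task()

    process_component("upgrade", {
        "pre-upgrade": [lambda: print("apply hive_log4j_parameterize")],
        "pre-downgrade": [],  # empty: nothing is rewritten on downgrade
        "upgrade": [lambda: print("restart component")],
    })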

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index 4451306..0a6ea5e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -78,6 +78,21 @@
             <transfer operation="delete" delete-key="atlas.hook.hive.maxThreads" />
           </definition>
 
+          <definition xsi:type="configure" id="hive_log4j_parameterize" summary="Parameterizing Hive Log4J Properties">
+            <type>hive-log4j</type>
+            <set key="hive_log_maxfilesize" value="256"/>
+            <set key = "hive_log_maxbackupindex" value="30"/>
+            <regex-replace key="content" find="#log4j.appender.DRFA.MaxBackupIndex=([0-9]+)" replace-with="#log4j.appender.DRFA.MaxBackupIndex={{hive_log_maxbackupindex}}"/>
+            <replace key="content" find="log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.DRFA.MaxFileSize = {{hive_log_maxfilesize}}MB"/>
+          </definition>
+          <definition xsi:type="configure" id="hive_llap_log4j_parameterize" summary="Parameterizing Hive llap Log4J Properties">
+            <type>llap-daemon-log4j</type>
+            <set key="hive_llap_log_maxfilesize" value="256"/>
+            <set key = "hive_llap_log_maxbackupindex" value="240"/>
+            <regex-replace key="content" find="property.llap.daemon.log.maxfilesize = ([0-9]+)MB" replace-with="property.llap.daemon.log.maxfilesize = {{hive_llap_log_maxfilesize}}MB"/>
+            <regex-replace key="content" find="property.llap.daemon.log.maxbackupindex = ([0-9]+)" replace-with="property.llap.daemon.log.maxbackupindex = {{hive_llap_log_maxbackupindex}}"/>
+          </definition>
+
         </changes>
       </component>
 
@@ -91,6 +106,13 @@
             <replace key="templeton.hcat" find="/usr/hdp/current/hive-client" replace-with="/usr/hdp/${hdp.version}/hive"/>
             <set key="templeton.hive.extra.files" value="/usr/hdp/${hdp.version}/tez/conf/tez-site.xml,/usr/hdp/${hdp.version}/tez,/usr/hdp/${hdp.version}/tez/lib"/>
           </definition>
+          <definition xsi:type="configure" id="webhcat_log4j_parameterize" summary="Parameterizing Webhcat Log4J Properties">
+            <type>webhcat-log4j</type>
+            <set key="webhcat_log_maxfilesize" value="256"/>
+            <set key = "webhcat_log_maxbackupindex" value="20"/>
+            <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxFileSize = {{webhcat_log_maxfilesize}}MB"/>
+            <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxBackupIndex = {{webhcat_log_maxbackupindex}}"/>
+          </definition>
         </changes>
       </component>
     </service>
@@ -381,6 +403,28 @@
                  if-type="storm-site"
                  if-value="15000" />
           </definition>
+          <definition xsi:type="configure" id="storm_worker_log4j_parameterize" summary="Parameterizing Storm Worker Log4J Properties">
+            <type>storm-worker-log4j</type>
+            <set key="storm_wrkr_a1_maxfilesize" value="100"/>
+            <set key="storm_wrkr_a1_maxbackupindex" value="9"/>
+            <set key="storm_wrkr_out_maxfilesize" value="100"/>
+            <set key="storm_wrkr_out_maxbackupindex" value="4"/>
+            <set key="storm_wrkr_err_maxfilesize" value="100"/>
+            <set key="storm_wrkr_err_maxbackupindex" value="4"/>
+            <regex-replace key="content" find="A1&quot;&#xA;&#009;&#009;fileName=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}&quot;&#xA;&#009;&#009;filePattern=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                           replace-with="A1&quot;&#xA;&#009;&#009;fileName=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}&quot;&#xA;&#009;&#009;filePattern=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_wrkr_a1_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_wrkr_a1_maxbackupindex}}"/>
+            <regex-replace key="content" find="STDOUT&quot;&#xA;&#009;&#009;fileName=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.out&quot;&#xA;&#009;&#009;filePattern=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.out.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                           replace-with="STDOUT&quot;&#xA;&#009;&#009;fileName=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.out&quot;&#xA;&#009;&#009;filePattern=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.out.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_wrkr_out_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_wrkr_out_maxbackupindex}}"/>
+            <regex-replace key="content" find="STDERR&quot;&#xA;&#009;&#009;fileName=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.err&quot;&#xA;&#009;&#009;filePattern=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.err.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                           replace-with="STDERR&quot;&#xA;&#009;&#009;fileName=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.err&quot;&#xA;&#009;&#009;filePattern=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.err.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_wrkr_err_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_wrkr_err_maxbackupindex}}"/>
+          </definition>
+          <definition xsi:type="configure" id="storm_cluster_log4j_parameterize" summary="Parameterizing Storm Cluster Log4J Properties">
+            <type>storm-cluster-log4j</type>
+            <set key="storm_a1_maxfilesize" value="100"/>
+            <set key="storm_a1_maxbackupindex" value="9"/>
+            <regex-replace key="content" find="A1&quot; immediateFlush=&quot;false&quot;&#xA;                 fileName=&quot;\$\{sys:storm.log.dir}/\$\{sys:logfile.name}&quot;&#xA;                 filePattern=&quot;\$\{sys:storm.log.dir}/\$\{sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                           replace-with="A1&quot; immediateFlush=&quot;false&quot;&#xA;                 fileName=&quot;${sys:storm.log.dir}/${sys:logfile.name}&quot;&#xA;                 filePattern=&quot;${sys:storm.log.dir}/${sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_a1_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_a1_maxbackupindex}}"/>
+          </definition>
         </changes>
       </component>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
index 1e4da76..d41c4eb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
@@ -423,6 +423,22 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive Log4J Properties">
+        <task xsi:type="configure" id="hive_log4j_parameterize">
+          <summary>Updating the Hive Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive LLAP Log4J Properties">
+        <task xsi:type="configure" id="hive_llap_log4j_parameterize">
+          <summary>Updating the Hive LLAP Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Parameterizing WebHCat Log4J Properties">
+        <task xsi:type="configure" id="webhcat_log4j_parameterize">
+          <summary>Updating the WebHCat Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
       <!-- HBASE -->
       <execute-stage service="HBASE" component="HBASE_MASTER" title="Apply config changes for Hbase Master">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hbase_audit_db"/>
@@ -479,6 +495,18 @@
         <task xsi:type="configure" id="increase_storm_zookeeper_timeouts"/>
       </execute-stage>
 
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm worker Log4J Properties">
+        <task xsi:type="configure" id="storm_worker_log4j_parameterize">
+          <summary>Updating the Storm worker Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm cluster Log4J Properties">
+        <task xsi:type="configure" id="storm_cluster_log4j_parameterize">
+          <summary>Updating the Storm cluster Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
+
       <!-- KAFKA -->
       <execute-stage service="KAFKA" component="KAFKA_BROKER" title="Apply config changes for Kafka Broker">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kafka_audit_db"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
index 647cb45..e5c51ad 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
@@ -424,6 +424,22 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive Log4J Properties">
+        <task xsi:type="configure" id="hive_log4j_parameterize">
+          <summary>Updating the Hive Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive LLAP Log4J Properties">
+        <task xsi:type="configure" id="hive_llap_log4j_parameterize">
+          <summary>Updating the Hive LLAP Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Parameterizing WebHCat Log4J Properties">
+        <task xsi:type="configure" id="webhcat_log4j_parameterize">
+          <summary>Updating the WebHCat Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
       <!-- HBASE -->
       <execute-stage service="HBASE" component="HBASE_MASTER" title="Apply config changes for Hbase Master">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hbase_audit_db"/>
@@ -479,6 +495,17 @@
         <task xsi:type="configure" id="increase_storm_zookeeper_timeouts"/>
       </execute-stage>
 
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm worker Log4J Properties">
+        <task xsi:type="configure" id="storm_worker_log4j_parameterize">
+          <summary>Updating the Storm worker Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm cluster Log4J Properties">
+        <task xsi:type="configure" id="storm_cluster_log4j_parameterize">
+          <summary>Updating the Storm cluster Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
       <!-- KAFKA -->
       <execute-stage service="KAFKA" component="KAFKA_BROKER" title="Apply config changes for Kafka Broker">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kafka_audit_db"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
index fac26de..d2c6ff5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
@@ -875,7 +875,8 @@
           <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.HiveEnvClasspathAction">
             <summary>Update hive-env content</summary>
           </task>
-
+          <task xsi:type="configure" id="hive_log4j_parameterize" />
+          <task xsi:type="configure" id="hive_llap_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade/>
@@ -886,6 +887,10 @@
       </component>
 
       <component name="WEBHCAT_SERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="webhcat_log4j_parameterize" />
+        </pre-upgrade>
+        <pre-downgrade/>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -1042,6 +1047,8 @@
           <task xsi:type="configure" id="hdp_2_5_0_0_add_storm_security_configs" />
           <task xsi:type="configure" id="hdp_2_4_0_0_remove_empty_storm_topology_submission_notifier_plugin_class"/>
           <task xsi:type="configure" id="increase_storm_zookeeper_timeouts"/>
+          <task xsi:type="configure" id="storm_worker_log4j_parameterize" />
+          <task xsi:type="configure" id="storm_cluster_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
index d75cb24..8b463dd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
@@ -880,6 +880,8 @@
           <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.HiveEnvClasspathAction">
             <summary>Update hive-env content</summary>
           </task>
+          <task xsi:type="configure" id="hive_log4j_parameterize" />
+          <task xsi:type="configure" id="hive_llap_log4j_parameterize" />
 
         </pre-upgrade>
 
@@ -891,6 +893,10 @@
       </component>
 
       <component name="WEBHCAT_SERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="webhcat_log4j_parameterize" />
+        </pre-upgrade>
+        <pre-downgrade/>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
@@ -1047,6 +1053,8 @@
           <task xsi:type="configure" id="hdp_2_5_0_0_add_storm_security_configs" />
           <task xsi:type="configure" id="hdp_2_4_0_0_remove_empty_storm_topology_submission_notifier_plugin_class"/>
           <task xsi:type="configure" id="increase_storm_zookeeper_timeouts"/>
+          <task xsi:type="configure" id="storm_worker_log4j_parameterize" />
+          <task xsi:type="configure" id="storm_cluster_log4j_parameterize" />
         </pre-upgrade>
 
         <pre-downgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
index 73e3c38..ddffc79 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
@@ -41,6 +41,28 @@
                  if-type="storm-site"
                  if-value="15000" />
           </definition>
+          <definition xsi:type="configure" id="storm_worker_log4j_parameterize" summary="Parameterizing Storm Worker Log4J Properties">
+            <type>storm-worker-log4j</type>
+            <set key="storm_wrkr_a1_maxfilesize" value="100"/>
+            <set key="storm_wrkr_a1_maxbackupindex" value="9"/>
+            <set key="storm_wrkr_out_maxfilesize" value="100"/>
+            <set key="storm_wrkr_out_maxbackupindex" value="4"/>
+            <set key="storm_wrkr_err_maxfilesize" value="100"/>
+            <set key="storm_wrkr_err_maxbackupindex" value="4"/>
+            <regex-replace key="content" find="A1&quot;&#xA;&#009;&#009;fileName=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}&quot;&#xA;&#009;&#009;filePattern=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                                         replace-with="A1&quot;&#xA;&#009;&#009;fileName=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}&quot;&#xA;&#009;&#009;filePattern=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_wrkr_a1_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_wrkr_a1_maxbackupindex}}"/>
+            <regex-replace key="content" find="STDOUT&quot;&#xA;&#009;&#009;fileName=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.out&quot;&#xA;&#009;&#009;filePattern=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.out.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                                         replace-with="STDOUT&quot;&#xA;&#009;&#009;fileName=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.out&quot;&#xA;&#009;&#009;filePattern=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.out.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_wrkr_out_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_wrkr_out_maxbackupindex}}"/>
+            <regex-replace key="content" find="STDERR&quot;&#xA;&#009;&#009;fileName=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.err&quot;&#xA;&#009;&#009;filePattern=&quot;\$\{sys:workers.artifacts}/\$\{sys:storm.id}/\$\{sys:worker.port}/\$\{sys:logfile.name}.err.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                                         replace-with="STDERR&quot;&#xA;&#009;&#009;fileName=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.err&quot;&#xA;&#009;&#009;filePattern=&quot;${sys:workers.artifacts}/${sys:storm.id}/${sys:worker.port}/${sys:logfile.name}.err.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${patternNoTime}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_wrkr_err_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_wrkr_err_maxbackupindex}}"/>
+          </definition>
+          <definition xsi:type="configure" id="storm_cluster_log4j_parameterize" summary="Parameterizing Storm Cluster Log4J Properties">
+            <type>storm-cluster-log4j</type>
+            <set key="storm_a1_maxfilesize" value="100"/>
+            <set key="storm_a1_maxbackupindex" value="9"/>
+            <regex-replace key="content" find="A1&quot; immediateFlush=&quot;false&quot;&#xA;                 fileName=&quot;\$\{sys:storm.log.dir}/\$\{sys:logfile.name}&quot;&#xA;                 filePattern=&quot;\$\{sys:storm.log.dir}/\$\{sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;\$\{pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;(?:[0-9]+) MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;([0-9]+)"
+                                         replace-with="A1&quot; immediateFlush=&quot;false&quot;&#xA;                 fileName=&quot;${sys:storm.log.dir}/${sys:logfile.name}&quot;&#xA;                 filePattern=&quot;${sys:storm.log.dir}/${sys:logfile.name}.%i.gz&quot;&gt;&#xA;        &lt;PatternLayout&gt;&#xA;            &lt;pattern&gt;${pattern}&lt;/pattern&gt;&#xA;        &lt;/PatternLayout&gt;&#xA;        &lt;Policies&gt;&#xA;            &lt;SizeBasedTriggeringPolicy size=&quot;{{storm_a1_maxfilesize}} MB&quot;/&gt; &lt;!-- Or every 100 MB --&gt;&#xA;        &lt;/Policies&gt;&#xA;        &lt;DefaultRolloverStrategy max=&quot;{{storm_a1_maxbackupindex}}"/>
+          </definition>
         </changes>
       </component>
     </service>
@@ -267,6 +289,20 @@
             <type>tez-site</type>
             <insert key="tez.task.launch.cmd-opts" value="{{heap_dump_opts}}" insert-type="append" newline-before="false" newline-after="false" />
           </definition>
+          <definition xsi:type="configure" id="hive_log4j_parameterize" summary="Parameterizing Hive Log4J Properties">
+            <type>hive-log4j</type>
+            <set key="hive_log_maxfilesize" value="256"/>
+            <set key = "hive_log_maxbackupindex" value="30"/>
+            <regex-replace key="content" find="#log4j.appender.DRFA.MaxBackupIndex=([0-9]+)" replace-with="#log4j.appender.DRFA.MaxBackupIndex={{hive_log_maxbackupindex}}"/>
+            <replace key="content" find="log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.DRFA.MaxFileSize = {{hive_log_maxfilesize}}MB"/>
+          </definition>
+          <definition xsi:type="configure" id="hive_llap_log4j_parameterize" summary="Parameterizing Hive llap Log4J Properties">
+            <type>llap-daemon-log4j</type>
+            <set key="hive_llap_log_maxfilesize" value="256"/>
+            <set key = "hive_llap_log_maxbackupindex" value="240"/>
+            <regex-replace key="content" find="property.llap.daemon.log.maxfilesize = ([0-9]+)MB" replace-with="property.llap.daemon.log.maxfilesize = {{hive_llap_log_maxfilesize}}MB"/>
+            <regex-replace key="content" find="property.llap.daemon.log.maxbackupindex = ([0-9]+)" replace-with="property.llap.daemon.log.maxbackupindex = {{hive_llap_log_maxbackupindex}}"/>
+          </definition>
         </changes>
       </component>
       <component name="HIVE_SERVER_INTERACTIVE">
@@ -281,6 +317,17 @@
           </definition>
         </changes>
       </component>
+      <component name = "WEBHCAT_SERVER">
+        <changes>
+          <definition xsi:type="configure" id="webhcat_log4j_parameterize" summary="Parameterizing Webhcat Log4J Properties">
+            <type>webhcat-log4j</type>
+            <set key="webhcat_log_maxfilesize" value="256"/>
+            <set key = "webhcat_log_maxbackupindex" value="20"/>
+            <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxFileSize = {{webhcat_log_maxfilesize}}MB"/>
+            <replace key="content" find="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard  =  org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.standard.MaxBackupIndex = {{webhcat_log_maxbackupindex}}"/>
+          </definition>
+        </changes>
+      </component>
     </service>
 
   </services>
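
For context, each <regex-replace> definition above rewrites the "content" property of the named config type: the XML-escaped find pattern is matched against the existing log4j content and substituted with the parameterized replace-with text, turning hard-coded values like "256MB" into template references such as {{hive_llap_log_maxfilesize}}. A minimal Python sketch of the assumed semantics (not the actual server-side upgrade code):

    import re

    def apply_regex_replace(content, find, replace_with):
        # a non-matching pattern leaves content unchanged, which keeps the
        # definition safe to re-run against an already-parameterized config
        return re.sub(find, replace_with, content)

    old = "property.llap.daemon.log.maxfilesize = 256MB"
    print(apply_regex_replace(
        old,
        r"property\.llap\.daemon\.log\.maxfilesize = ([0-9]+)MB",
        "property.llap.daemon.log.maxfilesize = {{hive_llap_log_maxfilesize}}MB"))
    # property.llap.daemon.log.maxfilesize = {{hive_llap_log_maxfilesize}}MB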

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index 6c4da04..ba33a7d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -401,7 +401,35 @@
       <execute-stage service="STORM" component="NIMBUS" title="Apply config changes for Nimbus">
         <task xsi:type="configure" id="increase_storm_zookeeper_timeouts"/>
       </execute-stage>
-      
+
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm worker Log4J Properties">
+        <task xsi:type="configure" id="storm_worker_log4j_parameterize">
+          <summary>Updating the Storm worker Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="STORM" component="NIMBUS" title="Parameterizing Storm cluster Log4J Properties">
+        <task xsi:type="configure" id="storm_cluster_log4j_parameterize">
+          <summary>Updating the Storm cluster Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
+      <!--HIVE-->
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive Log4J Properties">
+        <task xsi:type="configure" id="hive_log4j_parameterize">
+          <summary>Updating the Hive Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive LLAP Log4J Properties">
+        <task xsi:type="configure" id="hive_llap_log4j_parameterize">
+          <summary>Updating the Hive LLAP Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+      <execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Parameterizing WebHCat Log4J Properties">
+        <task xsi:type="configure" id="webhcat_log4j_parameterize">
+          <summary>Updating the WebHCat Log4J properties to include parameterizations</summary>
+        </task>
+      </execute-stage>
+
       <execute-stage service="HIVE" component="HIVE_SERVER" title="Appending heap dump options for Hive">
         <task xsi:type="configure" id="hdp_2_6_0_0_hive_append_heap_dump_options"/>
       </execute-stage>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78fefdd4/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index 7f9e986..790e50c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -767,6 +767,8 @@
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_6_0_0_tez_append_heap_dump_options"/>
           <task xsi:type="configure" id="hdp_2_6_0_0_hive_append_heap_dump_options"/>
+          <task xsi:type="configure" id="hive_log4j_parameterize" />
+          <task xsi:type="configure" id="hive_llap_log4j_parameterize" />
         </pre-upgrade>
         
         <pre-downgrade />
@@ -780,6 +782,7 @@
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_6_0_0_hive_llap_append_java_heap_dump_options"/>
           <task xsi:type="configure" id="hdp_2_6_0_0_hive_llap_append_heap_dump_options"/>
+          <task xsi:type="configure" id="webhcat_log4j_parameterize" />
         </pre-upgrade>
         
         <pre-downgrade />
@@ -927,6 +930,11 @@
 
     <service name="STORM">
       <component name="NIMBUS">
+        <pre-upgrade>
+          <task xsi:type="configure" id="storm_worker_log4j_parameterize" />
+          <task xsi:type="configure" id="storm_cluster_log4j_parameterize" />
+        </pre-upgrade>
+        <pre-downgrade/>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>


[18/50] [abbrv] ambari git commit: AMBARI-19676. ZKFC start failed (aonishuk)

Posted by nc...@apache.org.
AMBARI-19676. ZKFC start failed (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/796658f6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/796658f6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/796658f6

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 796658f679a8176a62445211234426f02172cdbd
Parents: b2d78ac
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Jan 23 19:54:05 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Jan 23 19:54:05 2017 +0200

----------------------------------------------------------------------
 .../stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml      | 2 +-
 .../stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml      | 2 +-
 .../stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml      | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/796658f6/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index 114c965..ef111e0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -183,7 +183,7 @@ fi
 
 # Enable ACLs on zookeeper znodes if required
 {% if hadoop_zkfc_opts is defined %}
-  export HADOOP_ZKFC_OPTS={{hadoop_zkfc_opts}}
+  export HADOOP_ZKFC_OPTS="{{hadoop_zkfc_opts}}"
 {% endif %}
     </value>
     <value-attributes>

http://git-wip-us.apache.org/repos/asf/ambari/blob/796658f6/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
index 6d9eaf0..0212ba0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
@@ -159,7 +159,7 @@ fi
 
 # Enable ACLs on zookeeper znodes if required
 {% if hadoop_zkfc_opts is defined %}
-  export HADOOP_ZKFC_OPTS={{hadoop_zkfc_opts}}
+  export HADOOP_ZKFC_OPTS="{{hadoop_zkfc_opts}}"
 {% endif %}
     </value>
     <value-attributes>

http://git-wip-us.apache.org/repos/asf/ambari/blob/796658f6/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
index 6d9eaf0..0212ba0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
@@ -159,7 +159,7 @@ fi
 
 # Enable ACLs on zookeeper znodes if required
 {% if hadoop_zkfc_opts is defined %}
-  export HADOOP_ZKFC_OPTS={{hadoop_zkfc_opts}}
+  export HADOOP_ZKFC_OPTS="{{hadoop_zkfc_opts}}"
 {% endif %}
     </value>
     <value-attributes>
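
The quotes matter because hadoop_zkfc_opts normally renders to multiple space-separated -D options; without quotes the shell splits the assignment at the first space and ZKFC fails to start. A small Python illustration of the tokenization, using a hypothetical rendered value:

    import shlex

    opts = '-Dzookeeper.sasl.client=true -Dzookeeper.sasl.clientconfig=Client'  # hypothetical value
    print(shlex.split('export HADOOP_ZKFC_OPTS=' + opts))
    # ['export', 'HADOOP_ZKFC_OPTS=-Dzookeeper.sasl.client=true', '-Dzookeeper.sasl.clientconfig=Client']
    print(shlex.split('export HADOOP_ZKFC_OPTS="' + opts + '"'))
    # ['export', 'HADOOP_ZKFC_OPTS=-Dzookeeper.sasl.client=true -Dzookeeper.sasl.clientconfig=Client']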


[48/50] [abbrv] ambari git commit: AMBARI-19695. Remove thrown exceptions while validating Kerberos Authentication configuration (echekanskiy via dlysnichenko)

Posted by nc...@apache.org.
AMBARI-19695. Remove thrown exceptions while validating Kerberos Authentication configuration (echekanskiy via dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e5552301
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e5552301
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e5552301

Branch: refs/heads/branch-dev-patch-upgrade
Commit: e55523012a47727116657ce61ec681260b282b7e
Parents: 1c115bc
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Wed Jan 25 17:07:40 2017 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Wed Jan 25 17:09:19 2017 +0200

----------------------------------------------------------------------
 .../ambari/server/configuration/Configuration.java      |  6 ++----
 .../ambari/server/configuration/ConfigurationTest.java  | 12 ------------
 2 files changed, 2 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e5552301/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 73c70dc..0c51576 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -5892,16 +5892,14 @@ public class Configuration {
       File keytabFile = new File(kerberosAuthProperties.getSpnegoKeytabFilePath());
       if (!keytabFile.exists()) {
         String message = String.format("The SPNEGO keytab file path (%s) specified in %s does not exist. " +
-                "This will cause issues authenticating users using Kerberos.",
+                "This will cause issues authenticating users using Kerberos. . Make sure proper keytab file provided later.",
             keytabFile.getAbsolutePath(), KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey());
         LOG.error(message);
-        throw new IllegalArgumentException(message);
       } else if (!keytabFile.canRead()) {
         String message = String.format("The SPNEGO keytab file path (%s) specified in %s cannot be read. " +
-                "This will cause issues authenticating users using Kerberos.",
+                "This will cause issues authenticating users using Kerberos. . Make sure proper keytab file provided later.",
             keytabFile.getAbsolutePath(), KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey());
         LOG.error(message);
-        throw new IllegalArgumentException(message);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e5552301/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java b/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
index d7cb8a3..51114f8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/configuration/ConfigurationTest.java
@@ -988,18 +988,6 @@ public class ConfigurationTest {
     new Configuration(properties);
   }
 
-  @Test(expected = IllegalArgumentException.class)
-  public void testKerberosAuthenticationSPNEGOKeytabFileNotFound() {
-    Properties properties = new Properties();
-    properties.put(Configuration.KERBEROS_AUTH_ENABLED.getKey(), "true");
-    properties.put(Configuration.KERBEROS_AUTH_SPNEGO_KEYTAB_FILE.getKey(), "/path/to/missing/spnego/keytab/file");
-    properties.put(Configuration.KERBEROS_AUTH_SPNEGO_PRINCIPAL.getKey(), "spnego/principal@REALM");
-    properties.put(Configuration.KERBEROS_AUTH_USER_TYPES.getKey(), "LDAP, LOCAL");
-    properties.put(Configuration.KERBEROS_AUTH_AUTH_TO_LOCAL_RULES.getKey(), "DEFAULT");
-
-    new Configuration(properties);
-  }
-
   /**
    * Tests the default values for the {@link MetricsRetrievalService}.
    *
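
Net effect of the change: a missing or unreadable SPNEGO keytab is now logged as an error rather than failing server startup, so the keytab can be supplied after Ambari is up. A rough sketch of the new behavior in Python (illustrative only, not the actual Configuration class):

    import logging, os

    LOG = logging.getLogger("ambari.configuration")

    def check_spnego_keytab(path):
        if not os.path.exists(path):
            LOG.error("SPNEGO keytab %s does not exist; Kerberos authentication "
                      "will fail until a proper keytab is provided.", path)
        elif not os.access(path, os.R_OK):
            LOG.error("SPNEGO keytab %s cannot be read; Kerberos authentication "
                      "will fail until a proper keytab is provided.", path)
        # no exception in either branch, so startup continues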


[16/50] [abbrv] ambari git commit: AMBARI-19677 - Storm service check failed after HOU to HDP-2.5.3 due to CNF for StormAtlasHook (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-19677 - Storm service check failed after HOU to HDP-2.5.3 due to CNF for StormAtlasHook (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/918fd597
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/918fd597
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/918fd597

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 918fd597bc19958ed16f45b8bfce63debd325fd4
Parents: 8a64be4
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Jan 23 09:54:15 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Jan 23 11:39:53 2017 -0500

----------------------------------------------------------------------
 .../libraries/functions/setup_atlas_hook.py     | 14 ++++++-
 .../ATLAS/0.1.0.2.3/package/scripts/params.py   |  2 +-
 .../0.1.0.2.3/package/scripts/status_params.py  |  3 +-
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |  6 +--
 .../0.5.0.2.1/package/scripts/params_linux.py   |  4 +-
 .../STORM/0.9.1/package/scripts/params_linux.py |  2 +-
 .../stacks/2.3/ATLAS/test_metadata_server.py    | 40 ++++++++++----------
 .../stacks/2.5/ATLAS/test_atlas_server.py       | 16 ++++----
 8 files changed, 47 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/918fd597/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py b/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
index eeae4bc..367afc8 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
@@ -24,9 +24,11 @@ __all__ = ["has_atlas_in_cluster", "setup_atlas_hook", "setup_atlas_jar_symlinks
 import os
 
 # Local Imports
+from resource_management.libraries.functions import stack_features
 from resource_management.libraries.resources.properties_file import PropertiesFile
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.default import default
+from resource_management.libraries.script import Script
 from resource_management.core.resources.system import Link
 from resource_management.core.resources.packaging import Package
 from resource_management.core.logger import Logger
@@ -159,11 +161,19 @@ def setup_atlas_jar_symlinks(hook_name, jar_source_dir):
   """
   import params
 
-  atlas_home_dir = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ \
-    else format("{stack_root}/current/atlas-server")
+  stack_root = Script.get_stack_root()
+  atlas_home_dir = os.path.join(stack_root, "current", "atlas-server")
+
+  # if this is an upgrade/downgrade, then we must link in the correct version
+  # which may not be "current", so change the home directory location
+  upgrade_type = Script.get_upgrade_type(default("/commandParams/upgrade_type", ""))
+  if upgrade_type is not None:
+    version_dir_segment = stack_features.get_stack_feature_version(Script.get_config())
+    atlas_home_dir = os.path.join(stack_root, version_dir_segment, "atlas")
 
   # Will only exist if this host contains Atlas Server
   atlas_hook_dir = os.path.join(atlas_home_dir, "hook", hook_name)
+
   if os.path.exists(atlas_hook_dir):
     Logger.info("Atlas Server is present on this host, will symlink jars inside of %s to %s if not already done." %
                 (jar_source_dir, atlas_hook_dir))
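
The core of the fix in setup_atlas_jar_symlinks: during an upgrade or downgrade the hook jars must be linked from the explicitly versioned Atlas directory, because the "current" symlink may still point at the previous stack version. A condensed sketch of the resolution (the version string below is illustrative):

    import os

    def resolve_atlas_home(stack_root, upgrade_type, feature_version):
        if upgrade_type is not None:
            # mid-upgrade, "current" may not point at the target version yet
            return os.path.join(stack_root, feature_version, "atlas")
        return os.path.join(stack_root, "current", "atlas-server")

    print(resolve_atlas_home("/usr/hdp", None, None))                # /usr/hdp/current/atlas-server
    print(resolve_atlas_home("/usr/hdp", "rolling", "2.5.3.0-37"))   # /usr/hdp/2.5.3.0-37/atlas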

http://git-wip-us.apache.org/repos/asf/ambari/blob/918fd597/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
index c74d046..afd6dde 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
@@ -94,7 +94,7 @@ version = default("/commandParams/version", None)
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
 
-metadata_home = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ else format('{stack_root}/current/atlas-server')
+metadata_home = format('{stack_root}/current/atlas-server')
 metadata_bin = format("{metadata_home}/bin")
 
 python_binary = os.environ['PYTHON_EXE'] if 'PYTHON_EXE' in os.environ else sys.executable

http://git-wip-us.apache.org/repos/asf/ambari/blob/918fd597/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/status_params.py
index 1fd1b9b..852a9cb 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/status_params.py
@@ -30,6 +30,7 @@ from resource_management.libraries.functions import StackFeature
 
 
 config = Script.get_config()
+stack_root = Script.get_stack_root()
 
 default_conf_file = "application.properties"
 
@@ -40,7 +41,7 @@ if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, version_for_stack_fea
   default_conf_file = "atlas-application.properties"
 
 conf_file = default("/configurations/atlas-env/metadata_conf_file", default_conf_file)
-conf_dir = os.environ['METADATA_CONF'] if 'METADATA_CONF' in os.environ else '/etc/atlas/conf'
+conf_dir = format("{stack_root}/current/atlas-server/conf")
 pid_dir = default("/configurations/atlas-env/metadata_pid_dir", "/var/run/atlas")
 pid_file = format("{pid_dir}/atlas.pid")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/918fd597/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
index 70caaea..c9c66ac 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
@@ -23,24 +23,20 @@ import traceback
 # Local Imports
 from resource_management.core.environment import Environment
 from resource_management.core.source import InlineTemplate
-from resource_management.core.source import Template
 from resource_management.core.source import  DownloadSource
 from resource_management.core.resources import Execute
 from resource_management.core.resources.service import Service
 from resource_management.core.resources.service import ServiceConfig
 from resource_management.core.resources.system import Directory
 from resource_management.core.resources.system import File
-from resource_management.libraries.functions import get_user_call_output
 from resource_management.libraries.script import Script
 from resource_management.libraries.resources import PropertiesFile
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.show_logs import show_logs
 from resource_management.libraries.functions import get_user_call_output
-from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster, setup_atlas_hook, install_atlas_hook_packages, setup_atlas_jar_symlinks
-from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster, setup_atlas_hook, install_atlas_hook_packages, setup_atlas_jar_symlinks
+from resource_management.libraries.functions.setup_atlas_hook import setup_atlas_hook, install_atlas_hook_packages, setup_atlas_jar_symlinks
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from ambari_commons.constants import SERVICE
 from resource_management.core.logger import Logger

http://git-wip-us.apache.org/repos/asf/ambari/blob/918fd597/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
index d667d9f..3773918 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
@@ -161,8 +161,8 @@ if enable_atlas_hook:
       or check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT, stack_version_formatted)
 
   if check_stack_feature(StackFeature.ATLAS_CONF_DIR_IN_PATH, stack_version_formatted):
-    atlas_conf_dir = os.environ['METADATA_CONF'] if 'METADATA_CONF' in os.environ else format('{stack_root}/current/atlas-server/conf')
-    atlas_home_dir = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ else format('{stack_root}/current/atlas-server')
+    atlas_conf_dir = format('{stack_root}/current/atlas-server/conf')
+    atlas_home_dir = format('{stack_root}/current/atlas-server')
     atlas_hook_cp = atlas_conf_dir + os.pathsep + os.path.join(atlas_home_dir, "hook", "falcon", "*") + os.pathsep
   elif check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, stack_version_formatted):
     atlas_hook_cp = format('{stack_root}/current/atlas-client/hook/falcon/*') + os.pathsep

http://git-wip-us.apache.org/repos/asf/ambari/blob/918fd597/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
index 137f29a..a176456 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
@@ -222,7 +222,7 @@ atlas_hook_filename = default('/configurations/atlas-env/metadata_conf_file', 'a
 if enable_atlas_hook:
   # Only append /etc/atlas/conf to classpath if on HDP 2.4.*
   if check_stack_feature(StackFeature.ATLAS_CONF_DIR_IN_PATH, stack_version_formatted):
-    atlas_conf_dir = os.environ['METADATA_CONF'] if 'METADATA_CONF' in os.environ else '/etc/atlas/conf'
+    atlas_conf_dir = format('{stack_root}/current/atlas-server/conf')
     jar_jvm_opts += '-Datlas.conf=' + atlas_conf_dir
 #endregion
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/918fd597/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
index e97759b..8ae6469 100644
--- a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
@@ -32,7 +32,7 @@ class TestMetadataServer(RMFTestCase):
 
   def configureResourcesCalled(self):
       # Both server and client
-      self.assertResourceCalled('Directory', '/etc/atlas/conf',
+      self.assertResourceCalled('Directory', '/usr/hdp/current/atlas-server/conf',
                                 owner='atlas',
                                 group='hadoop',
                                 create_parents = True,
@@ -48,7 +48,7 @@ class TestMetadataServer(RMFTestCase):
                                 cd_access='a',
                                 mode=0755
       )
-      self.assertResourceCalled('Directory', '/etc/atlas/conf/solr',
+      self.assertResourceCalled('Directory', '/usr/hdp/current/atlas-server/conf/solr',
                                 owner='atlas',
                                 group='hadoop',
                                 create_parents = True,
@@ -94,7 +94,7 @@ class TestMetadataServer(RMFTestCase):
       app_props["atlas.server.address.id1"] = u"%s:%s" % (host_name, metadata_port)
       app_props["atlas.server.ha.enabled"] = "false"
 
-      self.assertResourceCalled('File', '/etc/atlas/conf/atlas-log4j.xml',
+      self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/atlas-log4j.xml',
                           content=InlineTemplate(
                             self.getConfig()['configurations'][
                               'atlas-log4j']['content']),
@@ -102,7 +102,7 @@ class TestMetadataServer(RMFTestCase):
                           group='hadoop',
                           mode=0644,
       )
-      self.assertResourceCalled('File', '/etc/atlas/conf/atlas-env.sh',
+      self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/atlas-env.sh',
                                 content=InlineTemplate(
                                     self.getConfig()['configurations'][
                                         'atlas-env']['content']),
@@ -110,7 +110,7 @@ class TestMetadataServer(RMFTestCase):
                                 group='hadoop',
                                 mode=0755,
       )
-      self.assertResourceCalled('File', '/etc/atlas/conf/solr/solrconfig.xml',
+      self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/solr/solrconfig.xml',
                                 content=InlineTemplate(
                                     self.getConfig()['configurations'][
                                       'atlas-solrconfig']['content']),
@@ -120,7 +120,7 @@ class TestMetadataServer(RMFTestCase):
       )
       # application.properties file
       self.assertResourceCalled('PropertiesFile',
-                                '/etc/atlas/conf/application.properties',
+                                '/usr/hdp/current/atlas-server/conf/application.properties',
                                 properties=app_props,
                                 owner=u'atlas',
                                 group=u'hadoop',
@@ -156,7 +156,7 @@ class TestMetadataServer(RMFTestCase):
                                       only_if='test -d /tmp/solr_config_atlas_configs_0.[0-9]*')
       self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /tmp/solr_config_atlas_configs_0.[0-9]* --config-set atlas_configs --retry 30 --interval 5',
                                       only_if='test -d /tmp/solr_config_atlas_configs_0.[0-9]*')
-      self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /etc/atlas/conf/solr --config-set atlas_configs --retry 30 --interval 5',
+      self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /usr/hdp/current/atlas-server/conf/solr --config-set atlas_configs --retry 30 --interval 5',
                                       not_if='test -d /tmp/solr_config_atlas_configs_0.[0-9]*')
       self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_atlas_configs_0.[0-9]*',
                                       action=['delete'],
@@ -168,7 +168,7 @@ class TestMetadataServer(RMFTestCase):
 
   def configureResourcesCalledSecure(self):
     # Both server and client
-    self.assertResourceCalled('Directory', '/etc/atlas/conf',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/atlas-server/conf',
                               owner='atlas',
                               group='hadoop',
                               create_parents = True,
@@ -184,7 +184,7 @@ class TestMetadataServer(RMFTestCase):
                               cd_access='a',
                               mode=0755
     )
-    self.assertResourceCalled('Directory', '/etc/atlas/conf/solr',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/atlas-server/conf/solr',
                               owner='atlas',
                               group='hadoop',
                               create_parents = True,
@@ -230,7 +230,7 @@ class TestMetadataServer(RMFTestCase):
     app_props["atlas.server.address.id1"] = u"%s:%s" % (host_name, metadata_port)
     app_props["atlas.server.ha.enabled"] = "false"
 
-    self.assertResourceCalled('File', '/etc/atlas/conf/atlas-log4j.xml',
+    self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/atlas-log4j.xml',
                               content=InlineTemplate(
                                 self.getConfig()['configurations'][
                                   'atlas-log4j']['content']),
@@ -238,7 +238,7 @@ class TestMetadataServer(RMFTestCase):
                               group='hadoop',
                               mode=0644,
                               )
-    self.assertResourceCalled('File', '/etc/atlas/conf/atlas-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/atlas-env.sh',
                               content=InlineTemplate(
                                 self.getConfig()['configurations'][
                                   'atlas-env']['content']),
@@ -246,7 +246,7 @@ class TestMetadataServer(RMFTestCase):
                               group='hadoop',
                               mode=0755,
                               )
-    self.assertResourceCalled('File', '/etc/atlas/conf/solr/solrconfig.xml',
+    self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/solr/solrconfig.xml',
                               content=InlineTemplate(
                                 self.getConfig()['configurations'][
                                   'atlas-solrconfig']['content']),
@@ -256,14 +256,14 @@ class TestMetadataServer(RMFTestCase):
                               )
     # application.properties file
     self.assertResourceCalled('PropertiesFile',
-                              '/etc/atlas/conf/application.properties',
+                              '/usr/hdp/current/atlas-server/conf/application.properties',
                               properties=app_props,
                               owner=u'atlas',
                               group=u'hadoop',
                               mode=0644,
                               )
 
-    self.assertResourceCalled('TemplateConfig', '/etc/atlas/conf/atlas_jaas.conf',
+    self.assertResourceCalled('TemplateConfig', '/usr/hdp/current/atlas-server/conf/atlas_jaas.conf',
                               owner = 'atlas',
                               )
 
@@ -298,7 +298,7 @@ class TestMetadataServer(RMFTestCase):
                                     only_if='test -d /tmp/solr_config_atlas_configs_0.[0-9]*')
     self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /tmp/solr_config_atlas_configs_0.[0-9]* --config-set atlas_configs --retry 30 --interval 5',
                                     only_if='test -d /tmp/solr_config_atlas_configs_0.[0-9]*')
-    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /etc/atlas/conf/solr --config-set atlas_configs --retry 30 --interval 5',
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /usr/hdp/current/atlas-server/conf/solr --config-set atlas_configs --retry 30 --interval 5',
                                     not_if='test -d /tmp/solr_config_atlas_configs_0.[0-9]*')
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_atlas_configs_0.[0-9]*',
                                     action=['delete'],
@@ -324,7 +324,7 @@ class TestMetadataServer(RMFTestCase):
                               group = "hadoop",
                               content=Template("atlas_hbase_setup.rb.j2"))
 
-    self.assertResourceCalled('File', '/etc/atlas/conf/hdfs-site.xml',action = ['delete'],)
+    self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/hdfs-site.xml',action = ['delete'],)
 
     self.assertNoMoreResources()
 
@@ -344,7 +344,7 @@ class TestMetadataServer(RMFTestCase):
                               group = "hadoop",
                               content=Template("atlas_hbase_setup.rb.j2"))
 
-    self.assertResourceCalled('File', '/etc/atlas/conf/hdfs-site.xml',action = ['delete'],)
+    self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/hdfs-site.xml',action = ['delete'],)
 
 
     self.assertNoMoreResources()
@@ -364,10 +364,10 @@ class TestMetadataServer(RMFTestCase):
                               group = "hadoop",
                               content=Template("atlas_hbase_setup.rb.j2"))
 
-    self.assertResourceCalled('File', '/etc/atlas/conf/hdfs-site.xml',action = ['delete'],)
+    self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/hdfs-site.xml',action = ['delete'],)
 
 
-    self.assertResourceCalled('Execute', 'source /etc/atlas/conf/atlas-env.sh ; /usr/hdp/current/atlas-server/bin/atlas_start.py',
+    self.assertResourceCalled('Execute', 'source /usr/hdp/current/atlas-server/conf/atlas-env.sh ; /usr/hdp/current/atlas-server/bin/atlas_start.py',
                               not_if = 'ls /var/run/atlas/atlas.pid >/dev/null 2>&1 && ps -p `cat /var/run/atlas/atlas.pid` >/dev/null 2>&1',
                               user = 'atlas',
     )
@@ -383,7 +383,7 @@ class TestMetadataServer(RMFTestCase):
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', 'source /etc/atlas/conf/atlas-env.sh; /usr/hdp/current/atlas-server/bin/atlas_stop.py',
+    self.assertResourceCalled('Execute', 'source /usr/hdp/current/atlas-server/conf/atlas-env.sh; /usr/hdp/current/atlas-server/bin/atlas_stop.py',
                               user = 'atlas',
     )
     self.assertResourceCalled('File', '/var/run/atlas/atlas.pid',

http://git-wip-us.apache.org/repos/asf/ambari/blob/918fd597/ambari-server/src/test/python/stacks/2.5/ATLAS/test_atlas_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/ATLAS/test_atlas_server.py b/ambari-server/src/test/python/stacks/2.5/ATLAS/test_atlas_server.py
index bbaef78..cd2fac8 100644
--- a/ambari-server/src/test/python/stacks/2.5/ATLAS/test_atlas_server.py
+++ b/ambari-server/src/test/python/stacks/2.5/ATLAS/test_atlas_server.py
@@ -30,7 +30,7 @@ class TestAtlasServer(RMFTestCase):
 
   def configureResourcesCalled(self):
     # Both server and client
-    self.assertResourceCalled('Directory', '/etc/atlas/conf',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/atlas-server/conf',
                               owner='atlas',
                               group='hadoop',
                               create_parents = True,
@@ -45,7 +45,7 @@ class TestAtlasServer(RMFTestCase):
                               cd_access='a',
                               mode=0755
     )
-    self.assertResourceCalled('Directory', '/etc/atlas/conf/solr',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/atlas-server/conf/solr',
                               owner='atlas',
                               group='hadoop',
                               create_parents = True,
@@ -92,7 +92,7 @@ class TestAtlasServer(RMFTestCase):
     app_props["atlas.server.address.id1"] = u"%s:%s" % (host_name, metadata_port)
     app_props["atlas.server.ha.enabled"] = "false"
 
-    self.assertResourceCalled('File', '/etc/atlas/conf/atlas-log4j.xml',
+    self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/atlas-log4j.xml',
                           content=InlineTemplate(
                             self.getConfig()['configurations'][
                               'atlas-log4j']['content']),
@@ -100,7 +100,7 @@ class TestAtlasServer(RMFTestCase):
                           group='hadoop',
                           mode=0644,
     )
-    self.assertResourceCalled('File', '/etc/atlas/conf/atlas-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/atlas-env.sh',
                               content=InlineTemplate(
                                   self.getConfig()['configurations'][
                                     'atlas-env']['content']),
@@ -108,7 +108,7 @@ class TestAtlasServer(RMFTestCase):
                               group='hadoop',
                               mode=0755,
     )
-    self.assertResourceCalled('File', '/etc/atlas/conf/solr/solrconfig.xml',
+    self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/solr/solrconfig.xml',
                               content=InlineTemplate(
                                   self.getConfig()['configurations'][
                                     'atlas-solrconfig']['content']),
@@ -118,7 +118,7 @@ class TestAtlasServer(RMFTestCase):
     )
     # application.properties file
     self.assertResourceCalled('PropertiesFile',
-                              '/etc/atlas/conf/atlas-application.properties',
+                              '/usr/hdp/current/atlas-server/conf/atlas-application.properties',
                               properties=app_props,
                               owner=u'atlas',
                               group=u'hadoop',
@@ -155,7 +155,7 @@ class TestAtlasServer(RMFTestCase):
                                     only_if='test -d /tmp/solr_config_atlas_configs_0.[0-9]*')
     self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /tmp/solr_config_atlas_configs_0.[0-9]* --config-set atlas_configs --retry 30 --interval 5',
                                     only_if='test -d /tmp/solr_config_atlas_configs_0.[0-9]*')
-    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /etc/atlas/conf/solr --config-set atlas_configs --retry 30 --interval 5',
+    self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --upload-config --config-dir /usr/hdp/current/atlas-server/conf/solr --config-set atlas_configs --retry 30 --interval 5',
                                     not_if='test -d /tmp/solr_config_atlas_configs_0.[0-9]*')
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_atlas_configs_0.[0-9]*',
                                     action=['delete'],
@@ -181,7 +181,7 @@ class TestAtlasServer(RMFTestCase):
                               group = "hadoop",
                               content=Template("atlas_hbase_setup.rb.j2"))
 
-    self.assertResourceCalled('File', '/etc/atlas/conf/hdfs-site.xml',action = ['delete'],)
+    self.assertResourceCalled('File', '/usr/hdp/current/atlas-server/conf/hdfs-site.xml',action = ['delete'],)
 
     self.assertNoMoreResources()
 


[17/50] [abbrv] ambari git commit: AMBARI-19636: Provide default values for Kafka nofile and nproc limit properties (dili)

Posted by nc...@apache.org.
AMBARI-19636: Provide default values for Kafka nofile and nproc limit properties (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b2d78ac5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b2d78ac5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b2d78ac5

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b2d78ac5f3136bc9d55e3f8806726146278bd654
Parents: 918fd59
Author: Di Li <di...@apache.org>
Authored: Mon Jan 23 12:03:55 2017 -0500
Committer: Di Li <di...@apache.org>
Committed: Mon Jan 23 12:03:55 2017 -0500

----------------------------------------------------------------------
 .../common-services/KAFKA/0.8.1/package/scripts/params.py        | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b2d78ac5/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
index 6c7ff69..1d3a195 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
@@ -76,8 +76,8 @@ limits_conf_dir = "/etc/security/limits.d"
 # Used while upgrading the stack in a kerberized cluster and running kafka-acls.sh
 zookeeper_connect = default("/configurations/kafka-broker/zookeeper.connect", None)
 
-kafka_user_nofile_limit = config['configurations']['kafka-env']['kafka_user_nofile_limit']
-kafka_user_nproc_limit = config['configurations']['kafka-env']['kafka_user_nproc_limit']
+kafka_user_nofile_limit = default('/configurations/kafka-env/kafka_user_nofile_limit', 128000)
+kafka_user_nproc_limit = default('/configurations/kafka-env/kafka_user_nproc_limit', 65536)
 
 # parameters for 2.2+
 if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
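
Switching from direct dictionary access to default() is what supplies the fallback: a kafka-env that omits these properties no longer raises a KeyError during parameter evaluation. Assumed semantics of the helper, sketched in plain Python (the real function reads the command JSON implicitly; the explicit config argument here is only for illustration):

    def default(path, fallback, config):
        node = config
        for key in path.strip('/').split('/'):
            if not isinstance(node, dict) or key not in node:
                return fallback
            node = node[key]
        return node

    config = {'configurations': {'kafka-env': {}}}
    print(default('/configurations/kafka-env/kafka_user_nofile_limit', 128000, config))  # 128000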


[13/50] [abbrv] ambari git commit: AMBARI-18739. Perf: Create Rolling and Express Upgrade Packs. Fix of concurrent cp (dlysnichenko)

Posted by nc...@apache.org.
AMBARI-18739. Perf: Create Rolling and Express Upgrade Packs. Fix of concurrent cp (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/203e9fab
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/203e9fab
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/203e9fab

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 203e9fab7b6c4ae1895a8c545991ada8b9e7503e
Parents: c32eb48
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Mon Jan 23 16:45:54 2017 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Mon Jan 23 16:46:45 2017 +0200

----------------------------------------------------------------------
 .../stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py         | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/203e9fab/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
index 175f39a..0865ef5 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/hooks/before-INSTALL/scripts/hook.py
@@ -38,10 +38,10 @@ class BeforeInstallHook(Hook):
     conf_select = os.path.join(cache_dir, CONF_SELECT_PY)
     dist_select = os.path.join(cache_dir, DISTRO_SELECT_PY)
     if not os.path.exists(CONF_SELECT_DEST):
-      Execute("cp %s %s" % (conf_select, CONF_SELECT_DEST), user="root")
+      Execute("cp -f %s %s" % (conf_select, CONF_SELECT_DEST), user="root")
       Execute("chmod a+x %s" % (CONF_SELECT_DEST), user="root")
     if not os.path.exists(DISTRO_SELECT_DEST):
-      Execute("cp %s %s" % (dist_select, DISTRO_SELECT_DEST), user="root")
+      Execute("cp -f %s %s" % (dist_select, DISTRO_SELECT_DEST), user="root")
       Execute("chmod a+x %s" % (DISTRO_SELECT_DEST), user="root")
 
   def extrakt_var_from_pythonpath(self, name):
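
The -f flag matters because many simulated PERF agents can run this hook concurrently on the same host: another process may create the destination between the os.path.exists() check and the copy, and a plain cp onto a file it cannot open fails, while cp -f removes the destination and retries. A guarded sketch of the same copy (the race window is narrowed, not eliminated):

    import os, subprocess

    def copy_if_missing(src, dest):
        if not os.path.exists(dest):
            # -f forces overwrite even if another agent created dest meanwhile
            subprocess.check_call("cp -f %s %s" % (src, dest), shell=True)
            subprocess.check_call("chmod a+x %s" % dest, shell=True)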


[46/50] [abbrv] ambari git commit: AMBARI-19708. Exception in agent logs (Dmytro Grinenko via smohanty)

Posted by nc...@apache.org.
AMBARI-19708. Exception in agent logs (Dmytro Grinenko via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b60fabad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b60fabad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b60fabad

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b60fabade46fc5f38c7a33a6ee0044ce1931756a
Parents: 8642a46
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Wed Jan 25 06:47:12 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Wed Jan 25 06:47:12 2017 -0800

----------------------------------------------------------------------
 .../HIVE/0.12.0.2.0/package/scripts/webhcat_server.py              | 2 --
 1 file changed, 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b60fabad/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
index 86ecec6..93fa411 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
@@ -119,8 +119,6 @@ class WebHCatServerDefault(WebHCatServer):
       )
 
       security_params = {}
-      security_params.update(get_params_from_filesystem(status_params.hive_conf_dir,
-                                                        {'hive-site.xml': FILE_TYPE_XML}))
       security_params.update(get_params_from_filesystem(status_params.webhcat_conf_dir,
                                                         {'webhcat-site.xml': FILE_TYPE_XML}))
       result_issues = validate_security_config_properties(security_params, expectations)
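
The removed lines loaded hive-site.xml from status_params.hive_conf_dir, a directory that need not exist on a host running only WEBHCAT_SERVER, which is presumably the exception that surfaced in the agent logs; the WebHCat security check only needs webhcat-site.xml. A defensive variant (hypothetical helper, not part of this patch) that tolerates a missing conf dir:

    import os

    def params_from_filesystem_if_present(conf_dir, files, reader):
        # return an empty dict instead of raising when the conf dir is absent
        if conf_dir and os.path.isdir(conf_dir):
            return reader(conf_dir, files)
        return {}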


[03/50] [abbrv] ambari git commit: AMBARI-18739. Perf: Create Rolling and Express Upgrade Packs. Fix failing test with xml validation (dlysnichenko)

Posted by nc...@apache.org.
AMBARI-18739. Perf: Create Rolling and Express Upgrade Packs. Fix failing test with xml validation (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8362dcef
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8362dcef
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8362dcef

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 8362dcefd6550323f94fabdf40dfcbedd2db4fc2
Parents: 3e5185a
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Mon Jan 23 12:12:07 2017 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Mon Jan 23 12:13:14 2017 +0200

----------------------------------------------------------------------
 .../stacks/PERF/1.0/services/FAKEHDFS/configuration/hadoop-env.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8362dcef/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/configuration/hadoop-env.xml
index 5aef1e9..09a9081 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/configuration/hadoop-env.xml
@@ -202,7 +202,7 @@
     <value>/tmp</value>
     <description>FAKEHDFS tmp Dir</description>
     <display-name>FAKEHDFS tmp Dir</display-name>
-    <property-type>NOT_MANAGED_FAKEHDFS_PATH</property-type>
+    <property-type>NOT_MANAGED_HDFS_PATH</property-type>
     <value-attributes>
       <read-only>true</read-only>
       <overridable>false</overridable>
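
The failing test came from property-type values being validated against a fixed server-side enum, which knows NOT_MANAGED_HDFS_PATH but not the invented NOT_MANAGED_FAKEHDFS_PATH. A toy version of such a check (the value set below is illustrative, not the complete enum):

    KNOWN_PROPERTY_TYPES = {"PASSWORD", "USER", "GROUP", "TEXT", "NOT_MANAGED_HDFS_PATH"}

    def validate_property_type(value):
        if value not in KNOWN_PROPERTY_TYPES:
            raise ValueError("Unknown property-type: %s" % value)

    validate_property_type("NOT_MANAGED_HDFS_PATH")        # passes
    # validate_property_type("NOT_MANAGED_FAKEHDFS_PATH")  # would raise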


[41/50] [abbrv] ambari git commit: AMBARI-19694. Post user creation hook - input csv generated with READ permissions (Laszlo Puskas via oleewere)

Posted by nc...@apache.org.
AMBARI-19694. Post user creation hook - input csv generated with READ permissions (Laszlo Puskas via oleewere)

Change-Id: I3ae59596f5817dbd60bed37f259ce54a5f16de39


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b9200e0e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b9200e0e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b9200e0e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b9200e0e57111b33b7b998fa59a5d25c0b700869
Parents: ff4babb
Author: Laszlo Puskas <lp...@hortonworks.com>
Authored: Wed Jan 25 11:01:45 2017 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Wed Jan 25 11:03:58 2017 +0100

----------------------------------------------------------------------
 .../users/CsvFilePersisterService.java          | 24 +++++-
 .../CsvFilePersisterServiceFunctionalTest.java  | 91 ++++++++++++++++++++
 2 files changed, 113 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b9200e0e/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterService.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterService.java
index d8ffe98..fe6bf35 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterService.java
@@ -18,12 +18,21 @@
 
 package org.apache.ambari.server.serveraction.users;
 
+import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.FileAttribute;
+import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.PosixFilePermissions;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import javax.inject.Inject;
 import javax.inject.Singleton;
@@ -51,10 +60,21 @@ public class CsvFilePersisterService implements CollectionPersisterService<Strin
     this.csvFile = csvFile;
   }
 
+  public Set<PosixFilePermission> getCsvPermissions() {
+    Set<PosixFilePermission> permissionsSet = new HashSet<>();
+    permissionsSet.add(PosixFilePermission.OWNER_READ);
+    permissionsSet.add(PosixFilePermission.OWNER_WRITE);
+    permissionsSet.add(PosixFilePermission.GROUP_READ);
+    permissionsSet.add(PosixFilePermission.OTHERS_READ);
+    return permissionsSet;
+  }
+
   @Inject
   public void init() throws IOException {
-    // make 3rd party dependencies be managed by the container (probably constructor binding or factory is needed)
-    fileWriter = new FileWriter(csvFile);
+
+    Path csv = Files.createFile(Paths.get(csvFile), PosixFilePermissions.asFileAttribute(getCsvPermissions()));
+    fileWriter = new FileWriter(csv.toFile());
+
     csvPrinter = new CSVPrinter(fileWriter, CSVFormat.DEFAULT.withRecordSeparator(NEW_LINE_SEPARATOR));
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b9200e0e/ambari-server/src/test/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterServiceFunctionalTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterServiceFunctionalTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterServiceFunctionalTest.java
new file mode 100644
index 0000000..97529fe
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterServiceFunctionalTest.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.users;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.PosixFilePermission;
+
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.assistedinject.FactoryModuleBuilder;
+
+/**
+ * Test cases for the persister service implementation.
+ * To fully test the behavior it builds up a Guice context.
+ */
+public class CsvFilePersisterServiceFunctionalTest {
+
+  private static final String TEST_CSV = "/tmp/users.csv";
+
+  private static Injector injector;
+  private static CollectionPersisterServiceFactory serviceFactory;
+  private CsvFilePersisterService csvFileCsvFilePersisterService;
+  private Path testCsvPath;
+
+  /**
+   * Guice module for testing service / factory behavior
+   */
+  private static class TestPersistServiceModule extends AbstractModule {
+    @Override
+    protected void configure() {
+      install(new FactoryModuleBuilder().implement(CollectionPersisterService.class, CsvFilePersisterService.class).build(CollectionPersisterServiceFactory.class));
+    }
+  }
+
+  @BeforeClass
+  public static void beforeClass() {
+    injector = Guice.createInjector(new TestPersistServiceModule());
+    serviceFactory = (CollectionPersisterServiceFactory) injector.getInstance(CollectionPersisterServiceFactory.class);
+  }
+
+  @Before
+  public void before() {
+    csvFileCsvFilePersisterService = serviceFactory.createCsvFilePersisterService(TEST_CSV);
+    testCsvPath = Paths.get(TEST_CSV);
+  }
+
+  @Test
+  public void shouldCreateCsvFileWithExpectedPermissions() throws IOException {
+
+    Assert.assertNotNull(csvFileCsvFilePersisterService);
+
+    Assert.assertTrue("The generated file couldn't be found", Files.exists(testCsvPath));
+
+    Assert.assertTrue("The generated files doesn't have all the expected permissions", Files.getPosixFilePermissions(testCsvPath).containsAll(csvFileCsvFilePersisterService.getCsvPermissions()));
+
+    Assert.assertFalse("The generated file has more than the required permissions", Files.getPosixFilePermissions(testCsvPath).contains(PosixFilePermission.GROUP_EXECUTE));
+
+  }
+
+  @After
+  public void after() throws IOException {
+    Files.deleteIfExists(Paths.get(TEST_CSV));
+  }
+
+}
\ No newline at end of file
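
The patch above swaps a bare "new FileWriter(csvFile)" (whose resulting mode
depends on the process umask) for Files.createFile with an explicit POSIX
permission set, so the generated CSV is world-readable by construction; the new
functional test then checks the result via Files.getPosixFilePermissions. A
minimal standalone sketch of the same technique (not Ambari code; the class
name and path are illustrative, and it requires a POSIX filesystem):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;

public class PosixCreateDemo {
  public static void main(String[] args) throws IOException {
    // rw-r--r-- : owner read/write, group read, others read
    Set<PosixFilePermission> perms = PosixFilePermissions.fromString("rw-r--r--");
    // Create the file with the permissions attached at creation time
    // (note: the filesystem may still mask them on some platforms)
    Path csv = Files.createFile(
        Paths.get("/tmp/demo-users.csv"),  // hypothetical path
        PosixFilePermissions.asFileAttribute(perms));
    // Inspect what was actually recorded, as the new test does
    System.out.println(Files.getPosixFilePermissions(csv));
    Files.deleteIfExists(csv);
  }
}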


[21/50] [abbrv] ambari git commit: AMBARI-19679. Dangling config-group hostmapping entries result in partial loading of config groups. (swagle)

Posted by nc...@apache.org.
AMBARI-19679. Dangling config-group hostmapping entries result in partial loading of config groups. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a51ede84
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a51ede84
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a51ede84

Branch: refs/heads/branch-dev-patch-upgrade
Commit: a51ede8480e22e21a738bfe9921eb49d7e10a3aa
Parents: 3ade321
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Mon Jan 23 11:14:35 2017 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Mon Jan 23 11:14:35 2017 -0800

----------------------------------------------------------------------
 .../apache/ambari/server/state/configgroup/ConfigGroupImpl.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a51ede84/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
index fe1f338..03edcf8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupImpl.java
@@ -172,8 +172,9 @@ public class ConfigGroupImpl implements ConfigGroup {
         if (host != null && hostEntity != null) {
           m_hosts.put(hostEntity.getHostId(), host);
         }
-      } catch (AmbariException e) {
-        LOG.warn("Host seems to be deleted but Config group mapping still exists !");
+      } catch (Exception e) {
+        LOG.warn("Host {} seems to be deleted but Config group {} mapping " +
+          "still exists !", hostMappingEntity.getHostname(), configGroupName);
         LOG.debug("Host seems to be deleted but Config group mapping still exists !", e);
       }
     }
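
The fix widens the catch from AmbariException to Exception, so one dangling
host mapping no longer aborts loading of the remaining config groups, and it
switches to parameterized logging so the offending host and group are named at
WARN while the full stack trace stays at DEBUG. A minimal sketch of that
logging pattern, assuming SLF4J on the classpath (class and method names here
are illustrative, not Ambari code):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingDemo {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingDemo.class);

  void reportDanglingMapping(String hostname, String groupName, Exception e) {
    // {} placeholders defer string building until WARN is actually enabled
    LOG.warn("Host {} seems to be deleted but Config group {} mapping still exists!",
        hostname, groupName);
    // Passing the exception as the final argument logs its stack trace
    LOG.debug("Host seems to be deleted but Config group mapping still exists!", e);
  }
}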


[10/50] [abbrv] ambari git commit: AMBARI-19669. Not able to add the settings for action nodes in firefox browser (Padma Priya via pallavkul)

Posted by nc...@apache.org.
AMBARI-19669. Not able to add the settings for action nodes in firefox browser (Padma Priya via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cacb1ccc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cacb1ccc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cacb1ccc

Branch: refs/heads/branch-dev-patch-upgrade
Commit: cacb1cccf9eda0ff13890310dc1e99208287818a
Parents: 74aff7c
Author: pallavkul <pa...@gmail.com>
Authored: Mon Jan 23 18:33:31 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Mon Jan 23 18:33:31 2017 +0530

----------------------------------------------------------------------
 .../ui/app/components/version-settings.js       |  3 +--
 .../main/resources/ui/app/domain/workflow.js    |  3 +--
 .../main/resources/ui/app/utils/common-utils.js |  2 +-
 .../main/resources/ui/app/utils/constants.js    | 28 ++++++++++----------
 4 files changed, 17 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/cacb1ccc/contrib/views/wfmanager/src/main/resources/ui/app/components/version-settings.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/version-settings.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/version-settings.js
index 966313f..3c953e0 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/version-settings.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/version-settings.js
@@ -25,8 +25,7 @@ export default Ember.Component.extend({
     this.set('workflowSchemaVersions', this.get('schemaVersions').getSupportedVersions('workflow'));
     this.set('selectedWorkflowVersion', this.get('workflow').schemaVersions.workflowVersion);
     var actionVersions = Ember.A([]);
-    Object.keys(Constants.actions).forEach((key)=>{
-      var action = Constants.actions[key];
+    Constants.actions.forEach((action)=>{
       if(action.supportsSchema){
         actionVersions.push({name:action.name, supporedVersions :this.get('schemaVersions').getSupportedVersions(action.name),
         selectedVersion: this.get('workflow').schemaVersions.actionVersions.get(action.name)});

http://git-wip-us.apache.org/repos/asf/ambari/blob/cacb1ccc/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow.js
index 3c89f5c..9040bb8 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/workflow.js
@@ -39,8 +39,7 @@ var Workflow= Ember.Object.extend(FindNodeMixin,{
     this.schemaVersions = {};
     this.schemaVersions.workflowVersion = schemaVersions.getDefaultVersion('workflow');
     var actionsMap = new Map();
-    Object.keys(Constants.actions).forEach((key)=>{
-      var action = Constants.actions[key];
+    Constants.actions.forEach((action)=>{
       if(action.supportsSchema){
         actionsMap.set(action.name, schemaVersions.getDefaultVersion(action.name));
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/cacb1ccc/contrib/views/wfmanager/src/main/resources/ui/app/utils/common-utils.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/utils/common-utils.js b/contrib/views/wfmanager/src/main/resources/ui/app/utils/common-utils.js
index 8cc40d6..e793aac 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/utils/common-utils.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/utils/common-utils.js
@@ -28,6 +28,6 @@ export default Ember.Object.create({
     window.flowDesignerTestContext=context;
   },
   isSupportedAction(actionType){
-    return Object.values(Constants.actions).findBy('name', actionType)? true : false;
+    return Constants.actions.findBy('name', actionType)? true : false;
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/cacb1ccc/contrib/views/wfmanager/src/main/resources/ui/app/utils/constants.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/utils/constants.js b/contrib/views/wfmanager/src/main/resources/ui/app/utils/constants.js
index 9126819..1dd1c31 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/utils/constants.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/utils/constants.js
@@ -34,20 +34,20 @@ export default Ember.Object.create({
   useCytoscape : true,
   isProjectManagerEnabled : false,
   autoRestoreWorkflowEnabled : true,
-  actions:{
-    hiveAction:{name : "hive",supportsSchema : true, currentVersion:''},
-    hive2Action:{name : "hive2",supportsSchema : true, currentVersion:''},
-    javaAction:{name : "java",supportsSchema : false, currentVersion:''},
-    pigAction:{name : "pig",supportsSchema : false, currentVersion:''},
-    sqoopAction:{name : "sqoop",supportsSchema : true, currentVersion:''},
-    shellAction:{name : "shell",supportsSchema : true, currentVersion:''},
-    sparkAction:{name : "spark",supportsSchema : true, currentVersion:''},
-    mapReduceAction:{name : "map-reduce",supportsSchema : false, currentVersion:''},
-    subWorkflowAction:{name : "sub-workflow",supportsSchema : false, currentVersion:''},
-    distcpAction:{name : "distcp",supportsSchema : true, currentVersion:''},
-    sshAction:{name : "ssh",supportsSchema : false, currentVersion:''},
-    emailAction:{name : "email",supportsSchema : true, currentVersion:''}
-  },
+  actions: Ember.A([
+    {name : "hive",supportsSchema : true, currentVersion:''},
+    {name : "hive2",supportsSchema : true, currentVersion:''},
+    {name : "java",supportsSchema : false, currentVersion:''},
+    {name : "pig",supportsSchema : false, currentVersion:''},
+    {name : "sqoop",supportsSchema : true, currentVersion:''},
+    {name : "shell",supportsSchema : true, currentVersion:''},
+    {name : "spark",supportsSchema : true, currentVersion:''},
+    {name : "map-reduce",supportsSchema : false, currentVersion:''},
+    {name : "sub-workflow",supportsSchema : false, currentVersion:''},
+    {name : "distcp",supportsSchema : true, currentVersion:''},
+    {name : "ssh",supportsSchema : false, currentVersion:''},
+    {name : "email",supportsSchema : true, currentVersion:''}
+  ]),
   sparkMasterList :Ember.A([{value:'yarn-cluster',displayName:'Yarn Cluster'},
                             {value:'yarn-client',displayName:'Yarn Client'},
                             {value:'local',displayName:'Local'}]),


[20/50] [abbrv] ambari git commit: AMBARI-19619. Ranger load balancer url needs to be updated for dependent properties during HA. (Vivek Ratnavel Subramanian via Jaimin)

Posted by nc...@apache.org.
AMBARI-19619. Ranger load balancer url needs to be updated for dependent properties during HA. (Vivek Ratnavel Subramanian via Jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3ade3214
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3ade3214
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3ade3214

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 3ade321478499ba9eff7cb703b13760130a2cf60
Parents: 3871f4a
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Mon Jan 23 11:11:16 2017 -0800
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Mon Jan 23 11:11:16 2017 -0800

----------------------------------------------------------------------
 .../rangerAdmin/step4_controller.js             | 82 +++++++++++++++++++-
 ambari-web/app/messages.js                      |  2 +-
 2 files changed, 79 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3ade3214/ambari-web/app/controllers/main/admin/highAvailability/rangerAdmin/step4_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/rangerAdmin/step4_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/rangerAdmin/step4_controller.js
index 3c32af9..c03c680 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/rangerAdmin/step4_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/rangerAdmin/step4_controller.js
@@ -51,11 +51,29 @@ App.RAHighAvailabilityWizardStep4Controller = App.HighAvailabilityProgressPageCo
   },
 
   onLoadConfigsTags: function (data) {
+    var urlParams = [];
+    urlParams.push('(type=admin-properties&tag=' + data.Clusters.desired_configs['admin-properties'].tag + ')');
+    var siteNamesToFetch = [
+      'ranger-hdfs-security',
+      'ranger-yarn-security',
+      'ranger-hbase-security',
+      'ranger-hive-security',
+      'ranger-knox-security',
+      'ranger-kafka-security',
+      'ranger-kms-security',
+      'ranger-storm-security',
+      'ranger-atlas-security'
+    ];
+    siteNamesToFetch.map(function(siteName) {
+      if(siteName in data.Clusters.desired_configs) {
+        urlParams.push('(type=' + siteName + '&tag=' + data.Clusters.desired_configs[siteName].tag + ')');
+      }
+    });
     App.ajax.send({
       name: 'reassign.load_configs',
       sender: this,
       data: {
-        urlParams: '(type=admin-properties&tag=' + data.Clusters.desired_configs['admin-properties'].tag + ')'
+        urlParams: urlParams.join('|')
       },
       success: 'onLoadConfigs',
       error: 'onTaskError'
@@ -63,14 +81,70 @@ App.RAHighAvailabilityWizardStep4Controller = App.HighAvailabilityProgressPageCo
   },
 
   onLoadConfigs: function (data) {
+    var configs = [];
+    var self = this;
     data.items.findProperty('type', 'admin-properties').properties['policymgr_external_url'] = this.get('content.policymgrExternalURL');
-    var configData = this.reconfigureSites(['admin-properties'], data, Em.I18n.t('admin.highAvailability.step4.save.configuration.note').format(App.format.role('RANGER_ADMIN', false)));
+    configs.push({
+      Clusters: {
+        desired_config: this.reconfigureSites(['admin-properties'], data, Em.I18n.t('admin.highAvailability.step4.save.configuration.note').format(App.format.role('RANGER_ADMIN', false)))
+      }
+    });
+
+    var configsToChange = [
+      {
+        siteName: 'ranger-hdfs-security',
+        property: 'ranger.plugin.hdfs.policy.rest.url'
+      },
+      {
+        siteName: 'ranger-yarn-security',
+        property: 'ranger.plugin.yarn.policy.rest.url'
+      },
+      {
+        siteName: 'ranger-hbase-security',
+        property: 'ranger.plugin.hbase.policy.rest.url'
+      },
+      {
+        siteName: 'ranger-hive-security',
+        property: 'ranger.plugin.hive.policy.rest.url'
+      },
+      {
+        siteName: 'ranger-knox-security',
+        property: 'ranger.plugin.knox.policy.rest.url'
+      },
+      {
+        siteName: 'ranger-kafka-security',
+        property: 'ranger.plugin.kafka.policy.rest.url'
+      },
+      {
+        siteName: 'ranger-kms-security',
+        property: 'ranger.plugin.kms.policy.rest.url'
+      },
+      {
+        siteName: 'ranger-storm-security',
+        property: 'ranger.plugin.storm.policy.rest.url'
+      },
+      {
+        siteName: 'ranger-atlas-security',
+        property: 'ranger.plugin.atlas.policy.rest.url'
+      }
+    ];
+    configsToChange.map(function(item) {
+      var config = data.items.findProperty('type', item.siteName);
+      if(config) {
+        config.properties[item.property] = self.get('content.loadBalancerURL');
+        configs.push({
+          Clusters: {
+            desired_config: self.reconfigureSites([item.siteName], data, Em.I18n.t('admin.highAvailability.step4.save.configuration.note').format(App.format.role('RANGER_ADMIN', false)))
+          }
+        });
+      }
+    });
 
     App.ajax.send({
-      name: 'common.service.configurations',
+      name: 'common.service.multiConfigurations',
       sender: this,
       data: {
-        desired_config: configData
+        configs: configs
       },
       success: 'onSaveConfigs',
       error: 'onTaskError'

http://git-wip-us.apache.org/repos/asf/ambari/blob/3ade3214/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 961af55..83a19b0 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1607,7 +1607,7 @@ Em.I18n.translations = {
   'admin.ra_highAvailability.wizard.step4.header': 'Install, Start and Test',
   'admin.ra_highAvailability.wizard.step4.task0.title': 'Stop All Services',
   'admin.ra_highAvailability.wizard.step4.task1.title': 'Install Additional Ranger Admin',
-  'admin.ra_highAvailability.wizard.step4.task2.title': 'Reconfigure Ranger',
+  'admin.ra_highAvailability.wizard.step4.task2.title': 'Reconfigure Services',
   'admin.ra_highAvailability.wizard.step4.task3.title': 'Start All Services',
   'admin.ra_highAvailability.wizard.step4.notice.inProgress': 'Please wait while Ranger Admin HA is being deployed.',
   'admin.ra_highAvailability.wizard.step4.notice.completed': 'Ranger Admin HA has been enabled successfully.',
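
The controller change above first filters the ranger-*-security site names
down to those actually present in desired_configs, then joins the matching
(type=...&tag=...) filters with '|' into a single request. The original is
Ember JavaScript; a rough, hypothetical Java analogue of the same assembly
logic (Java 9+, all names illustrative):

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class UrlParamsDemo {
  public static void main(String[] args) {
    // Hypothetical desired-config tags, keyed by site name
    Map<String, String> desiredConfigs = new LinkedHashMap<>();
    desiredConfigs.put("admin-properties", "version1");
    desiredConfigs.put("ranger-hdfs-security", "version4");

    List<String> siteNamesToFetch = List.of(
        "ranger-hdfs-security", "ranger-yarn-security", "ranger-hbase-security");

    // admin-properties is always fetched; security sites only if present
    String urlParams = Stream.concat(
            Stream.of("admin-properties"),
            siteNamesToFetch.stream().filter(desiredConfigs::containsKey))
        .map(site -> "(type=" + site + "&tag=" + desiredConfigs.get(site) + ")")
        .collect(Collectors.joining("|"));

    // -> (type=admin-properties&tag=version1)|(type=ranger-hdfs-security&tag=version4)
    System.out.println(urlParams);
  }
}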


[19/50] [abbrv] ambari git commit: AMBARI-19678 Confirmation for adding host component from service summary page should be displayed once. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-19678 Confirmation for adding host component from service summary page should be displayed once. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3871f4a8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3871f4a8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3871f4a8

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 3871f4a8d31aff1f61bad695ec5eb884820638c0
Parents: 796658f
Author: ababiichuk <ab...@hortonworks.com>
Authored: Mon Jan 23 19:47:11 2017 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Mon Jan 23 20:36:07 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/controllers/main/host/details.js | 202 +++++++++++++++----
 ambari-web/app/controllers/main/service/item.js |  58 +-----
 .../host/details/addDeleteComponentPopup.hbs    |  34 +++-
 .../templates/main/service/add_host_popup.hbs   |  33 ---
 .../test/controllers/main/host/details_test.js  | 144 +++++++++++--
 5 files changed, 317 insertions(+), 154 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3871f4a8/ambari-web/app/controllers/main/host/details.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/host/details.js b/ambari-web/app/controllers/main/host/details.js
index 1fcc14d..093603e 100644
--- a/ambari-web/app/controllers/main/host/details.js
+++ b/ambari-web/app/controllers/main/host/details.js
@@ -89,27 +89,32 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
     'HIVE_METASTORE': {
       deletePropertyName: 'deleteHiveMetaStore',
       hostPropertyName: 'hiveMetastoreHost',
-      configsCallbackName: 'loadHiveConfigs'
+      configTagsCallbackName: 'loadHiveConfigs',
+      configsCallbackName: 'onLoadHiveConfigs'
     },
     'WEBHCAT_SERVER': {
       deletePropertyName: 'deleteWebHCatServer',
       hostPropertyName: 'webhcatServerHost',
-      configsCallbackName: 'loadWebHCatConfigs'
+      configTagsCallbackName: 'loadWebHCatConfigs',
+      configsCallbackName: 'onLoadHiveConfigs'
     },
     'HIVE_SERVER': {
       addPropertyName: 'addHiveServer',
       deletePropertyName: 'deleteHiveServer',
-      configsCallbackName: 'loadHiveConfigs'
+      configTagsCallbackName: 'loadHiveConfigs',
+      configsCallbackName: 'onLoadHiveConfigs'
     },
     'NIMBUS': {
       deletePropertyName: 'deleteNimbusHost',
       hostPropertyName: 'nimbusHost',
-      configsCallbackName: 'loadStormConfigs'
+      configTagsCallbackName: 'loadStormConfigs',
+      configsCallbackName: 'onLoadStormConfigs'
     },
     'RANGER_KMS_SERVER': {
       deletePropertyName: 'deleteRangerKMSServer',
       hostPropertyName: 'rangerKMSServerHost',
-      configsCallbackName: 'loadRangerConfigs'
+      configTagsCallbackName: 'loadRangerConfigs',
+      configsCallbackName: 'onLoadRangerConfigs'
     }
   },
 
@@ -158,6 +163,12 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
   isReconfigureRequired: false,
 
   /**
+   * Contains component-related config properties loaded from server
+   * @type {Object|null}
+   */
+  configs: null,
+
+  /**
    * Array of all properties affected by adding/deleting host component
    * @type {Array}
    */
@@ -189,16 +200,38 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
     manualKerberosWarning: App.get('router.mainAdminKerberosController.isManualKerberos') ?
       Em.I18n.t('hosts.host.manualKerberosWarning') : '',
     lastComponent: false,
-    lastComponentError: ''
+    lastComponentError: '',
+    hasHostsSelect: false,
+    selectedHost: null,
+    anyHostsWithoutComponent: true
   }),
 
-  clearConfigsChanges: function () {
+  saveLoadedConfigs: function (data) {
+    var configs = {
+      items: []
+    };
+    data.items.forEach(function (item) {
+      var configTypeObject = Em.getProperties(item, ['type', 'properties_attributes']),
+        properties = {};
+      Em.keys(item.properties).forEach(function (propertyName) {
+        properties[propertyName] = item.properties[propertyName];
+      });
+      configTypeObject.properties = properties;
+      configs.items.push(configTypeObject);
+    });
+    this.set('configs', configs);
+  },
+
+  clearConfigsChanges: function (shouldKeepLoadedConfigs) {
     var arrayNames = ['allPropertiesToChange', 'recommendedPropertiesToChange', 'requiredPropertiesToChange', 'groupedPropertiesToChange'];
     this.abortRequests();
     arrayNames.forEach(function (arrayName) {
       this.get(arrayName).clear();
     }, this);
     this.set('isReconfigureRequired', false);
+    if (!shouldKeepLoadedConfigs) {
+      this.set('configs', null);
+    }
   },
 
   applyConfigsCustomization: function () {
@@ -456,7 +489,6 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
     var component = event.context;
     var componentName = component.get('componentName');
     var displayName = component.get('displayName');
-    var hostName = event.selectedHost || this.get('content.hostName');
     var returnFunc;
     var componentsMapItem = this.get('addDeleteComponentsMap')[componentName];
     if (componentsMapItem) {
@@ -465,7 +497,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
       }
       this.clearConfigsChanges();
       this.set('isReconfigureRequired', true);
-      returnFunc = this.showDeleteComponentPopup(component, componentsMapItem.configsCallbackName);
+      returnFunc = this.showDeleteComponentPopup(component);
     } else if (componentName === 'JOURNALNODE') {
       returnFunc = App.showConfirmationPopup(function () {
         App.router.transitionTo('main.services.manageJournalNode');
@@ -476,18 +508,20 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
     return returnFunc;
   },
 
-  showDeleteComponentPopup: function (component, callbackName) {
+  showDeleteComponentPopup: function (component) {
     var self = this,
       isLastComponent = (this.getTotalComponent(component) === 1),
       componentName = component.get('componentName'),
       componentDisplayName = component.get('displayName'),
-      commonMessage = Em.I18n.t('hosts.host.deleteComponent.popup.msg1').format(componentDisplayName);
-    if (this.get('isReconfigureRequired')) {
-      this.set('isConfigsLoadingInProgress', true);
-      this.isServiceMetricsLoaded(function () {
-        self.loadConfigs(callbackName);
-      });
+      componentsMapItem = this.get('addDeleteComponentsMap')[componentName],
+      commonMessage = Em.I18n.t('hosts.host.deleteComponent.popup.msg1').format(componentDisplayName),
+      configTagsCallbackName,
+      configsCallbackName;
+    if (componentsMapItem) {
+      configTagsCallbackName = componentsMapItem.configTagsCallbackName;
+      configsCallbackName = componentsMapItem.configsCallbackName;
     }
+    this.loadComponentRelatedConfigs(configTagsCallbackName, configsCallbackName);
     return App.ModalPopup.show({
       header: Em.I18n.t('popup.confirmation.commonHeader'),
       controller: self,
@@ -711,9 +745,10 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
       returnFunc,
       self = this,
       component = event.context,
-      hostName = event.selectedHost || this.get('content.hostName'),
+      hostName = this.get('content.hostName'),
       componentName = component.get('componentName'),
-      missedComponents = event.selectedHost ? [] : this.checkComponentDependencies(componentName, {
+      hasHostsSelect = event.hasOwnProperty('selectedHost'),
+      missedComponents = hasHostsSelect ? [] : this.checkComponentDependencies(componentName, {
         scope: 'host',
         installedComponents: this.get('content.hostComponents').mapProperty('componentName')
       }),
@@ -742,29 +777,40 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
       }
       this.clearConfigsChanges();
       this.set('isReconfigureRequired', true);
-      returnFunc = self.showAddComponentPopup(component, hostName, null, componentsMapItem.configsCallbackName, primary);
+      returnFunc = self.showAddComponentPopup(component, hostName, null, primary, hasHostsSelect);
     } else if (componentName === 'JOURNALNODE') {
       returnFunc = App.showConfirmationPopup(function () {
         App.router.transitionTo('main.services.manageJournalNode');
       }, Em.I18n.t('hosts.host.addComponent.' + componentName) + manualKerberosWarning);
     } else {
       returnFunc = this.showAddComponentPopup(component, hostName, function () {
+        if (hasHostsSelect) {
+          hostName = self.get('content.hostName');
+        }
         self.installHostComponentCall(hostName, component);
-      });
+      }, null, hasHostsSelect);
     }
     return returnFunc;
   },
 
-  showAddComponentPopup: function (component, hostName, primary, callbackName, primaryOnReconfigure) {
+  showAddComponentPopup: function (component, hostName, primary, primaryOnReconfigure, hasHostsSelect) {
     var self = this,
       componentName = component.get('componentName'),
       componentDisplayName = component.get('displayName'),
-      commonMessage = Em.I18n.t('hosts.host.addComponent.msg').format(componentDisplayName);
-    if (this.get('isReconfigureRequired')) {
-      this.set('isConfigsLoadingInProgress', true);
-      this.isServiceMetricsLoaded(function () {
-        self.loadConfigs(callbackName);
-      });
+      componentsMapItem = this.get('addDeleteComponentsMap')[componentName],
+      commonMessage = Em.I18n.t('hosts.host.addComponent.msg').format(componentDisplayName),
+      configTagsCallbackName,
+      configsCallbackName;
+    if (componentsMapItem) {
+      configTagsCallbackName = componentsMapItem.configTagsCallbackName || 'loadConfigsSuccessCallback';
+      configsCallbackName = componentsMapItem.configsCallbackName || 'saveZkConfigs';
+    }
+    if (hasHostsSelect) {
+      if (this.get('isReconfigureRequired')) {
+        this.set('isConfigsLoadingInProgress', true);
+      }
+    } else {
+      this.loadComponentRelatedConfigs(configTagsCallbackName, configsCallbackName);
     }
     return App.ModalPopup.show({
       header: Em.I18n.t('popup.confirmation.commonHeader'),
@@ -776,7 +822,51 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
       }.property('controller.hasPropertiesToChange'),
       primary: Em.I18n.t('hosts.host.addComponent.popup.confirm'),
       bodyClass: self.get('addDeleteComponentPopupBody').extend({
-        commonMessage: commonMessage
+        commonMessage: commonMessage,
+        hasHostsSelect: hasHostsSelect,
+        addComponentMsg: Em.I18n.t('hosts.host.addComponent.msg').format(componentDisplayName),
+        selectHostMsg: Em.I18n.t('services.summary.selectHostForComponent').format(componentDisplayName),
+        thereIsNoHostsMsg: Em.I18n.t('services.summary.allHostsAlreadyRunComponent').format(componentDisplayName),
+        hostsWithoutComponent: function () {
+          if (this.get('hasHostsSelect')) {
+            var hostsWithComponent = App.HostComponent.find().filterProperty('componentName', componentName).mapProperty('hostName'),
+              result = App.get('allHostNames');
+            hostsWithComponent.forEach(function (host) {
+              result = result.without(host);
+            });
+            return result;
+          } else {
+            return [];
+          }
+        }.property('hasHostsSelect'),
+        anyHostsWithoutComponent: Em.computed.or('!hasHostsSelect', 'hostsWithoutComponent.length'),
+        selectedHostObserver: function () {
+          hostName = this.get('selectedHost');
+          self.clearConfigsChanges(true);
+          if (!self.get('content')) {
+            self.set('content', {});
+          }
+          self.setProperties({
+            'isReconfigureRequired': !!componentsMapItem,
+            'content.hostName': hostName
+          });
+          if (componentsMapItem) {
+            var configs = self.get('configs'),
+              params = configs && configs.params || {};
+            if (componentsMapItem.hostPropertyName) {
+              self.set(componentsMapItem.hostPropertyName, hostName);
+            }
+            if (componentsMapItem.addPropertyName) {
+              self.set(componentsMapItem.addPropertyName, true);
+            }
+            if (configs) {
+              this.set('isConfigsLoadingInProgress', true);
+              self[configsCallbackName](configs, null, params);
+            } else {
+              self.loadComponentRelatedConfigs(configTagsCallbackName, configsCallbackName);
+            }
+          }
+        }.observes('selectedHost')
       }),
       disablePrimary: Em.computed.and('controller.isReconfigureRequired', 'controller.isConfigsLoadingInProgress'),
       onPrimary: function () {
@@ -803,6 +893,16 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
     });
   },
 
+  loadComponentRelatedConfigs: function (configTagsCallbackName, configsCallbackName) {
+    var self = this;
+    if (this.get('isReconfigureRequired')) {
+      this.set('isConfigsLoadingInProgress', true);
+      this.isServiceMetricsLoaded(function () {
+        self.loadConfigs(configTagsCallbackName, configsCallbackName);
+      });
+    }
+  },
+
   /**
    * Success callback for install host component request (sent in <code>addNewComponentSuccessCallback</code>)
    * @param {object} data
@@ -870,16 +970,18 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
   /**
    * Success callback for Storm load configs request
    * @param {object} data
+   * @param {object} opt
+   * @param {object} params
    * @method loadStormConfigs
    */
-  loadStormConfigs: function (data) {
+  loadStormConfigs: function (data, opt, params) {
     var request = App.ajax.send({
       name: 'admin.get.all_configurations',
       sender: this,
       data: {
         urlParams: '(type=storm-site&tag=' + data.Clusters.desired_configs['storm-site'].tag + ')'
       },
-      success: 'onLoadStormConfigs'
+      success: params.callback
     });
     this.trackRequest(request);
   },
@@ -975,6 +1077,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
       attributes = {},
       propertiesToChange = this.get('allPropertiesToChange');
 
+    this.saveLoadedConfigs(data);
     data.items.forEach(function (item) {
       configs[item.type] = item.properties;
       attributes[item.type] = item.properties_attributes || {};
@@ -1015,9 +1118,11 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
   /**
    * Success callback for load configs request
    * @param {object} data
+   * @param {object} opt
+   * @param {object} params
    * @method loadWebHCatConfigs
    */
-  loadWebHCatConfigs: function (data) {
+  loadWebHCatConfigs: function (data, opt, params) {
     var request = App.ajax.send({
       name: 'admin.get.all_configurations',
       sender: this,
@@ -1030,7 +1135,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
           '(type=core-site&tag=' + data.Clusters.desired_configs['core-site'].tag + ')'
         ].join('|')
       },
-      success: 'onLoadHiveConfigs'
+      success: params.callback
     });
     this.trackRequest(request);
     return request;
@@ -1043,7 +1148,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
    * @param {object} params
    * @method loadHiveConfigs
    */
-  loadHiveConfigs: function (data) {
+  loadHiveConfigs: function (data, opt, params) {
     var request = App.ajax.send({
       name: 'admin.get.all_configurations',
       sender: this,
@@ -1055,7 +1160,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
           '(type=core-site&tag=' + data.Clusters.desired_configs['core-site'].tag + ')'
         ].join('|')
       },
-      success: 'onLoadHiveConfigs'
+      success: params.callback
     });
     this.trackRequest(request);
     return request;
@@ -1082,6 +1187,10 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
       hiveMetastorePort: ""
     };
     var initializer = params.webHCat ? App.AddWebHCatComponentsInitializer : App.AddHiveComponentsInitializer;
+    this.saveLoadedConfigs(data);
+    this.set('configs.params', {
+      webHCat: params.webHCat
+    });
     data.items.forEach(function (item) {
       configs[item.type] = item.properties;
       attributes[item.type] = item.properties_attributes || {};
@@ -1275,16 +1384,18 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
   /**
    * Success callback for load configs request
    * @param {object} data
+   * @param {object} opt
+   * @param {object} params
    * @method loadRangerConfigs
    */
-  loadRangerConfigs: function (data) {
+  loadRangerConfigs: function (data, opt, params) {
     var request = App.ajax.send({
       name: 'admin.get.all_configurations',
       sender: this,
       data: {
         urlParams: '(type=core-site&tag=' + data.Clusters.desired_configs['core-site'].tag + ')|(type=hdfs-site&tag=' + data.Clusters.desired_configs['hdfs-site'].tag + ')|(type=kms-env&tag=' + data.Clusters.desired_configs['kms-env'].tag + ')'
       },
-      success: 'onLoadRangerConfigs'
+      success: params.callback
     });
     this.trackRequest(request);
   },
@@ -1325,6 +1436,8 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
       ],
       propertiesToChange = this.get('allPropertiesToChange');
 
+    this.saveLoadedConfigs(data);
+
     properties.forEach(function (property) {
       var typeConfigs = data.items.findProperty('type', property.type).properties,
         currentValue = typeConfigs[property.name],
@@ -1453,11 +1566,14 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
    * This is required to make sure that service metrics API determining the HA state of components is loaded
    * @method loadConfigs
    */
-  loadConfigs: function (callback) {
+  loadConfigs: function (configTagsCallback, configsCallback) {
     var request = App.ajax.send({
       name: 'config.tags',
       sender: this,
-      success: callback ? callback : 'loadConfigsSuccessCallback',
+      data: {
+        callback: configsCallback || 'saveZkConfigs'
+      },
+      success: configTagsCallback || 'loadConfigsSuccessCallback',
       error: 'onLoadConfigsErrorCallback'
     });
     this.trackRequest(request);
@@ -1474,9 +1590,11 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
   /**
    * Success callback for load configs request
    * @param {object} data
+   * @param {object} opt
+   * @param {object} params
    * @method loadConfigsSuccessCallback
    */
-  loadConfigsSuccessCallback: function (data) {
+  loadConfigsSuccessCallback: function (data, opt, params) {
     var urlParams = this.constructConfigUrlParams(data);
     if (urlParams.length > 0) {
       var request = App.ajax.send({
@@ -1485,7 +1603,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
         data: {
           urlParams: urlParams.join('|')
         },
-        success: 'saveZkConfigs'
+        success: params.callback || 'saveZkConfigs'
       });
       this.trackRequest(request);
       return true;
@@ -1524,6 +1642,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
   saveZkConfigs: function (data) {
     var configs = {};
     var attributes = {};
+    this.saveLoadedConfigs(data);
     data.items.forEach(function (item) {
       configs[item.type] = item.properties;
       attributes[item.type] = item.properties_attributes || {};
@@ -2524,7 +2643,6 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
     });
   },
   deleteHostSuccessCallback: function (data, rq, requestBody) {
-    var self = this;
     App.router.get('updateController').updateHost(function () {
       App.router.transitionTo('hosts.index');
     });
@@ -2535,7 +2653,7 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
   deleteHostErrorCallback: function (xhr, textStatus, errorThrown, opt) {
     xhr.responseText = "{\"message\": \"" + xhr.statusText + "\"}";
     var self = this;
-    var callback =   function () {
+    var callback = function () {
       self.loadConfigs();
     };
     self.isServiceMetricsLoaded(callback);

http://git-wip-us.apache.org/repos/asf/ambari/blob/3871f4a8/ambari-web/app/controllers/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/item.js b/ambari-web/app/controllers/main/service/item.js
index 3a60137..9758ac3 100644
--- a/ambari-web/app/controllers/main/service/item.js
+++ b/ambari-web/app/controllers/main/service/item.js
@@ -937,63 +937,15 @@ App.MainServiceItemController = Em.Controller.extend(App.SupportClientConfigsDow
    * @param componentName
    */
   addComponent: function (componentName) {
-    var self = this;
     var component = App.StackServiceComponent.find().findProperty('componentName', componentName);
-    var componentDisplayName = component.get('displayName');
 
     App.get('router.mainAdminKerberosController').getKDCSessionState(function () {
-      return App.ModalPopup.show({
-        primary: Em.computed.ifThenElse('anyHostsWithoutComponent', Em.I18n.t('hosts.host.addComponent.popup.confirm'), undefined),
-
-        header: Em.I18n.t('popup.confirmation.commonHeader'),
-
-        addComponentMsg: Em.I18n.t('hosts.host.addComponent.msg').format(componentDisplayName),
-
-        selectHostMsg: Em.computed.i18nFormat('services.summary.selectHostForComponent', 'componentDisplayName'),
-
-        thereIsNoHostsMsg: Em.computed.i18nFormat('services.summary.allHostsAlreadyRunComponent', 'componentDisplayName'),
-
-        hostsWithoutComponent: function () {
-          var hostsWithComponent = App.HostComponent.find().filterProperty('componentName', componentName).mapProperty('hostName');
-          var result = App.get('allHostNames');
-
-          hostsWithComponent.forEach(function (host) {
-            result = result.without(host);
-          });
-
-          return result;
-        }.property(),
-
-        anyHostsWithoutComponent: Em.computed.gt('hostsWithoutComponent.length', 0),
-
-        selectedHost: null,
-
-        componentName: componentName,
-
-        componentDisplayName: componentDisplayName,
-
-        bodyClass: Em.View.extend({
-          templateName: require('templates/main/service/add_host_popup')
-        }),
-
-        onPrimary: function () {
-          var selectedHost = this.get('selectedHost');
-
-          // Install
-          if (['HIVE_METASTORE', 'RANGER_KMS_SERVER', 'NIMBUS'].contains(component.get('componentName')) && !!selectedHost) {
-            App.router.get('mainHostDetailsController').addComponentWithCheck(
-                {
-                  context: component,
-                  selectedHost: selectedHost
-                }
-            );
-          } else {
-            self.installHostComponentCall(selectedHost, component);
-          }
-
-          this.hide();
+      App.router.get('mainHostDetailsController').addComponentWithCheck(
+        {
+          context: component,
+          selectedHost: null
         }
-      });
+      );
     });
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/3871f4a8/ambari-web/app/templates/main/host/details/addDeleteComponentPopup.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/host/details/addDeleteComponentPopup.hbs b/ambari-web/app/templates/main/host/details/addDeleteComponentPopup.hbs
index ed8445e..2aa5e71 100644
--- a/ambari-web/app/templates/main/host/details/addDeleteComponentPopup.hbs
+++ b/ambari-web/app/templates/main/host/details/addDeleteComponentPopup.hbs
@@ -24,17 +24,29 @@
     </div>
   </div>
 {{/if}}
-{{#if controller.isReconfigureRequired}}
-  {{#if controller.isConfigsLoadingInProgress}}
-    {{view App.SpinnerView}}
-  {{else}}
-    {{view.commonMessage}}
-    {{#if controller.hasPropertiesToChange}}
-      {{view App.DependentConfigsListView isAfterRecommendation=false recommendationsBinding="controller.recommendedPropertiesToChange" requiredChangesBinding="controller.requiredPropertiesToChange"}}
+{{#if view.anyHostsWithoutComponent}}
+  {{#if view.hasHostsSelect}}
+    <div>{{view.selectHostMsg}}</div>
+    <div class="row">
+      <div class="col-md-12">
+        {{view Ember.Select contentBinding="view.hostsWithoutComponent" selectionBinding="view.selectedHost" classNames="form-control"}}
+      </div>
+    </div>
+  {{/if}}
+  {{#if controller.isReconfigureRequired}}
+    {{#if controller.isConfigsLoadingInProgress}}
+      {{view App.SpinnerView}}
+    {{else}}
+      {{view.commonMessage}}
+      {{#if controller.hasPropertiesToChange}}
+        {{view App.DependentConfigsListView isAfterRecommendation=false recommendationsBinding="controller.recommendedPropertiesToChange" requiredChangesBinding="controller.requiredPropertiesToChange"}}
+      {{/if}}
+      {{{view.manualKerberosWarning}}}
     {{/if}}
-    {{{view.manualKerberosWarning}}}
+  {{else}}
+    <div>{{view.commonMessage}}</div>
+    <div>{{{view.manualKerberosWarning}}}</div>
   {{/if}}
 {{else}}
-  <div>{{view.commonMessage}}</div>
-  <div>{{{view.manualKerberosWarning}}}</div>
-{{/if}}
+  {{view.thereIsNoHostsMsg}}
+{{/if}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/3871f4a8/ambari-web/app/templates/main/service/add_host_popup.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/add_host_popup.hbs b/ambari-web/app/templates/main/service/add_host_popup.hbs
deleted file mode 100644
index e5f2c99..0000000
--- a/ambari-web/app/templates/main/service/add_host_popup.hbs
+++ /dev/null
@@ -1,33 +0,0 @@
-{{!
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-}}
-
-{{#if anyHostsWithoutComponent}}
-  <p>{{selectHostMsg}}</p>
-  <div class="row">
-    <div class="col-md-12">
-      {{view Ember.Select contentBinding="hostsWithoutComponent" selectionBinding="selectedHost" classNames="form-control"}}
-    </div>
-  </div>
-  <div class="row">
-    <div class="col-md-12">
-      <p>{{addComponentMsg}}</p>
-    </div>
-  </div>
-{{else}}
-  <p>{{thereIsNoHostsMsg}}</p>
-{{/if}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/3871f4a8/ambari-web/test/controllers/main/host/details_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/host/details_test.js b/ambari-web/test/controllers/main/host/details_test.js
index ff5887c..4eccf95 100644
--- a/ambari-web/test/controllers/main/host/details_test.js
+++ b/ambari-web/test/controllers/main/host/details_test.js
@@ -594,7 +594,7 @@ describe('App.MainHostDetailsController', function () {
             tag: 'tag'
           }
         }
-      }});
+      }}, null, {});
       var args = testHelpers.findAjaxRequest('name', 'admin.get.all_configurations');
       expect(args[0]).exists;
       expect(args[0].sender).to.be.eql(controller);
@@ -619,6 +619,7 @@ describe('App.MainHostDetailsController', function () {
       sinon.stub(controller, 'getStormNimbusHosts').returns("host1");
       sinon.stub(controller, 'updateZkConfigs', Em.K);
       sinon.stub(controller, 'saveConfigsBatch', Em.K);
+      sinon.stub(controller, 'saveLoadedConfigs', Em.K);
       controller.set('nimbusHost', 'host2');
       controller.onLoadStormConfigs(data);
     });
@@ -626,6 +627,7 @@ describe('App.MainHostDetailsController', function () {
       controller.getStormNimbusHosts.restore();
       controller.updateZkConfigs.restore();
       controller.saveConfigsBatch.restore();
+      controller.saveLoadedConfigs.restore();
     });
     it("updateZkConfigs called with valid arguments", function() {
       expect(controller.updateZkConfigs.calledWith({'storm-site': {
@@ -665,7 +667,7 @@ describe('App.MainHostDetailsController', function () {
             tag: 'tag'
           }
         }
-      }});
+      }}, null, {});
       var args = testHelpers.findAjaxRequest('name', 'admin.get.all_configurations');
       expect(args[0]).exists;
       expect(args[0].sender).to.be.eql(controller);
@@ -689,7 +691,7 @@ describe('App.MainHostDetailsController', function () {
             tag: 'tag'
           }
         }
-      }});
+      }}, null, {});
       var args = testHelpers.findAjaxRequest('name', 'admin.get.all_configurations');
       expect(args[0]).exists;
       expect(args[0].sender).to.be.eql(controller);
@@ -952,7 +954,7 @@ describe('App.MainHostDetailsController', function () {
     });
 
     it('url params is empty', function () {
-      expect(controller.loadConfigsSuccessCallback()).to.be.false;
+      expect(controller.loadConfigsSuccessCallback(null, null, {})).to.be.false;
       var args = testHelpers.findAjaxRequest('name', 'reassign.load_configs');
       expect(args).not.exists;
     });
@@ -1018,6 +1020,7 @@ describe('App.MainHostDetailsController', function () {
     beforeEach(function () {
       sinon.stub(controller, 'saveConfigsBatch', Em.K);
       sinon.stub(controller, 'updateZkConfigs', Em.K);
+      sinon.stub(controller, 'saveLoadedConfigs', Em.K);
       sinon.stub(App.Service, 'find', function() {
         return [
           Em.Object.create({ serviceName: 'HIVE' }),
@@ -1034,6 +1037,7 @@ describe('App.MainHostDetailsController', function () {
       App.Service.find.restore();
       controller.updateZkConfigs.restore();
       controller.saveConfigsBatch.restore();
+      controller.saveLoadedConfigs.restore();
     });
 
       it('configs for YARN', function () {
@@ -3270,11 +3274,13 @@ describe('App.MainHostDetailsController', function () {
     ];
 
     beforeEach(function () {
-      sinon.spy(controller, 'saveConfigsBatch')
+      sinon.spy(controller, 'saveConfigsBatch');
+      sinon.stub(controller, 'saveLoadedConfigs', Em.K);
     });
 
     afterEach(function () {
       controller.saveConfigsBatch.restore();
+      controller.saveLoadedConfigs.restore();
     });
 
     cases.forEach(function (item) {
@@ -3518,10 +3524,13 @@ describe('App.MainHostDetailsController', function () {
 
     beforeEach(function() {
       sinon.stub(controller, 'saveConfigsBatch', Em.K);
+      sinon.stub(controller, 'saveLoadedConfigs', Em.K);
+      controller.set('configs', {});
     });
 
     afterEach(function() {
       controller.saveConfigsBatch.restore();
+      controller.saveLoadedConfigs.restore();
     });
 
     var makeHostComponentModel = function(componentName, hostNames) {
@@ -3960,29 +3969,134 @@ describe('App.MainHostDetailsController', function () {
         recommendedPropertiesToChange: [{}],
         requiredPropertiesToChange: [{}],
         groupedPropertiesToChange: [{}],
-        isReconfigureRequired: true
+        isReconfigureRequired: true,
+        configs: {}
       });
-      controller.clearConfigsChanges();
     });
 
     afterEach(function () {
       controller.abortRequests.restore();
     });
 
-    it('allPropertiesToChange', function () {
-      expect(controller.get('allPropertiesToChange')).to.have.length(0);
+    describe('default case', function () {
+
+      beforeEach(function () {
+        controller.clearConfigsChanges();
+      });
+
+      it('allPropertiesToChange', function () {
+        expect(controller.get('allPropertiesToChange')).to.have.length(0);
+      });
+
+      it('recommendedPropertiesToChange', function () {
+        expect(controller.get('recommendedPropertiesToChange')).to.have.length(0);
+      });
+
+      it('groupedPropertiesToChange', function () {
+        expect(controller.get('groupedPropertiesToChange')).to.have.length(0);
+      });
+
+      it('isReconfigureRequired', function () {
+        expect(controller.get('isReconfigureRequired')).to.be.false;
+      });
+
+      it('configs', function () {
+        expect(controller.get('configs')).to.be.null;
+      });
+
     });
 
-    it('recommendedPropertiesToChange', function () {
-      expect(controller.get('recommendedPropertiesToChange')).to.have.length(0);
+    describe('no loaded configs cleanup', function () {
+
+      beforeEach(function () {
+        controller.clearConfigsChanges(true);
+      });
+
+      it('configs shouldn\'t be cleared', function () {
+        expect(controller.get('configs')).to.not.be.null;
+      });
+
     });
 
-    it('groupedPropertiesToChange', function () {
-      expect(controller.get('groupedPropertiesToChange')).to.have.length(0);
+  });
+
+  describe('#saveLoadedConfigs', function () {
+
+    var data = {
+      items: [
+        {
+          type: 't0',
+          properties: {
+            p0: 'v0',
+            p1: 'v1'
+          },
+          properties_attributes: {}
+        },
+        {
+          type: 't1',
+          properties: {
+            p2: 'v2',
+            p3: 'v3'
+          },
+          properties_attributes: {}
+        }
+      ]
+    };
+
+    it('should store data in configs object', function () {
+      controller.set('configs', null);
+      controller.saveLoadedConfigs(data);
+      expect(controller.get('configs')).to.eql(data);
     });
 
-    it('isReconfigureRequired', function () {
-      expect(controller.get('isReconfigureRequired')).to.be.false;
+  });
+
+  describe('#loadComponentRelatedConfigs', function () {
+
+    var testCases = [
+      {
+        isReconfigureRequired: true,
+        loadConfigsCallCount: 1,
+        isConfigsLoadingInProgress: true,
+        message: 'reconfigure required'
+      },
+      {
+        isReconfigureRequired: false,
+        loadConfigsCallCount: 0,
+        isConfigsLoadingInProgress: false,
+        message: 'no reconfigure required'
+      }
+    ];
+
+    testCases.forEach(function (test) {
+
+      describe(test.message, function () {
+
+        beforeEach(function () {
+          sinon.stub(controller, 'isServiceMetricsLoaded', Em.clb);
+          sinon.stub(controller, 'loadConfigs', Em.K);
+          controller.setProperties({
+            isReconfigureRequired: test.isReconfigureRequired,
+            isConfigsLoadingInProgress: false
+          });
+          controller.loadComponentRelatedConfigs();
+        });
+
+        afterEach(function () {
+          controller.isServiceMetricsLoaded.restore();
+          controller.loadConfigs.restore();
+        });
+
+        it('loadConfigs', function () {
+          expect(controller.loadConfigs.callCount).to.equal(test.loadConfigsCallCount);
+        });
+
+        it('isConfigsLoadingInProgress', function () {
+          expect(controller.get('isConfigsLoadingInProgress')).to.equal(test.isConfigsLoadingInProgress);
+        });
+
+      });
+
     });
 
   });
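
The three hunks above exercise controller behavior that is easier to follow side by side. A minimal Ember sketch of the methods under test, inferred purely from these assertions rather than copied from the Ambari source, could look like this:

  // Sketch only: property and method names come from the tests above;
  // the bodies are reconstructed from what the assertions require.
  App.MainHostDetailsController = Em.Controller.extend({
    configs: null,
    isReconfigureRequired: false,
    isConfigsLoadingInProgress: false,

    // Keep a copy of the loaded desired configs for later restore/compare.
    saveLoadedConfigs: function (data) {
      this.set('configs', data);
    },

    // Reset pending configuration changes; keep the loaded configs only
    // when explicitly asked to.
    clearConfigsChanges: function (shouldKeepLoadedConfigs) {
      this.get('allPropertiesToChange').clear();
      this.get('recommendedPropertiesToChange').clear();
      this.get('requiredPropertiesToChange').clear();
      this.get('groupedPropertiesToChange').clear();
      this.set('isReconfigureRequired', false);
      if (!shouldKeepLoadedConfigs) {
        this.set('configs', null);
      }
    },

    // Load component-related configs only when a reconfiguration is
    // actually required; flag the loading state for the UI.
    loadComponentRelatedConfigs: function (callback) {
      if (this.get('isReconfigureRequired')) {
        this.set('isConfigsLoadingInProgress', true);
        this.isServiceMetricsLoaded(function () {
          this.loadConfigs(callback);
        }.bind(this));
      }
    }
  });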


[29/50] [abbrv] ambari git commit: AMBARI-19592 : Create grafana dashboards for Druid Metrics and configure druid to send metrics to AMS. (Nishant Bangarwa via avijayan)

Posted by nc...@apache.org.
AMBARI-19592 : Create grafana dashboards for Druid Metrics and configure druid to send metrics to AMS. (Nishant Bangarwa via avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ad0f4ecc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ad0f4ecc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ad0f4ecc

Branch: refs/heads/branch-dev-patch-upgrade
Commit: ad0f4ecc67e39d109449e945b5108440059b4240
Parents: d0dc19e
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Tue Jan 24 10:52:56 2017 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Jan 24 10:52:56 2017 -0800

----------------------------------------------------------------------
 .../ambari-metrics/datasource.js                |  97 ++
 .../HDP/grafana-druid-home.json                 | 995 +++++++++++++++++++
 .../HDP/grafana-druid-ingestion.json            | 776 +++++++++++++++
 .../HDP/grafana-druid-query.json                | 858 ++++++++++++++++
 .../DRUID/0.9.2/configuration/druid-common.xml  |  57 ++
 .../DRUID/0.9.2/package/scripts/params.py       |  36 +
 .../stacks/HDP/2.6/services/stack_advisor.py    |   3 +
 7 files changed, 2822 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ad0f4ecc/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js b/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
index a1e6164..3a0fb66 100644
--- a/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
+++ b/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
@@ -136,6 +136,12 @@ define([
             if(!_.isEmpty(templateSrv.variables[1]) && templateSrv.variables[1].name === "component") {
               alias = alias + ' on ' + target.sTopology + ' for ' + target.sComponent;
             }
+
+            // Aliases for Druid Datasources.
+            if(!_.isEmpty(templateSrv.variables) && templateSrv.variables[0].query === "druidDataSources" &&
+                        !templateSrv.variables[1]) {
+              alias = alias.replace('$druidDataSource', target.sDataSource);
+            }
             return function (res) {
               console.log('processing metric ' + target.metric);
               if (!res.metrics[0] || target.hide) {
@@ -320,6 +326,19 @@ define([
             );
           };
 
+          // Druid metric queries against the AMS timeline endpoint.
+          var getDruidData = function(target) {
+            var precision = target.precision === 'default' || typeof target.precision === 'undefined' ? '' : '&precision='
+            + target.precision;
+            var metricAggregator = target.aggregator === "none" ? '' : '._' + target.aggregator;
+            var metricTransform = !target.transform || target.transform === "none" ? '' : '._' + target.transform;
+            var seriesAggregator = !target.seriesAggregator || target.seriesAggregator === "none" ? '' : '&seriesAggregateFunction=' + target.seriesAggregator;
+            return backendSrv.get(self.url + '/ws/v1/timeline/metrics?metricNames=' + target.sDataSourceMetric + metricTransform
+                          + metricAggregator + '&appId=druid&startTime=' + from + '&endTime=' + to + precision + seriesAggregator).then(
+                          allHostMetricsData(target)
+            );
+          };
+
           // Time Ranges
           var from = Math.floor(options.range.from.valueOf() / 1000);
           var to = Math.floor(options.range.to.valueOf() / 1000);
@@ -469,6 +488,23 @@ define([
               }));
             }
 
+            //Templatized Dashboards for Druid
+            if (templateSrv.variables[0].query === "druidDataSources" && !templateSrv.variables[1]) {
+              var allDataSources = templateSrv.variables.filter(function(variable) { return variable.query === "druidDataSources";});
+              var selectedDataSources = (_.isEmpty(allDataSources)) ? "" : allDataSources[0].options.filter(function(dataSource)
+                            { return dataSource.selected; }).map(function(dataSourceName) { return dataSourceName.value; });
+               selectedDataSources = templateSrv._values.druidDataSources.lastIndexOf('}') > 0 ? templateSrv._values.druidDataSources.slice(1,-1) :
+                                              templateSrv._values.druidDataSources;
+              var selectedDataSource = selectedDataSources.split(',');
+              _.forEach(selectedDataSource, function(processDataSource) {
+                metricsPromises.push(_.map(options.targets, function(target) {
+                  target.sDataSource = processDataSource;
+                  target.sDataSourceMetric = target.metric.replace('*', target.sDataSource);
+                  return getDruidData(target);
+                }));
+              });
+            }
+
             // To speed up querying on templatized dashboards.
             if (templateSrv.variables[1] && templateSrv.variables[1].name === "hosts") {
               var allHosts = templateSrv._values.hosts.lastIndexOf('}') > 0 ? templateSrv._values.hosts.slice(1,-1) :
@@ -737,6 +773,67 @@ define([
                 });
               });
           }
+
+          // Templated Variable for DruidServices.
+          // It will search the cluster and populate the druid service names.
+          if(interpolated === "druidServices") {
+            return this.initMetricAppidMapping()
+              .then(function () {
+                var druidMetrics = allMetrics["druid"];
+                // Assumption: each node always emits jvm metrics
+                var extractNodeTypes = druidMetrics.filter(/./.test.bind(new RegExp("jvm/gc/time", 'g')));
+                var nodeTypes = _.map(extractNodeTypes, function(metricName) {
+                  return metricName.substring(0, metricName.indexOf("."));
+                });
+                nodeTypes = _.sortBy(_.uniq(nodeTypes));
+                return _.map(nodeTypes, function (nodeType) {
+                  return {
+                    text: nodeType
+                  };
+                });
+              });
+          }
+
+          // Templated Variable for Druid datasources.
+          // It will search the cluster and populate the druid datasources.
+          if(interpolated === "druidDataSources") {
+            return this.initMetricAppidMapping()
+              .then(function () {
+                var druidMetrics = allMetrics["druid"];
+                // Assumption: query/time is emitted for each datasource
+                var extractDataSources = druidMetrics.filter(/./.test.bind(new RegExp("query/time", 'g')));
+                var dataSources = _.map(extractDataSources, function(metricName) {
+                  return metricName.split('.')[1];
+                });
+                dataSources = _.sortBy(_.uniq(dataSources));
+                return _.map(dataSources, function (dataSource) {
+                  return {
+                    text: dataSource
+                  };
+                });
+              });
+          }
+
+          // Templated Variable for Druid query type.
+          // It will search the cluster and populate the druid query types.
+          if(interpolated === "druidQueryTypes") {
+            return this.initMetricAppidMapping()
+              .then(function () {
+                var druidMetrics = allMetrics["druid"];
+                // Assumption: query/time is emitted for each query type.
+                var extractQueryTypes = druidMetrics.filter(/./.test.bind(new RegExp("query/time", 'g')));
+                var queryTypes = _.map(extractQueryTypes, function(metricName) {
+                  return metricName.split('.')[2];
+                });
+                queryTypes = _.sortBy(_.uniq(queryTypes));
+                return _.map(queryTypes, function (queryType) {
+                  return {
+                    text: queryType
+                  };
+                });
+              });
+          }
+
           // Templated Variable that will populate all hosts on the cluster.
           // The variable needs to be set to "hosts".
           if (!tComponent){
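
For reference, getDruidData() above boils down to a single AMS GET request per target. An illustrative walk-through for an ingestion-dashboard target once the druidDataSources template variable has been resolved (the collector endpoint and timestamps below are invented for the example):

  // Illustrative only -- mirrors the URL assembly in getDruidData().
  var target = {
    metric: 'druid/middlemanager.*.ingest/events/processed',
    sDataSource: 'wikipedia',   // one selected datasource
    aggregator: 'sum',          // appended as '._sum' (unless 'none')
    transform: 'none',          // skipped when 'none' or unset
    precision: 'default',       // skipped when 'default' or unset
    seriesAggregator: 'sum'     // appended as '&seriesAggregateFunction=sum'
  };
  target.sDataSourceMetric = target.metric.replace('*', target.sDataSource);

  // Resulting request (startTime/endTime come from the Grafana time picker):
  // GET http://<ams-collector>:6188/ws/v1/timeline/metrics
  //       ?metricNames=druid/middlemanager.wikipedia.ingest/events/processed._sum
  //       &appId=druid&startTime=1485200000&endTime=1485221600
  //       &seriesAggregateFunction=sum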

http://git-wip-us.apache.org/repos/asf/ambari/blob/ad0f4ecc/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-home.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-home.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-home.json
new file mode 100644
index 0000000..b0ce06e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-home.json
@@ -0,0 +1,995 @@
+
+{
+  "id": null,
+  "title": "Druid - Home",
+  "originalTitle": "Druid - Home",
+  "tags": ["druid"],
+  "style": "dark",
+  "timezone": "browser",
+  "editable": true,
+  "hideControls": false,
+  "sharedCrosshair": false,
+  "rows": [
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "25px",
+      "panels": [
+        {
+          "content": "<h4 align=\"center\">Metrics to see the overall status for the Druid cluster. Click on each row title to expand on demand to look at various metrics. </h4>\n<h6 style=\"color:red;\" align=\"center\">This dashboard is managed by Ambari.  You may lose any changes made to this dashboard.  If you want to customize, make your own copy.</h6>",
+          "editable": true,
+          "error": false,
+          "id": 3,
+          "isNew": true,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "style": {},
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "showTitle": false,
+      "title": "New row"
+    },
+    {
+      "collapse": true,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 1,
+          "isNew": true,
+          "leftYAxisLabel": "Memory",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Heap Used",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/broker.heap.jvm/mem/used",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            },
+            {
+              "aggregator": "avg",
+              "alias": "Heap Max",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/broker.heap.jvm/mem/max",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM Heap",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "bytes"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 5,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Jvm GC Time",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/broker.jvm/gc/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM GC Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "ms"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Druid Broker"
+    },
+    {
+      "collapse": true,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 4,
+          "isNew": true,
+          "leftYAxisLabel": "Memory",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Heap Used",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/historical.heap.jvm/mem/used",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            },
+            {
+              "aggregator": "avg",
+              "alias": "Heap Max",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/historical.heap.jvm/mem/max",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM Heap",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "bytes"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 2,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Jvm GC Time",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/historical.jvm/gc/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM GC Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "ms"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Druid Historical"
+    },
+    {
+      "collapse": true,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 6,
+          "isNew": true,
+          "leftYAxisLabel": "Memory",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Heap Used",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/coordinator.heap.jvm/mem/used",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            },
+            {
+              "aggregator": "avg",
+              "alias": "Heap Max",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/coordinator.heap.jvm/mem/max",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM Heap",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "bytes"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 7,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Jvm GC Time",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/coordinator.jvm/gc/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM GC Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "ms"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Druid Coordinator"
+    },
+    {
+      "collapse": true,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 8,
+          "isNew": true,
+          "leftYAxisLabel": "Memory",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Heap Used",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/overlord.heap.jvm/mem/used",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            },
+            {
+              "aggregator": "avg",
+              "alias": "Heap Max",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/overlord.heap.jvm/mem/max",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM Heap",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "bytes"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 9,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Jvm GC Time",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/overlord.jvm/gc/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM GC Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "ms"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Druid Overlord"
+    },
+    {
+      "collapse": true,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 10,
+          "isNew": true,
+          "leftYAxisLabel": "Memory",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Heap Used",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.heap.jvm/mem/used",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            },
+            {
+              "aggregator": "avg",
+              "alias": "Heap Max",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.heap.jvm/mem/max",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM Heap",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "bytes"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 11,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Jvm GC Time",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.jvm/gc/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "JVM GC Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "ms"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Druid Middlemanager"
+    }
+  ],
+  "time": {
+    "from": "now-6h",
+    "to": "now"
+  },
+  "timepicker": {
+    "refresh_intervals": [
+      "5s",
+      "10s",
+      "30s",
+      "1m",
+      "5m",
+      "15m",
+      "30m",
+      "1h",
+      "2h",
+      "1d"
+    ],
+    "time_options": [
+      "5m",
+      "15m",
+      "1h",
+      "6h",
+      "12h",
+      "24h",
+      "2d",
+      "7d",
+      "30d"
+    ]
+  },
+  "templating": {
+    "list": []
+  },
+  "annotations": {
+    "list": []
+  },
+  "schemaVersion": 8,
+  "version": 1,
+  "links": [
+    {
+      "asDropdown": true,
+      "icon": "external link",
+      "keepTime": true,
+      "tags": [
+        "druid"
+      ],
+      "title": "Druid Dashboards",
+      "type": "dashboards"
+    }
+  ]
+}
\ No newline at end of file
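
The ingestion and query dashboards in this commit lean on the templated-variable resolution added to datasource.js above. A small sketch of how the 'druidDataSources' options are derived from the metric catalog (the metric names are invented examples of what AMS might report for appId 'druid'; assumes lodash is available as _, as in the datasource module):

  var druidMetrics = [
    'druid/broker.wikipedia.query/time',
    'druid/broker.wikipedia.timeseries.query/time',
    'druid/historical.pageviews.query/time'
  ];

  // Keep the per-datasource metrics, then pull out the datasource segment
  // (the second dot-separated token, e.g. 'wikipedia').
  var dataSources = _.sortBy(_.uniq(
    druidMetrics
      .filter(function (m) { return m.indexOf('query/time') !== -1; })
      .map(function (m) { return m.split('.')[1]; })
  ));
  // -> ['pageviews', 'wikipedia'], rendered as the variable's options.
  // The third token (split('.')[2]) is used the same way for druidQueryTypes.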

http://git-wip-us.apache.org/repos/asf/ambari/blob/ad0f4ecc/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-ingestion.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-ingestion.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-ingestion.json
new file mode 100644
index 0000000..a023065
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-ingestion.json
@@ -0,0 +1,776 @@
+{
+  "id": null,
+  "title": "Druid - Ingestion",
+  "originalTitle": "Druid - Ingestion",
+  "tags": [],
+  "style": "dark",
+  "timezone": "browser",
+  "editable": true,
+  "hideControls": false,
+  "sharedCrosshair": false,
+  "rows": [
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "25px",
+      "panels": [
+        {
+          "content": "<h4 align=\"center\">Metrics to see the overall status for the Druid cluster. Click on each row title to expand on demand to look at various metrics. </h4>\n<h6 style=\"color:red;\" align=\"center\">This dashboard is managed by Ambari.  You may lose any changes made to this dashboard.  If you want to customize, make your own copy.</h6>",
+          "editable": true,
+          "error": false,
+          "id": 3,
+          "isNew": true,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "style": {},
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "showTitle": false,
+      "title": "New row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 1,
+          "isNew": true,
+          "leftYAxisLabel": "Event Count",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.*.ingest/events/processed",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "sum",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Ingested Events",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 12,
+          "isNew": true,
+          "leftYAxisLabel": "Event Count",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.*.ingest/events/thrownAway",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "sum",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Events Thrown Away",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 13,
+          "isNew": true,
+          "leftYAxisLabel": "Event Count",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.*.ingest/events/unparseable",
+              "precision": "default",
+              "refId": "B",
+              "seriesAggregator": "sum",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Unparseable Events",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "none"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Ingestion Metrics"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 14,
+          "isNew": true,
+          "leftYAxisLabel": "Row Count",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.*.ingest/rows/output",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "sum",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Persisted Rows",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 15,
+          "isNew": true,
+          "leftYAxisLabel": "Row Count",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.*.ingest/persists/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Average Persist Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 16,
+          "isNew": true,
+          "leftYAxisLabel": "Row Count",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.*.ingest/persists/count",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "none",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Intermediate Persist Count",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "none"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Intermediate Persists Metrics"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 17,
+          "isNew": true,
+          "leftYAxisLabel": "Size",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/overlord.*.segment/added/bytes",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Avg Segment Size",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 18,
+          "isNew": true,
+          "leftYAxisLabel": "Size",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "sum",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/overlord.*.segment/added/bytes",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "sum",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Total Segment Size",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "bytes",
+            "none"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Segment Size Metrics"
+    }
+  ],
+  "time": {
+    "from": "now-6h",
+    "to": "now"
+  },
+  "timepicker": {
+    "refresh_intervals": [
+      "5s",
+      "10s",
+      "30s",
+      "1m",
+      "5m",
+      "15m",
+      "30m",
+      "1h",
+      "2h",
+      "1d"
+    ],
+    "time_options": [
+      "5m",
+      "15m",
+      "1h",
+      "6h",
+      "12h",
+      "24h",
+      "2d",
+      "7d",
+      "30d"
+    ]
+  },
+  "templating": {
+    "list": [
+      {
+        "allFormat": "glob",
+        "current": {
+          "tags": ["druid"],
+          "text": "All",
+          "value": ""
+        },
+        "datasource": null,
+        "includeAll": true,
+        "multi": true,
+        "multiFormat": "glob",
+        "name": "druidDataSources",
+        "options": [
+          {
+            "selected": true,
+            "text": "All",
+            "value": ""
+          }
+        ],
+        "query": "druidDataSources",
+        "refresh": true,
+        "regex": "",
+        "type": "query"
+      }
+    ]
+  },
+  "annotations": {
+    "list": []
+  },
+  "schemaVersion": 8,
+  "version": 15,
+  "links": [
+    {
+      "asDropdown": true,
+      "icon": "external link",
+      "keepTime": true,
+      "tags": [
+        "druid"
+      ],
+      "title": "Druid Dashboards",
+      "type": "dashboards"
+    }
+  ]
+}
\ No newline at end of file
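
The panel targets above address AMS metric names of the form
druid/<component>.<host pattern>.<metric path>, and the $druidDataSource
alias is filled in from the druidDataSources templating variable defined
at the bottom of the file. A minimal sketch of pulling those segments
apart, assuming Python 3; the helper name is hypothetical and the segment
semantics are illustrative (some metrics, such as
druid/historical.segment/scan/pending, carry no host segment):

    def split_druid_metric(metric):
        """Split an AMS Druid metric name such as
        'druid/middlemanager.*.ingest/persists/count' into
        (component, host_pattern, metric_path)."""
        prefix, rest = metric.split("/", 1)
        if prefix != "druid":
            raise ValueError("not a Druid metric: %r" % metric)
        parts = rest.split(".", 2)
        if len(parts) == 2:
            # No host segment, e.g. 'historical.segment/scan/pending'.
            return parts[0], None, parts[1]
        return tuple(parts)

    print(split_druid_metric("druid/middlemanager.*.ingest/persists/count"))
    # -> ('middlemanager', '*', 'ingest/persists/count')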

http://git-wip-us.apache.org/repos/asf/ambari/blob/ad0f4ecc/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-query.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-query.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-query.json
new file mode 100644
index 0000000..2a57194
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-druid-query.json
@@ -0,0 +1,858 @@
+{
+  "id": null,
+  "title": "Druid - Query",
+  "originalTitle": "Druid - Query",
+  "tags": ["druid"],
+  "style": "dark",
+  "timezone": "browser",
+  "editable": true,
+  "hideControls": false,
+  "sharedCrosshair": false,
+  "rows": [
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "25px",
+      "panels": [
+        {
+          "content": "<h4 align=\"center\">Metrics to see the overall Query Performance Stats for Druid Cluster Click on each row title to expand on demand to look at various metrics. </h4>\n<h6 style=\"color:red;\" align=\"center\">This dashboard is managed by Ambari.  You may lose any changes made to this dashboard.  If you want to customize, make your own copy.</h6>",
+          "editable": true,
+          "error": false,
+          "id": 3,
+          "isNew": true,
+          "links": [],
+          "mode": "html",
+          "span": 12,
+          "style": {},
+          "title": "",
+          "type": "text"
+        }
+      ],
+      "showTitle": false,
+      "title": "New row"
+    },
+    {
+      "collapse": false,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 1,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/broker.*.%.query/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "sum",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Broker Query Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 19,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/historical.*.%.query/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "sum",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Historical Query Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 20,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 4,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.*.%.query/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "sum",
+              "templatedHost": "%",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Realtime Query Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "none"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Query Time Metrics"
+    },
+    {
+      "collapse": true,
+      "editable": true,
+      "height": "250px",
+      "panels": [
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 21,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/historical.*.%.query/segment/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "sum",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Historical Segment Scan Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": null,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 22,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.*.%.query/segment/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "sum",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Realtime Segment Scan Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 25,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/historical.*.%.query/wait/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Historical Query Wait Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 26,
+          "isNew": true,
+          "leftYAxisLabel": "Time",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "$druidDataSource",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.*.%.query/wait/time",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Realtime Query Wait Time",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "ms",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 23,
+          "isNew": true,
+          "leftYAxisLabel": "Count",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Avg Pending Segment Scans",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/historical.segment/scan/pending",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Pending Historical Segment Scans",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "none"
+          ]
+        },
+        {
+          "aliasColors": {},
+          "bars": false,
+          "datasource": null,
+          "editable": true,
+          "error": false,
+          "fill": 1,
+          "grid": {
+            "leftLogBase": 1,
+            "leftMax": null,
+            "leftMin": 0,
+            "rightLogBase": 1,
+            "rightMax": null,
+            "rightMin": null,
+            "threshold1": null,
+            "threshold1Color": "rgba(216, 200, 27, 0.27)",
+            "threshold2": null,
+            "threshold2Color": "rgba(234, 112, 112, 0.22)"
+          },
+          "id": 24,
+          "isNew": true,
+          "leftYAxisLabel": "Count",
+          "legend": {
+            "alignAsTable": false,
+            "avg": false,
+            "current": false,
+            "hideEmpty": false,
+            "hideZero": false,
+            "max": false,
+            "min": false,
+            "rightSide": false,
+            "show": true,
+            "total": false,
+            "values": false
+          },
+          "lines": true,
+          "linewidth": 2,
+          "links": [],
+          "minSpan": null,
+          "nullPointMode": "connected",
+          "percentage": false,
+          "pointradius": 5,
+          "points": false,
+          "renderer": "flot",
+          "rightYAxisLabel": "",
+          "seriesOverrides": [],
+          "span": 6,
+          "stack": false,
+          "steppedLine": false,
+          "targets": [
+            {
+              "aggregator": "avg",
+              "alias": "Avg Pending Segment Scans",
+              "app": "druid",
+              "downsampleAggregator": "avg",
+              "errors": {},
+              "hosts": "",
+              "metric": "druid/middlemanager.segment/scan/pending",
+              "precision": "default",
+              "refId": "A",
+              "seriesAggregator": "avg",
+              "transform": "none"
+            }
+          ],
+          "timeFrom": null,
+          "timeShift": null,
+          "title": "Pending Realtime Segment Scans",
+          "tooltip": {
+            "shared": true,
+            "value_type": "cumulative"
+          },
+          "transparent": false,
+          "type": "graph",
+          "x-axis": true,
+          "y-axis": true,
+          "y_formats": [
+            "short",
+            "none"
+          ]
+        }
+      ],
+      "showTitle": true,
+      "title": "Segment Scan Metrics"
+    },
+    {
+      "collapse": true,
+      "editable": true,
+      "height": "250px",
+      "panels": [],
+      "showTitle": true,
+      "title": "Query Cache Performance Metrics"
+    }
+  ],
+  "time": {
+    "from": "now-6h",
+    "to": "now"
+  },
+  "timepicker": {
+    "refresh_intervals": [
+      "5s",
+      "10s",
+      "30s",
+      "1m",
+      "5m",
+      "15m",
+      "30m",
+      "1h",
+      "2h",
+      "1d"
+    ],
+    "time_options": [
+      "5m",
+      "15m",
+      "1h",
+      "6h",
+      "12h",
+      "24h",
+      "2d",
+      "7d",
+      "30d"
+    ]
+  },
+  "templating": {
+    "list": [
+      {
+        "allFormat": "glob",
+        "current": {
+          "tags": [],
+          "text": "All",
+          "value": ""
+        },
+        "datasource": null,
+        "includeAll": true,
+        "multi": true,
+        "multiFormat": "glob",
+        "name": "druidDataSources",
+        "options": [
+          {
+            "selected": true,
+            "text": "All",
+            "value": ""
+          }
+        ],
+        "query": "druidDataSources",
+        "refresh": true,
+        "regex": "",
+        "type": "query"
+      }
+    ]
+  },
+  "annotations": {
+    "list": []
+  },
+  "schemaVersion": 8,
+  "version": 1,
+  "links": [
+    {
+      "asDropdown": true,
+      "icon": "external link",
+      "keepTime": true,
+      "tags": [
+        "druid"
+      ],
+      "title": "Druid Dashboards",
+      "type": "dashboards"
+    }
+  ]
+}
\ No newline at end of file
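
A minimal sketch, assuming Python 3 and a checkout that contains the file
added above, that loads the query dashboard and prints each row with its
panel titles (the path is the one from this commit):

    import json

    PATH = ("ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/"
            "package/files/grafana-dashboards/HDP/grafana-druid-query.json")

    with open(PATH) as f:
        dashboard = json.load(f)

    print(dashboard["title"])  # Druid - Query
    for row in dashboard["rows"]:
        titles = [p.get("title") or "<text panel>" for p in row["panels"]]
        print("  %s: %s" % (row["title"], ", ".join(titles)))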

http://git-wip-us.apache.org/repos/asf/ambari/blob/ad0f4ecc/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-common.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-common.xml b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-common.xml
index 265bf46..e00480e 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-common.xml
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/configuration/druid-common.xml
@@ -204,4 +204,61 @@
     <on-ambari-upgrade add="false"/>
   </property>
 
+  <property>
+    <name>druid.emitter</name>
+    <value>{{metric_emitter_type}}</value>
+    <description>Emitter used to emit metrics. One of "noop", "logging", "ambari-metrics", or "http"; each value
+      initializes the corresponding emitter module.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>druid.emitter.ambari-metrics.hostname</name>
+    <value>{{metric_collector_host}}</value>
+    <description>Hostname of the Ambari Metrics Collector (timeline service).</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>druid.emitter.ambari-metrics.port</name>
+    <value>{{metric_collector_port}}</value>
+    <description>Port of the Ambari Metrics Collector (timeline service).</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>druid.emitter.ambari-metrics.protocol</name>
+    <value>{{metric_collector_protocol}}</value>
+    <description>Timeline protocol (http or https)</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>druid.emitter.ambari-metrics.trustStorePath</name>
+    <value>{{metric_truststore_path}}</value>
+    <description>Location of the trust store file.</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>druid.emitter.ambari-metrics.trustStoreType</name>
+    <value>{{metric_truststore_type}}</value>
+    <description>Optional. Default value is "jks".</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>druid.emitter.ambari-metrics.trustStorePassword</name>
+    <value>{{metric_truststore_password}}</value>
+    <description>Password to open the trust store file.</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>druid.emitter.ambari-metrics.eventConverter</name>
+    <value>{"type":"whiteList"}</value>
+    <description>Event converter for the ambari-metrics emitter; the whiteList converter forwards only whitelisted metrics.</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>druid.monitoring.monitors</name>
+    <value>["com.metamx.metrics.JvmMonitor"]</value>
+    <description>List of Druid monitor classes used to collect runtime metrics, e.g. JvmMonitor for JVM metrics.</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+
 </configuration>
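
The {{...}} values above are not literal Druid settings: they are stack
template placeholders that Ambari resolves from the cluster configuration
when it writes out the runtime properties. A minimal sketch of that kind
of substitution, assuming Python 3 and hypothetical resolved values
(Ambari itself uses its own params/template machinery):

    import re

    # Hypothetical resolved values; in a real cluster Ambari derives
    # these from the AMS configuration (names here are illustrative).
    params = {
        "metric_emitter_type": "ambari-metrics",
        "metric_collector_host": "ams-collector.example.com",
        "metric_collector_port": "6188",
        "metric_collector_protocol": "http",
    }

    def render(template):
        """Replace {{name}} placeholders with their resolved values."""
        return re.sub(r"\{\{(\w+)\}\}", lambda m: params[m.group(1)], template)

    print(render("{{metric_collector_protocol}}://"
                 "{{metric_collector_host}}:{{metric_collector_port}}"))
    # -> http://ams-collector.example.com:6188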


[05/50] [abbrv] ambari git commit: AMBARI-19651 Allow user to add custom property for log4j. (atkach)

Posted by nc...@apache.org.
AMBARI-19651 Allow user to add custom property for log4j. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/09c1894f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/09c1894f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/09c1894f

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 09c1894f7544067e5ab53fedafabea030815be9e
Parents: 49ba635
Author: Andrii Tkach <at...@apache.org>
Authored: Mon Jan 23 12:35:28 2017 +0200
Committer: Andrii Tkach <at...@apache.org>
Committed: Mon Jan 23 14:30:09 2017 +0200

----------------------------------------------------------------------
 .../AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml       | 2 +-
 .../ATLAS/0.7.0.2.5/configuration/atlas-log4j.xml                | 2 +-
 .../FALCON/0.5.0.2.1/configuration/falcon-log4j.xml              | 2 +-
 .../HBASE/0.96.0.2.0/configuration/hbase-log4j.xml               | 2 +-
 .../common-services/HDFS/2.1.0.2.0/configuration/hdfs-log4j.xml  | 2 +-
 .../common-services/HDFS/3.0.0.3.0/configuration/hdfs-log4j.xml  | 4 ++--
 .../HIVE/0.12.0.2.0/configuration/hive-exec-log4j.xml            | 2 +-
 .../common-services/HIVE/0.12.0.2.0/configuration/hive-log4j.xml | 2 +-
 .../HIVE/0.12.0.2.0/configuration/webhcat-log4j.xml              | 2 +-
 .../common-services/KAFKA/0.8.1/configuration/kafka-log4j.xml    | 2 +-
 .../KNOX/0.5.0.2.2/configuration/gateway-log4j.xml               | 2 +-
 .../common-services/KNOX/0.5.0.2.2/configuration/ldap-log4j.xml  | 2 +-
 .../OOZIE/4.0.0.2.0/configuration/oozie-log4j.xml                | 2 +-
 .../OOZIE/4.2.0.2.3/configuration/oozie-log4j.xml                | 2 +-
 .../common-services/RANGER/0.6.0/configuration/admin-log4j.xml   | 2 +-
 .../common-services/RANGER/0.6.0/configuration/tagsync-log4j.xml | 2 +-
 .../RANGER/0.6.0/configuration/usersync-log4j.xml                | 2 +-
 .../RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml             | 2 +-
 .../STORM/0.10.0/configuration/storm-cluster-log4j.xml           | 2 +-
 .../STORM/0.10.0/configuration/storm-worker-log4j.xml            | 2 +-
 .../STORM/1.0.1/configuration/storm-cluster-log4j.xml            | 2 +-
 .../STORM/1.0.1/configuration/storm-worker-log4j.xml             | 2 +-
 .../common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml  | 2 +-
 .../common-services/YARN/3.0.0.3.0/configuration/yarn-log4j.xml  | 2 +-
 .../ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml            | 2 +-
 .../stacks/HDP/2.2/services/HDFS/configuration/hdfs-log4j.xml    | 2 +-
 .../stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml    | 2 +-
 .../HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml     | 2 +-
 .../stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml   | 2 +-
 .../HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml      | 2 +-
 .../HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml    | 2 +-
 .../stacks/HDP/2.6/services/ATLAS/configuration/atlas-log4j.xml  | 2 +-
 32 files changed, 33 insertions(+), 33 deletions(-)
----------------------------------------------------------------------
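
Every file in this change flips the same root attribute,
supports_adding_forbidden, from "true" to "false", which is what allows
users to add custom properties to these log4j config types from the
Ambari UI. A minimal sketch, assuming Python 3 and an ambari-server
checkout laid out as in the paths above, that lists config files where
adding custom properties is still forbidden:

    import xml.etree.ElementTree as ET
    from pathlib import Path

    # Stack and common-services definitions in an ambari-server checkout.
    ROOT = Path("ambari-server/src/main/resources")

    for xml_file in ROOT.rglob("*-log4j.xml"):
        root = ET.parse(xml_file).getroot()
        if root.get("supports_adding_forbidden") == "true":
            print("custom properties still forbidden:", xml_file)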


http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml
index 5a97804..f5a2640 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
    <property>
     <name>ams_hbase_log_maxfilesize</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/configuration/atlas-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/configuration/atlas-log4j.xml b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/configuration/atlas-log4j.xml
index 05c8f0a..e5e4d23 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/configuration/atlas-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/configuration/atlas-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_adding_forbidden="true">
+<configuration supports_adding_forbidden="false">
   <property>
     <name>atlas_log_level</name>
     <value>info</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-log4j.xml b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-log4j.xml
index 2662b51..b2b1fe1 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_adding_forbidden="true">
+<configuration supports_adding_forbidden="false">
   <!-- log4j.xml -->
   <property>
     <name>falcon_log_maxfilesize</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-log4j.xml b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-log4j.xml
index 2177705..0fa33ba 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
  <property>
     <name>hbase_log_maxfilesize</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-log4j.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-log4j.xml
index 3f73c19..b1db232 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
    <property>
     <name>hadoop_security_log_max_backup_size</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-log4j.xml b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-log4j.xml
index 37b339e..35554e6 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <!-- These configs were inherited from HDP 2.2 -->
   <property>
     <name>content</name>
@@ -223,4 +223,4 @@
     </value-attributes>
     <on-ambari-upgrade add="true"/>
   </property>
-</configuration>
\ No newline at end of file
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-exec-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-exec-log4j.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-exec-log4j.xml
index d90a679..61d0b85 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-exec-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-exec-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>content</name>
     <display-name>hive-exec-log4j template</display-name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-log4j.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-log4j.xml
index 3473b8c..9bf70fc 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
         <name>hive_log_maxfilesize</name>
         <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/webhcat-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/webhcat-log4j.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/webhcat-log4j.xml
index 4ec22770..4751085 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/webhcat-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/webhcat-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
      <name>webhcat_log_maxfilesize</name>
      <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-log4j.xml b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-log4j.xml
index f6045cd..0eed025 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
    <property>
     <name>kafka_log_maxfilesize</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/gateway-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/gateway-log4j.xml b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/gateway-log4j.xml
index 613d75f..ccefa54 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/gateway-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/gateway-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
    <property>
     <name>knox_gateway_log_maxfilesize</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/ldap-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/ldap-log4j.xml b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/ldap-log4j.xml
index e078d9d..11595d5 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/ldap-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/ldap-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
    <property>
     <name>knox_ldap_log_maxfilesize</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-log4j.xml b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-log4j.xml
index ff1e388..42a2604 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>oozie_log_maxhistory</name>
     <value>720</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-log4j.xml b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-log4j.xml
index 123d570..c8f9cfd 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.2.3/configuration/oozie-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
    <property>
     <name>oozie_log_maxhistory</name>
     <value>720</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/admin-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/admin-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/admin-log4j.xml
index fdffcdf..6108c36 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/admin-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/admin-log4j.xml
@@ -18,7 +18,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_adding_forbidden="true">
+<configuration supports_adding_forbidden="false">
   <property>
     <name>ranger_xa_log_maxfilesize</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/tagsync-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/tagsync-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/tagsync-log4j.xml
index 141c8c9..6384302 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/tagsync-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/tagsync-log4j.xml
@@ -18,7 +18,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_adding_forbidden="true">
+<configuration supports_adding_forbidden="false">
   <property>
     <name>ranger_tagsync_log_maxfilesize</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/usersync-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/usersync-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/usersync-log4j.xml
index 4cbc34e..8843a2a 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/usersync-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/configuration/usersync-log4j.xml
@@ -18,7 +18,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_adding_forbidden="true">
+<configuration supports_adding_forbidden="false">
   <property>
     <name>ranger_usersync_log_maxfilesize</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml
index 3a63f38..bac2e84 100644
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-log4j.xml
@@ -18,7 +18,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_adding_forbidden="true">
+<configuration supports_adding_forbidden="false">
    <property>
     <name>ranger_kms_log_maxfilesize</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/STORM/0.10.0/configuration/storm-cluster-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.10.0/configuration/storm-cluster-log4j.xml b/ambari-server/src/main/resources/common-services/STORM/0.10.0/configuration/storm-cluster-log4j.xml
index 92d6c38..9de0017 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.10.0/configuration/storm-cluster-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/STORM/0.10.0/configuration/storm-cluster-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
       <name>storm_a1_maxfilesize</name>
       <value>100</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/STORM/0.10.0/configuration/storm-worker-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.10.0/configuration/storm-worker-log4j.xml b/ambari-server/src/main/resources/common-services/STORM/0.10.0/configuration/storm-worker-log4j.xml
index 45bf289..383c972 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.10.0/configuration/storm-worker-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/STORM/0.10.0/configuration/storm-worker-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>storm_wrkr_a1_maxfilesize</name>
     <value>100</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/STORM/1.0.1/configuration/storm-cluster-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/1.0.1/configuration/storm-cluster-log4j.xml b/ambari-server/src/main/resources/common-services/STORM/1.0.1/configuration/storm-cluster-log4j.xml
index 1cb17f0..4ba4bd6 100644
--- a/ambari-server/src/main/resources/common-services/STORM/1.0.1/configuration/storm-cluster-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/STORM/1.0.1/configuration/storm-cluster-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>storm_a1_maxfilesize</name>
     <value>100</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/STORM/1.0.1/configuration/storm-worker-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/1.0.1/configuration/storm-worker-log4j.xml b/ambari-server/src/main/resources/common-services/STORM/1.0.1/configuration/storm-worker-log4j.xml
index c3de7cd..180208c 100644
--- a/ambari-server/src/main/resources/common-services/STORM/1.0.1/configuration/storm-worker-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/STORM/1.0.1/configuration/storm-worker-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>storm_wrkr_a1_maxfilesize</name>
     <value>100</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
index 31f0ff3..41a3c0e 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>yarn_rm_summary_log_max_backup_size</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-log4j.xml b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-log4j.xml
index 1d828ee..8d205f8 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>content</name>
     <display-name>yarn-log4j template</display-name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
index 151ce3b..75dba02 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>zookeeper_log_max_backup_size</name>
     <value>10</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-log4j.xml
index d052621..280c43a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-log4j.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hdfs-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
     <property>
     <name>hadoop_security_log_max_backup_size</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
index 34e97d4..7816591 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>yarn_rm_summary_log_max_backup_size</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
index 03e8dae..3b935f8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-exec-log4j2.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>content</name>
     <display-name>hive-exec-log4j2 template</display-name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
index aac3216..abf307a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>content</name>
     <display-name>hive-log4j2 template</display-name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
index 25315b4..67f5830 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>content</name>
     <display-name>llap-cli-log4j2 template</display-name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
index 4790f0f..9fe45b0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-daemon-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_final="false" supports_adding_forbidden="true">
+<configuration supports_final="false" supports_adding_forbidden="false">
   <property>
     <name>hive_llap_log_maxfilesize</name>
     <value>256</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/09c1894f/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/atlas-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/atlas-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/atlas-log4j.xml
index ec93b63..95bc0ea 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/atlas-log4j.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ATLAS/configuration/atlas-log4j.xml
@@ -19,7 +19,7 @@
  * limitations under the License.
  */
 -->
-<configuration supports_adding_forbidden="true">
+<configuration supports_adding_forbidden="false">
    <property>
     <name>atlas_log_max_backup_size</name>
     <value>256</value>
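
For context on the long run of changes above: on an Ambari configuration type, supports_adding_forbidden="true" marks the config as closed to user-added custom properties in the UI, so flipping it to "false" re-enables the "Add Property" control for these log4j types. A minimal audit sketch in Python (assuming only the XML layout visible in the diffs; the directory argument is one of the real stack paths above):

import os
import xml.etree.ElementTree as ET

def find_forbidden(root_dir):
    """Yield configuration XMLs that still forbid adding custom properties."""
    for dirpath, _, files in os.walk(root_dir):
        for name in files:
            if not name.endswith(".xml"):
                continue
            path = os.path.join(dirpath, name)
            try:
                root = ET.parse(path).getroot()
            except ET.ParseError:
                continue  # skip XML fragments that are not configuration files
            if root.tag == "configuration" and root.get("supports_adding_forbidden") == "true":
                yield path

for path in find_forbidden("ambari-server/src/main/resources/common-services"):
    print(path)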


[38/50] [abbrv] ambari git commit: AMBARI-19671. Hive View 2.0: Incorrect behaviour on select of database (Abhishek Kumar via pallavkul)

Posted by nc...@apache.org.
AMBARI-19671. Hive View 2.0: Incorrect behaviour on select of database (Abhishek Kumar via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/72e02ab0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/72e02ab0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/72e02ab0

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 72e02ab089b07246b2c79eb5e94825f1c19ad3cf
Parents: bb8b44c
Author: pallavkul <pa...@gmail.com>
Authored: Wed Jan 25 09:09:47 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Wed Jan 25 09:09:47 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/templates/queries/query.hbs | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/72e02ab0/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
index 950e4f3..9e9e542 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
@@ -84,21 +84,21 @@
 <div class="col-md-3 database-panel">
   <div class="database-container">
     <div class="row">
-    <div class="panel-group database-panel" id="accordion" role="tablist" aria-multiselectable="true">
+    <div class="panel-group database-panel" id="db_accordion" role="tablist" aria-multiselectable="true">
       {{#each selectedTablesModels as |tableModel|}}
         <div class="panel panel-default">
-          <div class="panel-heading" role="tab">
+          <div class="panel-heading" role="tab" id={{concat 'db_heading_' tableModel.dbname}}>
             <h4 class="panel-title">
-              <a role="button" data-toggle="collapse" data-parent="#accordion"
+              <a role="button" data-i-toggle="collapse" data-i-parent="#db_accordion"
                  href="javascript:void(0)" {{action 'showTables' tableModel.dbname }} aria-expanded="true"
-                 aria-controls={{tableModel.dbname}}>
-                {{ tableModel.dbname }} {{#if tableModel.isSelected}} {{fa-icon "check"}}  {{/if}}
+                 aria-controls={{concat 'db_body_' tableModel.dbname}}>
+                {{ tableModel.dbname }} {{#if (eq tableModel.dbname worksheet.selectedDb)}} {{fa-icon "check"}}  {{/if}}
               </a>
               <small class="pull-right">Tables({{tableModel.tables.length}})</small>
             </h4>
           </div>
-          <div id={{ tableModel.dbname }} class="db-tables collapse panel-collapse {{if singleDbModel 'in'}}" role="tabpanel"
-          aria-labelledby="headingOne">
+          <div id={{concat 'db_body_' tableModel.dbname}} class="db-tables collapse panel-collapse {{if (eq tableModel.dbname worksheet.selectedDb) 'in'}}" role="tabpanel"
+          aria-labelledby={{concat 'db_heading_' tableModel.dbname}}>
           <div class="panel-body">
             {{#if tableModel.tables.length }}
               {{#list-filter header="tables" items=tableModel.tables


[08/50] [abbrv] ambari git commit: AMBARI-19624. Missing fields for workflow SLA (Padma Priya via pallavkul)

Posted by nc...@apache.org.
AMBARI-19624. Missing fields for workflow SLA (Padma Priya via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a6fd5cb0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a6fd5cb0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a6fd5cb0

Branch: refs/heads/branch-dev-patch-upgrade
Commit: a6fd5cb04e47d85a6de43b403276fd5992361745
Parents: b6b4378
Author: pallavkul <pa...@gmail.com>
Authored: Mon Jan 23 18:18:33 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Mon Jan 23 18:18:33 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/domain/actionjob_hanlder.js  |  1 +
 .../src/main/resources/ui/app/domain/mapping-utils.js  | 13 ++++++++++++-
 .../resources/ui/app/templates/components/sla-info.hbs | 12 ++++++++++++
 3 files changed, 25 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a6fd5cb0/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
index 34a9a4a..c9d9887 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
@@ -367,6 +367,7 @@ var FSActionJobHandler=ActionJobHandler.extend({
       {xml:"delete"},
       {xml:"mkdir"},
       {xml:"move"},
+      {xml:"chmod"},
       {xml:"touchz"},
       {xml:"chgrp"}
     ];

http://git-wip-us.apache.org/repos/asf/ambari/blob/a6fd5cb0/contrib/views/wfmanager/src/main/resources/ui/app/domain/mapping-utils.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/mapping-utils.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/mapping-utils.js
index 1929ddf..b918d70 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/mapping-utils.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/mapping-utils.js
@@ -233,7 +233,12 @@ var SLAMapper= Ember.Object.extend({
       if (sla.alertContact){
         slaInfo[slaPrefix+":"+"alert-contact"]=sla.alertContact;
       }
-
+      if(sla.notificationMessage){
+        slaInfo[slaPrefix+":"+"notification-msg"]=sla.notificationMessage;
+      }
+      if(sla.upstreamApps){
+        slaInfo[slaPrefix+":"+"upstream-apps"]=sla.upstreamApps;
+      }
     }
     return nodeObj;
   },
@@ -248,6 +253,12 @@ var SLAMapper= Ember.Object.extend({
     if (infoJson["alert-events"] && infoJson["alert-events"].__text){
       sla.alertEvents=infoJson["alert-events"].__text;
     }
+    if (infoJson["notification-msg"] && infoJson["notification-msg"].__text){
+      sla.notificationMessage=infoJson["notification-msg"].__text;
+    }
+    if (infoJson["upstream-apps"] && infoJson["upstream-apps"].__text){
+      sla.upstreamApps=infoJson["upstream-apps"].__text;
+    }
     this.processTimePeriods(sla,infoJson,"should-start","shouldStart");
     this.processTimePeriods(sla,infoJson,"should-end","shouldEnd");
     this.processTimePeriods(sla,infoJson,"max-duration","maxDuration");

http://git-wip-us.apache.org/repos/asf/ambari/blob/a6fd5cb0/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sla-info.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sla-info.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sla-info.hbs
index 28bdf15..447bac2 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sla-info.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sla-info.hbs
@@ -90,5 +90,17 @@
           {{input type="text" class="form-control" name="name" value=slaInfo.alertContact placeholder="Comma seperated Email IDs"}}
         </div>
       </div>
+      <div class="form-group">
+        <label class="control-label col-xs-2">Notification Message</label>
+        <div class="col-xs-5">
+          {{input type="text" class="form-control" name="name" value=slaInfo.notificationMessage placeholder="Notification Message"}}
+        </div>
+      </div>
+      <div class="form-group">
+        <label class="control-label col-xs-2">Upstream Apps</label>
+        <div class="col-xs-5">
+          {{input type="text" class="form-control" name="name" value=slaInfo.upstreamApps placeholder="Upstream Apps"}}
+        </div>
+      </div>
     </div>
   </div>
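
The mapper above now round-trips two more optional SLA fields between the form model and the workflow XML: notificationMessage <-> sla:notification-msg and upstreamApps <-> sla:upstream-apps (element names taken from the diff; the enclosing Oozie SLA schema details are assumed). A minimal Python restatement of the outbound mapping:

def to_sla_info(sla, sla_prefix="sla"):
    # Mirrors the JS logic above: emit an element only when the field is set.
    info = {}
    if sla.get("notificationMessage"):
        info[sla_prefix + ":notification-msg"] = sla["notificationMessage"]
    if sla.get("upstreamApps"):
        info[sla_prefix + ":upstream-apps"] = sla["upstreamApps"]
    return info

print(to_sla_info({"notificationMessage": "SLA missed", "upstreamApps": "appA,appB"}))
# {'sla:notification-msg': 'SLA missed', 'sla:upstream-apps': 'appA,appB'}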


[15/50] [abbrv] ambari git commit: AMBARI-19670. Trailing slash (/) on cluster resource causes incorrect authorization logic flow (rlevas)

Posted by nc...@apache.org.
AMBARI-19670. Trailing slash (/) on cluster resource causes incorrect authorization logic flow (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8a64be42
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8a64be42
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8a64be42

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 8a64be42043380ad5c35b0517a92e9c0239d2d4b
Parents: 9bb27b4
Author: Robert Levas <rl...@hortonworks.com>
Authored: Mon Jan 23 11:36:53 2017 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Mon Jan 23 11:36:53 2017 -0500

----------------------------------------------------------------------
 .../security/authorization/AmbariAuthorizationFilter.java |  2 +-
 .../authorization/AmbariAuthorizationFilterTest.java      | 10 ++++++++++
 2 files changed, 11 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8a64be42/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
index 1faadb6..ce9a790 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilter.java
@@ -68,7 +68,7 @@ public class AmbariAuthorizationFilter implements Filter {
   private static final String API_USERS_ALL_PATTERN = API_VERSION_PREFIX + "/users.*";
   private static final String API_PRIVILEGES_ALL_PATTERN = API_VERSION_PREFIX + "/privileges.*";
   private static final String API_GROUPS_ALL_PATTERN = API_VERSION_PREFIX + "/groups.*";
-  private static final String API_CLUSTERS_PATTERN = API_VERSION_PREFIX + "/clusters/(\\w+)?";
+  private static final String API_CLUSTERS_PATTERN = API_VERSION_PREFIX + "/clusters/(\\w+/?)?";
   private static final String API_WIDGET_LAYOUTS_PATTERN = API_VERSION_PREFIX + "/clusters/.*?/widget_layouts.*?";
   private static final String API_CLUSTERS_ALL_PATTERN = API_VERSION_PREFIX + "/clusters.*";
   private static final String API_VIEWS_ALL_PATTERN = API_VERSION_PREFIX + "/views.*";

http://git-wip-us.apache.org/repos/asf/ambari/blob/8a64be42/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilterTest.java
index 0ab75c5..15e243e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilterTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilterTest.java
@@ -72,6 +72,8 @@ public class AmbariAuthorizationFilterTest {
     final Table<String, String, Boolean> urlTests = HashBasedTable.create();
     urlTests.put("/api/v1/clusters/cluster", "GET",  true);
     urlTests.put("/api/v1/clusters/cluster", "POST",  true);
+    urlTests.put("/api/v1/clusters/cluster/", "GET",  true);  // This should probably be an invalid URL, but Ambari seems to allow it.
+    urlTests.put("/api/v1/clusters/cluster/", "POST",  true); // This should probably be an invalid URL, but Ambari seems to allow it.
     urlTests.put("/api/v1/views", "GET", true);
     urlTests.put("/api/v1/views", "POST", true);
     urlTests.put("/api/v1/persist/SomeValue", "GET", true);
@@ -113,6 +115,8 @@ public class AmbariAuthorizationFilterTest {
     final Table<String, String, Boolean> urlTests = HashBasedTable.create();
     urlTests.put("/api/v1/clusters/cluster", "GET",  true);
     urlTests.put("/api/v1/clusters/cluster", "POST",  true);
+    urlTests.put("/api/v1/clusters/cluster/", "GET",  true);  // This should probably be an invalid URL, but Ambari seems to allow it.
+    urlTests.put("/api/v1/clusters/cluster/", "POST",  true); // This should probably be an invalid URL, but Ambari seems to allow it.
     urlTests.put("/api/v1/views", "GET", true);
     urlTests.put("/api/v1/views", "POST", true);
     urlTests.put("/api/v1/persist/SomeValue", "GET", true);
@@ -154,6 +158,8 @@ public class AmbariAuthorizationFilterTest {
     final Table<String, String, Boolean> urlTests = HashBasedTable.create();
     urlTests.put("/api/v1/clusters/cluster", "GET",  true);
     urlTests.put("/api/v1/clusters/cluster", "POST",  true);
+    urlTests.put("/api/v1/clusters/cluster/", "GET",  true);  // This should probably be an invalid URL, but Ambari seems to allow it.
+    urlTests.put("/api/v1/clusters/cluster/", "POST",  true); // This should probably be an invalid URL, but Ambari seems to allow it.
     urlTests.put("/api/v1/views", "GET", true);
     urlTests.put("/api/v1/views", "POST", true);
     urlTests.put("/api/v1/persist/SomeValue", "GET", true);
@@ -195,6 +201,8 @@ public class AmbariAuthorizationFilterTest {
     final Table<String, String, Boolean> urlTests = HashBasedTable.create();
     urlTests.put("/api/v1/clusters/cluster", "GET",  true);
     urlTests.put("/api/v1/clusters/cluster", "POST",  true);
+    urlTests.put("/api/v1/clusters/cluster/", "GET",  true);  // This should probably be an invalid URL, but Ambari seems to allow it.
+    urlTests.put("/api/v1/clusters/cluster/", "POST",  true); // This should probably be an invalid URL, but Ambari seems to allow it.
     urlTests.put("/api/v1/views", "GET", true);
     urlTests.put("/api/v1/views", "POST", true);
     urlTests.put("/api/v1/persist/SomeValue", "GET", true);
@@ -236,6 +244,8 @@ public class AmbariAuthorizationFilterTest {
     final Table<String, String, Boolean> urlTests = HashBasedTable.create();
     urlTests.put("/api/v1/clusters/cluster", "GET",  true);
     urlTests.put("/api/v1/clusters/cluster", "POST",  true);
+    urlTests.put("/api/v1/clusters/cluster/", "GET",  true);  // This should probably be an invalid URL, but Ambari seems to allow it.
+    urlTests.put("/api/v1/clusters/cluster/", "POST",  true); // This should probably be an invalid URL, but Ambari seems to allow it.
     urlTests.put("/api/v1/views", "GET", true);
     urlTests.put("/api/v1/views", "POST", true);
     urlTests.put("/api/v1/persist/SomeValue", "GET", true);


[36/50] [abbrv] ambari git commit: AMBARI-19688 - Ubuntu14 base url fields on "Select version" page are duplicates (rzang)

Posted by nc...@apache.org.
AMBARI-19688 - Ubuntu14 base url fields on "Select version" page are duplicates (rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b3a070c0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b3a070c0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b3a070c0

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b3a070c0011a542504c9fd586b26bccfaf1568f1
Parents: 6046200
Author: Richard Zang <rz...@apache.org>
Authored: Tue Jan 24 15:15:47 2017 -0800
Committer: Richard Zang <rz...@apache.org>
Committed: Tue Jan 24 15:28:38 2017 -0800

----------------------------------------------------------------------
 ambari-web/app/controllers/installer.js | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b3a070c0/ambari-web/app/controllers/installer.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/installer.js b/ambari-web/app/controllers/installer.js
index 44e7907..35056f7 100644
--- a/ambari-web/app/controllers/installer.js
+++ b/ambari-web/app/controllers/installer.js
@@ -21,7 +21,7 @@ var App = require('app');
 var stringUtils = require('utils/string_utils');
 var validator = require('utils/validator');
 
-App.InstallerController = App.WizardController.extend({
+App.InstallerController = App.WizardController.extend(App.UserPref, {
 
   name: 'installerController',
 
@@ -754,6 +754,16 @@ App.InstallerController = App.WizardController.extend({
           this.setSelected(data.stackInfo.isStacksExistInDb);
         }
       }
+      // log diagnosis data for abnormal number of repos
+      var post_diagnosis = false;
+      data.versionDefinition.operating_systems.map(function(item) {
+        if (item.repositories.length > 2) {
+          post_diagnosis = true;
+        }
+      });
+      if (post_diagnosis) {
+        this.postUserPref('stack_response_diagnosis', data);
+      }
     }
   },
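
The added block is purely diagnostic: when any operating system entry in the version definition carries more than two repositories (the symptom behind the duplicated Ubuntu14 base-url fields), the raw response is persisted via postUserPref under the key 'stack_response_diagnosis' for later analysis. The trigger condition, restated in Python (field names are the ones visible in the diff; the inner structure of each repository entry is illustrative):

version_definition = {
    "operating_systems": [
        {"repositories": [{"repo_id": "HDP-2.5"}, {"repo_id": "HDP-UTILS"}, {"repo_id": "HDP-2.5-dup"}]},
    ]
}
post_diagnosis = any(len(os_entry["repositories"]) > 2
                     for os_entry in version_definition["operating_systems"])
print(post_diagnosis)  # True -> the controller would persist the payload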
 


[49/50] [abbrv] ambari git commit: AMBARI-19690: NM Memory can end up being too high on nodes with many components (jluniya)

Posted by nc...@apache.org.
AMBARI-19690: NM Memory can end up being too high on nodes with many components (jluniya)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6a811557
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6a811557
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6a811557

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6a8115572b328785532aed27c1dc44a1bac17a01
Parents: e555230
Author: Jayush Luniya <jl...@hortonworks.com>
Authored: Wed Jan 25 09:40:56 2017 -0800
Committer: Jayush Luniya <jl...@hortonworks.com>
Committed: Wed Jan 25 09:40:56 2017 -0800

----------------------------------------------------------------------
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |  60 +++++++++-
 .../stacks/HDP/2.5/services/stack_advisor.py    |  33 +-----
 .../src/main/resources/stacks/stack_advisor.py  |  18 +++
 .../stacks/2.0.6/common/test_stack_advisor.py   | 113 ++++++++++++++++++-
 .../stacks/2.5/common/test_stack_advisor.py     |   4 +-
 5 files changed, 191 insertions(+), 37 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6a811557/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 7ed1b77..55f3d30 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -1350,6 +1350,35 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         totalMemoryRequired += self.formatXmxSizeToBytes(heapsize)
     return totalMemoryRequired
 
+  def get_yarn_nm_mem_in_mb(self, services, configurations):
+    """
+    Gets YARN NodeManager memory in MB (yarn.nodemanager.resource.memory-mb).
+    Reads from:
+      - configurations (if changed as part of current Stack Advisor invocation (output)), and services["changed-configurations"]
+        is empty, else
+      - services['configurations'] (input).
+
+    services["changed-configurations"] would be empty is Stack Advisor call if made from Blueprints (1st invocation). Subsequent
+    Stack Advisor calls will have it non-empty. We do this because in subsequent invocations, even if Stack Advsior calculates this
+    value (configurations), it is finally not recommended, making 'input' value to survive.
+    """
+    yarn_nm_mem_in_mb = None
+
+    yarn_site = getServicesSiteProperties(services, "yarn-site")
+    yarn_site_properties = getSiteProperties(configurations, "yarn-site")
+
+    # Check if services["changed-configurations"] is empty and 'yarn.nodemanager.resource.memory-mb' is modified in the current Stack Advisor invocation.
+    if not ("changed-configurations" in services and services["changed-configurations"]) and yarn_site_properties and 'yarn.nodemanager.resource.memory-mb' in yarn_site_properties:
+      yarn_nm_mem_in_mb = float(yarn_site_properties['yarn.nodemanager.resource.memory-mb'])
+    elif yarn_site and 'yarn.nodemanager.resource.memory-mb' in yarn_site:
+      # Check if 'yarn.nodemanager.resource.memory-mb' is input in services array.
+      yarn_nm_mem_in_mb = float(yarn_site['yarn.nodemanager.resource.memory-mb'])
+
+    if yarn_nm_mem_in_mb <= 0.0:
+      Logger.warning("'yarn.nodemanager.resource.memory-mb' current value : {0}. Expected value : > 0".format(yarn_nm_mem_in_mb))
+
+    return yarn_nm_mem_in_mb
+
   def getPreferredMountPoints(self, hostInfo):
 
     # '/etc/resolv.conf', '/etc/hostname', '/etc/hosts' are docker specific mount points
@@ -1438,10 +1467,37 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
 
   def validateYARNConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
     clusterEnv = getSiteProperties(configurations, "cluster-env")
-    validationItems = [ {"config-name": 'yarn.nodemanager.resource.memory-mb', "item": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'yarn.nodemanager.resource.memory-mb')},
+
+    validationItems = [ {"config-name": 'yarn.nodemanager.resource.memory-mb', "item": self.validatorGreaterThenDefaultValue(properties, recommendedDefaults, 'yarn.nodemanager.resource.memory-mb')},
                         {"config-name": 'yarn.scheduler.minimum-allocation-mb', "item": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'yarn.scheduler.minimum-allocation-mb')},
                         {"config-name": 'yarn.nodemanager.linux-container-executor.group', "item": self.validatorEqualsPropertyItem(properties, "yarn.nodemanager.linux-container-executor.group", clusterEnv, "user_group")},
-                        {"config-name": 'yarn.scheduler.maximum-allocation-mb', "item": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'yarn.scheduler.maximum-allocation-mb')} ]
+                        {"config-name": 'yarn.scheduler.maximum-allocation-mb', "item": self.validatorGreaterThenDefaultValue(properties, recommendedDefaults, 'yarn.scheduler.maximum-allocation-mb')} ]
+    nmMemory = int(self.get_yarn_nm_mem_in_mb(services, configurations))
+    if "items" in hosts and len(hosts["items"]) > 0:
+      nodeManagerHosts = self.getHostsWithComponent("YARN", "NODEMANAGER", services, hosts)
+      nmLowMemoryHosts = []
+      # NodeManager host with least memory is generally used in calculations as it will work in larger hosts.
+      if nodeManagerHosts is not None and len(nodeManagerHosts) > 0:
+        for nmHost in nodeManagerHosts:
+          nmHostName = nmHost["Hosts"]["host_name"]
+          componentNames = []
+          for service in services["services"]:
+            for component in service["components"]:
+              if not self.isClientComponent(component) and component["StackServiceComponents"]["hostnames"] is not None:
+                if nmHostName in component["StackServiceComponents"]["hostnames"]:
+                  componentNames.append(component["StackServiceComponents"]["component_name"])
+          requiredMemory = self.getMemorySizeRequired(services, componentNames, configurations)
+          unusedMemory = int((nmHost["Hosts"]["total_mem"] * 1024 - requiredMemory)/ (1024 * 1024)) # in MB
+          if nmMemory > unusedMemory:
+            nmLowMemoryHosts.append(nmHostName)
+
+        if len(nmLowMemoryHosts) > 0:
+          validationItems.append({"config-name": "yarn.nodemanager.resource.memory-mb",
+            "item": self.getWarnItem(
+                "Node manager hosts with high memory usage found (examples : {0}). Consider reducing the allocated "
+                "memory for containers or moving other co-located components "
+                "to a different host.".format(",".join(nmLowMemoryHosts[:3])))})
+
     return self.toConfigurationValidationProblems(validationItems, "yarn-site")
 
   def validateYARNEnvConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
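
The warning condition added above compares the configured NodeManager allocation against what is actually left on each NodeManager host once every co-located, non-client component's memory requirement is subtracted. A worked example using the numbers from the test fixture further down (assuming, as the arithmetic implies, that Hosts.total_mem is reported in KB and getMemorySizeRequired() returns bytes):

total_mem_kb   = 12582912                # 12 GB host
required_bytes = 1024 * 1024 * 1024      # 1024 MB default NodeManager heap, the only co-located daemon
unused_mb = int((total_mem_kb * 1024 - required_bytes) / (1024 * 1024))
print(unused_mb)                         # 11264
nm_memory_mb = 12288                     # user-set yarn.nodemanager.resource.memory-mb
print(nm_memory_mb > unused_mb)          # True -> host is flagged and the WARN item is emitted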

http://git-wip-us.apache.org/repos/asf/ambari/blob/6a811557/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index d2c0459..17f0c59 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -109,7 +109,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
       "ATLAS": {"application-properties": self.validateAtlasConfigurations},
       "HIVE": {"hive-interactive-env": self.validateHiveInteractiveEnvConfigurations,
                "hive-interactive-site": self.validateHiveInteractiveSiteConfigurations},
-      "YARN": {"yarn-site": self.validateYarnConfigurations},
+      "YARN": {"yarn-site": self.validateYARNConfigurations},
       "RANGER": {"ranger-tagsync-site": self.validateRangerTagsyncConfigurations},
       "SPARK2": {"spark2-defaults": self.validateSpark2Defaults,
                  "spark2-thrift-sparkconf": self.validateSpark2ThriftSparkConf},
@@ -247,7 +247,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     ]
     return self.toConfigurationValidationProblems(validationItems, "spark2-thrift-sparkconf")
 
-  def validateYarnConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
+  def validateYARNConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
     parentValidationProblems = super(HDP25StackAdvisor, self).validateYARNConfigurations(properties, recommendedDefaults, configurations, services, hosts)
     yarn_site_properties = self.getSiteProperties(configurations, "yarn-site")
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
@@ -1361,35 +1361,6 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     if yarn_min_container_size < 256:
       return 256
 
-  def get_yarn_nm_mem_in_mb(self, services, configurations):
-    """
-    Gets YARN NodeManager memory in MB (yarn.nodemanager.resource.memory-mb).
-    Reads from:
-      - configurations (if changed as part of current Stack Advisor invocation (output)), and services["changed-configurations"]
-        is empty, else
-      - services['configurations'] (input).
-
-    services["changed-configurations"] would be empty is Stack Advisor call if made from Blueprints (1st invocation). Subsequent
-    Stack Advisor calls will have it non-empty. We do this because in subsequent invocations, even if Stack Advsior calculates this
-    value (configurations), it is finally not recommended, making 'input' value to survive.
-    """
-    yarn_nm_mem_in_mb = None
-
-    yarn_site = self.getServicesSiteProperties(services, "yarn-site")
-    yarn_site_properties = self.getSiteProperties(configurations, "yarn-site")
-
-    # Check if services["changed-configurations"] is empty and 'yarn.nodemanager.resource.memory-mb' is modified in current ST invocation.
-    if not services["changed-configurations"] and yarn_site_properties and 'yarn.nodemanager.resource.memory-mb' in yarn_site_properties:
-      yarn_nm_mem_in_mb = float(yarn_site_properties['yarn.nodemanager.resource.memory-mb'])
-    elif yarn_site and 'yarn.nodemanager.resource.memory-mb' in yarn_site:
-      # Check if 'yarn.nodemanager.resource.memory-mb' is input in services array.
-        yarn_nm_mem_in_mb = float(yarn_site['yarn.nodemanager.resource.memory-mb'])
-
-    if yarn_nm_mem_in_mb <= 0.0:
-      Logger.warning("'yarn.nodemanager.resource.memory-mb' current value : {0}. Expected value : > 0".format(yarn_nm_mem_in_mb))
-
-    return yarn_nm_mem_in_mb
-
   def calculate_tez_am_container_size(self, services, total_cluster_capacity):
     """
     Calculates Tez App Master container size (tez.am.resource.memory.mb) for tez_hive2/tez-site on initialization if values read is 0.

http://git-wip-us.apache.org/repos/asf/ambari/blob/6a811557/ambari-server/src/main/resources/stacks/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/stack_advisor.py b/ambari-server/src/main/resources/stacks/stack_advisor.py
index ad3b510..6fb014e 100644
--- a/ambari-server/src/main/resources/stacks/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/stack_advisor.py
@@ -2151,6 +2151,24 @@ class DefaultStackAdvisor(StackAdvisor):
       return self.getWarnItem("Value is less than the recommended default of {0}".format(defaultValue))
     return None
 
+  def validatorGreaterThenDefaultValue(self, properties, recommendedDefaults, propertyName):
+    if propertyName not in recommendedDefaults:
+      # If a property name exists in, say, hbase-env and hbase-site (which is allowed), then it will exist in the
+      # "properties" dictionary, but not necessarily in the "recommendedDefaults" dictionary. In this case, ignore it.
+      return None
+
+    if not propertyName in properties:
+      return self.getErrorItem("Value should be set")
+    value = self.to_number(properties[propertyName])
+    if value is None:
+      return self.getErrorItem("Value should be integer")
+    defaultValue = self.to_number(recommendedDefaults[propertyName])
+    if defaultValue is None:
+      return None
+    if value > defaultValue:
+      return self.getWarnItem("Value is greater than the recommended default of {0}".format(defaultValue))
+    return None
+
   def validatorEqualsPropertyItem(self, properties1, propertyName1,
                                   properties2, propertyName2,
                                   emptyAllowed=False):
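
validatorGreaterThenDefaultValue is the mirror image of the existing validatorLessThenDefaultValue: it warns only when the configured value exceeds the recommendation. A minimal Python restatement of its decision path (using int() where the stack advisor uses its own to_number() helper, which is a simplification):

def greater_than_default(properties, recommended_defaults, name):
    if name not in recommended_defaults:
        return None                              # no recommendation for this property: ignore it
    if name not in properties:
        return "ERROR: Value should be set"
    try:
        value = int(properties[name])
    except ValueError:
        return "ERROR: Value should be integer"
    try:
        default = int(recommended_defaults[name])
    except ValueError:
        return None                              # unparseable recommendation: nothing to compare
    if value > default:
        return "WARN: Value is greater than the recommended default of {0}".format(default)
    return None

print(greater_than_default({"yarn.scheduler.maximum-allocation-mb": "12288"},
                           {"yarn.scheduler.maximum-allocation-mb": "10240"},
                           "yarn.scheduler.maximum-allocation-mb"))
# WARN: Value is greater than the recommended default of 10240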

http://git-wip-us.apache.org/repos/asf/ambari/blob/6a811557/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index ff25512..a6931c5 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -776,7 +776,7 @@ class TestHDP206StackAdvisor(TestCase):
 
     # Test - Cluster data with 2 hosts - pick minimum memory
     servicesList.append("YARN")
-    services = services = {"services":
+    services = {"services":
                   [{"StackServices":
                       {"service_name" : "YARN",
                        "service_version" : "2.6.0.2.2"
@@ -3401,7 +3401,41 @@ class TestHDP206StackAdvisor(TestCase):
         }
       }
     }
-    services = {'configurations': {} }
+
+    services = {"services":
+      [{"StackServices":
+        {"service_name" : "YARN",
+          "service_version" : "2.6.0.2.2"
+        },
+        "components":[
+          {
+            "StackServiceComponents":{
+              "advertise_version":"true",
+              "cardinality":"1+",
+              "component_category":"SLAVE",
+              "component_name":"NODEMANAGER",
+              "custom_commands":[
+
+              ],
+              "display_name":"NodeManager",
+              "is_client":"false",
+              "is_master":"false",
+              "service_name":"YARN",
+              "stack_name":"HDP",
+              "stack_version":"2.2",
+              "hostnames":[
+                "host1",
+                "host2"
+              ]
+            },
+            "dependencies":[
+            ]
+          }
+        ],
+      }],
+      "configurations": {}
+    }
+
     recommendedDefaults = {'yarn.nodemanager.resource.memory-mb' : '12288',
       'yarn.scheduler.minimum-allocation-mb' : '3072',
       'yarn.nodemanager.linux-container-executor.group': 'hadoop',
@@ -3413,3 +3447,78 @@ class TestHDP206StackAdvisor(TestCase):
 
     res = self.stackAdvisor.validateYARNConfigurations(properties, recommendedDefaults, configurations, services, {})
     self.assertFalse(res)
+
+    hosts = {
+      "items" : [
+        {
+          "Hosts" : {
+            "host_name" : "host1",
+            "cpu_count" : 2,
+            "total_mem" : 12582912,
+            "disk_info" : [
+              {
+                "available" : "21052800",
+                "device" : "/dev/vda1",
+                "used" : "3303636",
+                "percent" : "14%",
+                "size" : "25666616",
+                "type" : "ext4",
+                "mountpoint" : "/"
+              },
+              {
+                "available" : "244732200",
+                "device" : "/dev/vdb",
+                "used" : "60508",
+                "percent" : "1%",
+                "size" : "257899908",
+                "type" : "ext4",
+                "mountpoint" : "/grid/0"
+              }
+            ]
+          }
+        }
+      ]
+    }
+    # Cluster RAM = 12 GB (12582912 KB)
+    # YARN NodeManager HeapSize = 1024 MB (default)
+    # Max Container Allocation = 11264 MB ( user set to 12288)
+    expectedItems = [
+      {
+        'config-type':  'yarn-site',
+        'message': 'Node manager hosts with high memory usage found (examples : host1). '
+                   'Consider reducing the allocated memory for containers or '
+                   'moving other co-located components to a different host.',
+        'type': 'configuration',
+        'config-name': 'yarn.nodemanager.resource.memory-mb',
+        'level': 'WARN'
+      }
+    ]
+    items = self.stackAdvisor.validateYARNConfigurations(properties, recommendedDefaults, configurations, services, hosts)
+    self.assertEquals(expectedItems, items)
+
+
+    recommendedDefaults = {'yarn.nodemanager.resource.memory-mb' : '10240',
+      'yarn.scheduler.minimum-allocation-mb' : '3072',
+      'yarn.nodemanager.linux-container-executor.group': 'hadoop',
+      'yarn.scheduler.maximum-allocation-mb': '10240'}
+
+    expectedItems = [
+      {
+        'config-type':  'yarn-site',
+        'message': 'Value is greater than the recommended default of 10240',
+        'type': 'configuration',
+        'config-name': 'yarn.nodemanager.resource.memory-mb',
+        'level': 'WARN'
+      },
+      {
+        'config-type':  'yarn-site',
+        'message': 'Value is greater than the recommended default of 10240',
+        'type': 'configuration',
+        'config-name': 'yarn.scheduler.maximum-allocation-mb',
+        'level': 'WARN'
+      }
+    ]
+
+    items = self.stackAdvisor.validateYARNConfigurations(properties, recommendedDefaults, configurations, services, {})
+    self.assertEquals(expectedItems, items)
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/6a811557/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index ad962fd..a53cb25 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -444,7 +444,7 @@ class TestHDP25StackAdvisor(TestCase):
     self.assertEquals(validations[0], expected)
 
 
-  def test_validateYarnConfigurations(self):
+  def test_validateYARNConfigurations(self):
     properties = {'enable_hive_interactive': 'true',
                   'hive.tez.container.size': '2048', "yarn.nodemanager.linux-container-executor.group": "hadoop"}
     recommendedDefaults = {'enable_hive_interactive': 'true',
@@ -473,7 +473,7 @@ class TestHDP25StackAdvisor(TestCase):
     res_expected = [
       {'config-type': 'yarn-site', 'message': 'While enabling HIVE_SERVER_INTERACTIVE it is recommended that you enable work preserving restart in YARN.', 'type': 'configuration', 'config-name': 'yarn.resourcemanager.work-preserving-recovery.enabled', 'level': 'WARN'}
     ]
-    res = self.stackAdvisor.validateYarnConfigurations(properties, recommendedDefaults, configurations, services, {})
+    res = self.stackAdvisor.validateYARNConfigurations(properties, recommendedDefaults, configurations, services, {})
     self.assertEquals(res, res_expected)
     pass
 


[04/50] [abbrv] ambari git commit: AMBARI-19662. Unknown attributes should not be allowed in quick link filter definitions (Balazs Bence Sari via magyari_sandor)

Posted by nc...@apache.org.
AMBARI-19662. Unknown attributes should not be allowed in quick link filter definitions (Balazs Bence Sari via magyari_sandor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/49ba6359
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/49ba6359
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/49ba6359

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 49ba635950d2632dcac406e242df94f8a131ce2f
Parents: 8362dce
Author: Balazs Bence Sari <bs...@hortonworks.com>
Authored: Mon Jan 23 11:54:37 2017 +0100
Committer: Sandor Magyari <sm...@hortonworks.com>
Committed: Mon Jan 23 12:55:15 2017 +0100

----------------------------------------------------------------------
 .../QuickLinksProfileBuilder.java               | 26 +++++++++++++++-----
 .../QuickLinksProfileParser.java                | 21 +++++++++++++---
 .../QuickLinksProfileBuilderTest.java           | 13 ++++++++--
 .../QuickLinksProfileParserTest.java            | 11 +++++++--
 .../inconsistent_quicklinks_profile_3.json      |  9 +++++++
 5 files changed, 67 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/49ba6359/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileBuilder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileBuilder.java b/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileBuilder.java
index fca1155..627b1bc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileBuilder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileBuilder.java
@@ -26,10 +26,14 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import javax.annotation.Nullable;
 
+import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Sets;
 
 /**
  * Class to create a {@link QuickLinksProfile} based on data received in a request
@@ -39,6 +43,8 @@ public class QuickLinksProfileBuilder {
   public static final String NAME = "name";
   public static final String COMPONENTS = "components";
   public static final String FILTERS = "filters";
+  public static final Set<String> ALLOWED_FILTER_ATTRIBUTES =
+      ImmutableSet.of(VISIBLE, LINK_NAME, LINK_ATTRIBUTE);
 
   /**
    *
@@ -102,16 +108,24 @@ public class QuickLinksProfileBuilder {
     }
     List<Filter> filters  = new ArrayList<>();
     for (Map<String, String> filterAsMap: (Collection<Map<String, String>>)filtersRaw) {
+      Set<String> invalidAttributes = Sets.difference(filterAsMap.keySet(), ALLOWED_FILTER_ATTRIBUTES);
+
+      Preconditions.checkArgument(invalidAttributes.isEmpty(),
+          "%s%s",
+          QuickLinksFilterDeserializer.PARSE_ERROR_MESSAGE_INVALID_JSON_TAG,
+          invalidAttributes);
+
       String linkName = filterAsMap.get(LINK_NAME);
       String attributeName = filterAsMap.get(LINK_ATTRIBUTE);
       boolean visible = Boolean.parseBoolean(filterAsMap.get(VISIBLE));
 
-      if (null != linkName && null != attributeName) {
-        throw new IllegalArgumentException(
-            String.format("%s link_name: %s, link_attribute: %s",
-                QuickLinksFilterDeserializer.PARSE_ERROR_MESSAGE, linkName, attributeName));
-      }
-      else if (null != linkName) {
+      Preconditions.checkArgument(null == linkName || null == attributeName,
+         "%s link_name: %s, link_attribute: %s",
+          QuickLinksFilterDeserializer.PARSE_ERROR_MESSAGE_AMBIGUOUS_FILTER,
+          linkName,
+          attributeName);
+
+      if (null != linkName) {
         filters.add(Filter.linkNameFilter(linkName, visible));
       }
       else if (null != attributeName) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/49ba6359/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileParser.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileParser.java b/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileParser.java
index 150b7d4..1891061 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileParser.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileParser.java
@@ -20,6 +20,8 @@ package org.apache.ambari.server.state.quicklinksprofile;
 
 import java.io.IOException;
 import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.JsonParser;
@@ -65,9 +67,12 @@ public class QuickLinksProfileParser {
  * Custom deserializer is needed to handle filter polymorphism.
  */
 class QuickLinksFilterDeserializer extends StdDeserializer<Filter> {
-  static final String PARSE_ERROR_MESSAGE =
+  static final String PARSE_ERROR_MESSAGE_AMBIGUOUS_FILTER =
       "A filter is not allowed to declare both link_name and link_attribute at the same time.";
 
+  static final String PARSE_ERROR_MESSAGE_INVALID_JSON_TAG =
+      "Invalid attribute(s) in filter declaration: ";
+
   QuickLinksFilterDeserializer() {
     super(Filter.class);
   }
@@ -88,22 +93,32 @@ class QuickLinksFilterDeserializer extends StdDeserializer<Filter> {
     ObjectMapper mapper = (ObjectMapper) parser.getCodec();
     ObjectNode root = (ObjectNode) mapper.readTree(parser);
     Class<? extends Filter> filterClass = null;
+    List<String> invalidAttributes = new ArrayList<>();
     for (String fieldName: ImmutableList.copyOf(root.getFieldNames())) {
       switch(fieldName) {
         case LinkAttributeFilter.LINK_ATTRIBUTE:
           if (null != filterClass) {
-            throw new JsonParseException(PARSE_ERROR_MESSAGE, parser.getCurrentLocation());
+            throw new JsonParseException(PARSE_ERROR_MESSAGE_AMBIGUOUS_FILTER, parser.getCurrentLocation());
           }
           filterClass = LinkAttributeFilter.class;
           break;
         case LinkNameFilter.LINK_NAME:
           if (null != filterClass) {
-            throw new JsonParseException(PARSE_ERROR_MESSAGE, parser.getCurrentLocation());
+            throw new JsonParseException(PARSE_ERROR_MESSAGE_AMBIGUOUS_FILTER, parser.getCurrentLocation());
           }
           filterClass = LinkNameFilter.class;
           break;
+        case Filter.VISIBLE:
+          // silently ignore here, will be parsed later in mapper.readValue
+          break;
+        default:
+          invalidAttributes.add(fieldName);
       }
     }
+    if (!invalidAttributes.isEmpty()) {
+      throw new JsonParseException(PARSE_ERROR_MESSAGE_INVALID_JSON_TAG + invalidAttributes,
+          parser.getCurrentLocation());
+    }
     if (null == filterClass) {
       filterClass = AcceptAllFilter.class;
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/49ba6359/ambari-server/src/test/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileBuilderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileBuilderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileBuilderTest.java
index 1cc3fd3..49244d4 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileBuilderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileBuilderTest.java
@@ -33,6 +33,7 @@ import javax.annotation.Nullable;
 
 import org.junit.Test;
 
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Sets;
 
 public class QuickLinksProfileBuilderTest {
@@ -130,14 +131,22 @@ public class QuickLinksProfileBuilderTest {
   }
 
   @Test(expected = QuickLinksProfileEvaluationException.class)
-  public void testBuildProfileInvalidProfileDefiniton() throws Exception {
+  public void testBuildProfileInvalidProfileDefiniton_contradictingFilters() throws Exception {
     // Contradicting rules in the profile
     Set<Map<String, String>> filters = newHashSet(
         filter(null, "sso", true),
         filter(null, "sso", false)
     );
 
-    String profileJson = new QuickLinksProfileBuilder().buildQuickLinksProfile(filters, null);
+    new QuickLinksProfileBuilder().buildQuickLinksProfile(filters, null);
+  }
+
+  @Test(expected = QuickLinksProfileEvaluationException.class)
+  public void testBuildProfileInvalidProfileDefiniton_invalidAttribute() throws Exception {
+    Map<String, String> badFilter = ImmutableMap.of("visible", "true", "linkkk_atirbuteee", "sso");
+    Set<Map<String, String>> filters = newHashSet(badFilter);
+
+    new QuickLinksProfileBuilder().buildQuickLinksProfile(filters, null);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/49ba6359/ambari-server/src/test/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileParserTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileParserTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileParserTest.java
index 57badb8..8b01ca5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileParserTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/quicklinksprofile/QuickLinksProfileParserTest.java
@@ -67,10 +67,17 @@ public class QuickLinksProfileParserTest {
   }
 
   @Test(expected = JsonParseException.class)
-  public void testParseInconsistentProfile() throws Exception {
+  public void testParseInconsistentProfile_ambiguousFilterDefinition() throws Exception {
     String profileName = "inconsistent_quicklinks_profile.json";
     QuickLinksProfileParser parser = new QuickLinksProfileParser();
-    QuickLinksProfile profile = parser.parse(Resources.getResource(profileName));
+    parser.parse(Resources.getResource(profileName));
+  }
+
+  @Test(expected = JsonParseException.class)
+  public void testParseInconsistentProfile_misspelledFilterDefinition() throws Exception {
+    String profileName = "inconsistent_quicklinks_profile_3.json";
+    QuickLinksProfileParser parser = new QuickLinksProfileParser();
+    parser.parse(Resources.getResource(profileName));
   }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/49ba6359/ambari-server/src/test/resources/inconsistent_quicklinks_profile_3.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/inconsistent_quicklinks_profile_3.json b/ambari-server/src/test/resources/inconsistent_quicklinks_profile_3.json
new file mode 100644
index 0000000..c349bb2
--- /dev/null
+++ b/ambari-server/src/test/resources/inconsistent_quicklinks_profile_3.json
@@ -0,0 +1,9 @@
+{
+  "filters": [
+    {
+      "linkkkk_attirubutee": "sso",
+      "visible": true
+    }
+  ],
+  "services": []
+}
\ No newline at end of file
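
For context, the stricter deserializer above means a quicklinks filter object
may carry at most one filter-defining attribute plus the optional "visible"
flag; anything else now fails parsing instead of being silently dropped. A
minimal sketch of the possible shapes, using Python dict literals to stand in
for the JSON filter objects (the attribute names "link_name" and
"link_attribute" are assumed to mirror the LinkNameFilter.LINK_NAME and
LinkAttributeFilter.LINK_ATTRIBUTE constants):

    # Accepted: one filter-defining attribute plus the optional "visible" flag.
    valid = {"link_name": "sso", "visible": True}
    # Accepted: no filter-defining attribute falls back to AcceptAllFilter.
    accept_all = {"visible": True}
    # Rejected with PARSE_ERROR_MESSAGE_AMBIGUOUS_FILTER: two filter types at once.
    ambiguous = {"link_name": "sso", "link_attribute": "sso"}
    # Rejected with PARSE_ERROR_MESSAGE_INVALID_JSON_TAG: any unknown attribute,
    # such as the misspelled key in inconsistent_quicklinks_profile_3.json above.
    misspelled = {"linkkkk_attirubutee": "sso", "visible": True}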


[25/50] [abbrv] ambari git commit: AMBARI-19684. Agent registration fails as local OS is not compatible with primary OS family.(vbrodetskyi)

Posted by nc...@apache.org.
AMBARI-19684. Agent registration fails as local OS is not compatible with primary OS family.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c6c35555
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c6c35555
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c6c35555

Branch: refs/heads/branch-dev-patch-upgrade
Commit: c6c35555eb157f3cd80397db428d21441d3ec5e6
Parents: 796f52a
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Tue Jan 24 13:19:28 2017 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Tue Jan 24 13:19:28 2017 +0200

----------------------------------------------------------------------
 ambari-server/conf/unix/create-python-wrap.sh  | 40 +++++++++++++++++++++
 ambari-server/src/main/assemblies/server.xml   |  5 +++
 ambari-server/src/main/python/bootstrap.py     | 31 ++++++++++++++++
 ambari-server/src/main/python/os_check_type.py |  2 +-
 ambari-server/src/test/python/TestBootstrap.py |  4 +--
 5 files changed, 79 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c6c35555/ambari-server/conf/unix/create-python-wrap.sh
----------------------------------------------------------------------
diff --git a/ambari-server/conf/unix/create-python-wrap.sh b/ambari-server/conf/unix/create-python-wrap.sh
new file mode 100644
index 0000000..3190073
--- /dev/null
+++ b/ambari-server/conf/unix/create-python-wrap.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+PYTHON_WRAPPER_DIR="${ROOT}/usr/bin/"
+PYTHON_WRAPPER_TARGET="${PYTHON_WRAPPER_DIR}/ambari-python-wrap"
+
+# remove any old python wrapper
+rm -f "$PYTHON_WRAPPER_TARGET"
+
+AMBARI_PYTHON=""
+python_binaries=( "/usr/bin/python" "/usr/bin/python2" "/usr/bin/python2.7" "/usr/bin/python2.6" )
+for python_binary in "${python_binaries[@]}"
+do
+  $python_binary -c "import sys ; ver = sys.version_info ; sys.exit(not (ver >= (2,6) and ver<(3,0)))" 1>/dev/null 2>/dev/null
+
+  if [ $? -eq 0 ] ; then
+    AMBARI_PYTHON="$python_binary"
+    break;
+  fi
+done
+
+if [ -z "$AMBARI_PYTHON" ] ; then
+  >&2 echo "Cannot detect a python for ambari to use. Please manually set the $PYTHON_WRAPPER_TARGET link to point to a correct python binary"
+else
+  mkdir -p "$PYTHON_WRAPPER_DIR"
+  ln -s "$AMBARI_PYTHON" "$PYTHON_WRAPPER_TARGET"
+fi
\ No newline at end of file
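
The inlined probe in the loop above is the heart of the script: a candidate
binary is accepted only if its version is at least 2.6 and below 3.0. The
same predicate, unrolled as a stand-alone sketch:

    import sys

    # Accept CPython 2.6.x and 2.7.x; reject anything older and all of Python 3.
    ver = sys.version_info
    ok = (ver >= (2, 6)) and (ver < (3, 0))
    sys.exit(0 if ok else 1)  # exit code 0 marks this interpreter as usable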

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6c35555/ambari-server/src/main/assemblies/server.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/assemblies/server.xml b/ambari-server/src/main/assemblies/server.xml
index 5055d46..d65232c 100644
--- a/ambari-server/src/main/assemblies/server.xml
+++ b/ambari-server/src/main/assemblies/server.xml
@@ -237,6 +237,11 @@
       <outputDirectory>/var/lib/ambari-server/</outputDirectory>
     </file>
     <file>
+      <fileMode>755</fileMode>
+      <source>conf/unix/create-python-wrap.sh</source>
+      <outputDirectory>/var/lib/ambari-server/</outputDirectory>
+    </file>
+    <file>
       <fileMode>700</fileMode>
       <source>conf/unix/install-helper.sh</source>
       <outputDirectory>/var/lib/ambari-server/</outputDirectory>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6c35555/ambari-server/src/main/python/bootstrap.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/bootstrap.py b/ambari-server/src/main/python/bootstrap.py
index e576fc8..d836040 100755
--- a/ambari-server/src/main/python/bootstrap.py
+++ b/ambari-server/src/main/python/bootstrap.py
@@ -54,6 +54,8 @@ DEFAULT_AGENT_DATA_FOLDER = "/var/lib/ambari-agent/data"
 DEFAULT_AGENT_LIB_FOLDER = "/var/lib/ambari-agent"
 PYTHON_ENV="env PYTHONPATH=$PYTHONPATH:" + DEFAULT_AGENT_TEMP_FOLDER
 SERVER_AMBARI_SUDO = os.getenv('ROOT','/').rstrip('/') + "/var/lib/ambari-server/ambari-sudo.sh"
+CREATE_PYTHON_WRAP_SCRIPT = os.getenv('ROOT','/').rstrip('/') + "/var/lib/ambari-server/create-python-wrap.sh"
+REMOTE_CREATE_PYTHON_WRAP_SCRIPT = os.path.join(DEFAULT_AGENT_TEMP_FOLDER, 'create-python-wrap.sh')
 AMBARI_SUDO = os.path.join(DEFAULT_AGENT_TEMP_FOLDER, 'ambari-sudo.sh')
 
 class HostLog:
@@ -466,6 +468,19 @@ class BootstrapDefault(Bootstrap):
     self.host_log.write("\n")
     return result
 
+  def copyCreatePythonWrapScript(self):
+    # Copying the script which will create python wrap
+    fileToCopy = CREATE_PYTHON_WRAP_SCRIPT
+    target = self.TEMP_FOLDER
+    params = self.shared_state
+    self.host_log.write("==========================\n")
+    self.host_log.write("Copying create-python-wrap script...")
+    scp = SCP(params.user, params.sshPort, params.sshkey_file, self.host, fileToCopy,
+              target, params.bootdir, self.host_log)
+    result = scp.run()
+    self.host_log.write("\n")
+    return result
+
   def copyOsCheckScript(self):
     # Copying the os check script file
     fileToCopy = self.getOsCheckScript()
@@ -610,6 +625,20 @@ class BootstrapDefault(Bootstrap):
            " " + str(passphrase) + " " + str(server)+ " " + quote_bash_args(str(user_run_as)) + " " + str(version) + \
            " " + str(port)
 
+  def runCreatePythonWrapScript(self):
+    params = self.shared_state
+    self.host_log.write("==========================\n")
+    self.host_log.write("Running create-python-wrap script...")
+
+    command = "chmod a+x %s && %s" % \
+              (REMOTE_CREATE_PYTHON_WRAP_SCRIPT, REMOTE_CREATE_PYTHON_WRAP_SCRIPT)
+
+    ssh = SSH(params.user, params.sshPort, params.sshkey_file, self.host, command,
+              params.bootdir, self.host_log)
+    retcode = ssh.run()
+    self.host_log.write("\n")
+    return retcode
+
   def runOsCheckScript(self):
     params = self.shared_state
     self.host_log.write("==========================\n")
@@ -725,7 +754,9 @@ class BootstrapDefault(Bootstrap):
     action_queue = [self.createTargetDir,
                     self.copyAmbariSudo,
                     self.copyCommonFunctions,
+                    self.copyCreatePythonWrapScript,
                     self.copyOsCheckScript,
+                    self.runCreatePythonWrapScript,
                     self.runOsCheckScript,
                     self.checkSudoPackage
     ]
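
The position of the two new actions matters: the wrapper script is shipped and
executed before runOsCheckScript, since os_check_type.py (below) now carries
the /usr/bin/ambari-python-wrap shebang and would fail on hosts where that
link does not exist yet. The resulting per-host queue, reproduced from the
hunk above with the new steps marked:

    action_queue = [self.createTargetDir,
                    self.copyAmbariSudo,
                    self.copyCommonFunctions,
                    self.copyCreatePythonWrapScript,  # new: ship create-python-wrap.sh
                    self.copyOsCheckScript,
                    self.runCreatePythonWrapScript,   # new: create the wrapper link
                    self.runOsCheckScript,            # now safe to rely on the shebang
                    self.checkSudoPackage]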

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6c35555/ambari-server/src/main/python/os_check_type.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/os_check_type.py b/ambari-server/src/main/python/os_check_type.py
index f890504..34de34b 100644
--- a/ambari-server/src/main/python/os_check_type.py
+++ b/ambari-server/src/main/python/os_check_type.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/ambari-python-wrap
 
 '''
 Licensed to the Apache Software Foundation (ASF) under one

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6c35555/ambari-server/src/test/python/TestBootstrap.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestBootstrap.py b/ambari-server/src/test/python/TestBootstrap.py
index 8356f91..bea47f4 100644
--- a/ambari-server/src/test/python/TestBootstrap.py
+++ b/ambari-server/src/test/python/TestBootstrap.py
@@ -747,7 +747,7 @@ class TestBootstrap(TestCase):
     hasPassword_mock.return_value = False
     try_to_execute_mock.return_value = {"exitstatus": 0, "log":"log0", "errormsg":"errormsg0"}
     bootstrap_obj.run()
-    self.assertEqual(try_to_execute_mock.call_count, 8) # <- Adjust if changed
+    self.assertEqual(try_to_execute_mock.call_count, 10) # <- Adjust if changed
     self.assertTrue(createDoneFile_mock.called)
     self.assertEqual(bootstrap_obj.getStatus()["return_code"], 0)
 
@@ -758,7 +758,7 @@ class TestBootstrap(TestCase):
     hasPassword_mock.return_value = True
     try_to_execute_mock.return_value = {"exitstatus": 0, "log":"log0", "errormsg":"errormsg0"}
     bootstrap_obj.run()
-    self.assertEqual(try_to_execute_mock.call_count, 11) # <- Adjust if changed
+    self.assertEqual(try_to_execute_mock.call_count, 13) # <- Adjust if changed
     self.assertTrue(createDoneFile_mock.called)
     self.assertEqual(bootstrap_obj.getStatus()["return_code"], 0)
 


[26/50] [abbrv] ambari git commit: AMBARI-19689. Error on Files view - SIMPLE authentication is not enabled. (gauravn7)

Posted by nc...@apache.org.
AMBARI-19689. Error on Files view - SIMPLE authentication is not enabled. (gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/75b30a4e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/75b30a4e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/75b30a4e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 75b30a4e974bce0df3cd9d2d0efc428f101d8601
Parents: c6c3555
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Tue Jan 24 20:28:17 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Tue Jan 24 20:29:12 2017 +0530

----------------------------------------------------------------------
 .../org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java  | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/75b30a4e/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java
index d1d1bb0..714e229 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/ConfigurationBuilder.java
@@ -321,10 +321,10 @@ public class ConfigurationBuilder {
    * @throws HdfsApiException if configuration parsing failed
    */
   public Configuration buildConfig() throws HdfsApiException {
-    parseProperties();
-    setAuthParams(buildAuthenticationConfig());
     copyPropertiesBySite(CORE_SITE);
     copyPropertiesBySite(HDFS_SITE);
+    parseProperties();
+    setAuthParams(buildAuthenticationConfig());
 
     String umask = context.getProperties().get(UMASK_INSTANCE_PROPERTY);
     if (umask != null && !umask.isEmpty()) conf.set(UMASK_CLUSTER_PROPERTY, umask);
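
The fix is purely an ordering change: the raw core-site and hdfs-site
properties are copied into the Configuration first, and only then do
parseProperties() and setAuthParams() apply the values derived from the view
instance settings, so the explicitly configured authentication parameters take
precedence over the site files instead of being overwritten by them.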


[06/50] [abbrv] ambari git commit: AMBARI-19664. Log Search: Snapshot tour fix & configurable CORS filter (oleewere)

Posted by nc...@apache.org.
AMBARI-19664. Log Search: Snapshot tour fix & configurable CORS filter (oleewere)

Change-Id: I763e9ac69d1058f85a2be3a2aa036e008ff89b95


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9bd8b7f2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9bd8b7f2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9bd8b7f2

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 9bd8b7f2ff1c7d2ca2cfd4bf8b0833a1bec98bb9
Parents: 09c1894
Author: oleewere <ol...@gmail.com>
Authored: Sun Jan 22 22:29:19 2017 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Mon Jan 23 13:33:53 2017 +0100

----------------------------------------------------------------------
 .../conf/LogSearchHttpHeaderConfig.java         | 70 ++++++++++++++++++++
 .../ambari/logsearch/conf/SecurityConfig.java   | 11 +++
 .../web/filters/LogsearchCorsFilter.java        | 59 +++++++++++++++++
 .../src/main/webapp/scripts/utils/Tour.js       | 30 ++++-----
 4 files changed, 154 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9bd8b7f2/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/LogSearchHttpHeaderConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/LogSearchHttpHeaderConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/LogSearchHttpHeaderConfig.java
new file mode 100644
index 0000000..cb8c097
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/LogSearchHttpHeaderConfig.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class LogSearchHttpHeaderConfig {
+
+  @Value("${logsearch.http.header.access-control-allow-origin:*}")
+  private String accessControlAllowOrigin;
+
+  @Value("${logsearch.http.header.access-control-allow-headers:origin, content-type, accept, authorization}")
+  private String accessControlAllowHeaders;
+
+  @Value("${logsearch.http.header.access-control-allow-credentials:true}")
+  private String accessControlAllowCredentials;
+
+  @Value("${logsearch.http.header.access-control-allow-methods:GET, POST, PUT, DELETE, OPTIONS, HEAD}")
+  private String accessControlAllowMethods;
+
+  public String getAccessControlAllowOrigin() {
+    return accessControlAllowOrigin;
+  }
+
+  public void setAccessControlAllowOrigin(String accessControlAllowOrigin) {
+    this.accessControlAllowOrigin = accessControlAllowOrigin;
+  }
+
+  public String getAccessControlAllowHeaders() {
+    return accessControlAllowHeaders;
+  }
+
+  public void setAccessControlAllowHeaders(String accessControlAllowHeaders) {
+    this.accessControlAllowHeaders = accessControlAllowHeaders;
+  }
+
+  public String getAccessControlAllowCredentials() {
+    return accessControlAllowCredentials;
+  }
+
+  public void setAccessControlAllowCredentials(String accessControlAllowCredentials) {
+    this.accessControlAllowCredentials = accessControlAllowCredentials;
+  }
+
+  public String getAccessControlAllowMethods() {
+    return accessControlAllowMethods;
+  }
+
+  public void setAccessControlAllowMethods(String accessControlAllowMethods) {
+    this.accessControlAllowMethods = accessControlAllowMethods;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bd8b7f2/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java
index 115dcc3..b15ae43 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java
@@ -25,6 +25,7 @@ import org.apache.ambari.logsearch.web.authenticate.LogsearchAuthSuccessHandler;
 import org.apache.ambari.logsearch.web.authenticate.LogsearchLogoutSuccessHandler;
 import org.apache.ambari.logsearch.web.filters.LogsearchAuditLogsStateFilter;
 import org.apache.ambari.logsearch.web.filters.LogsearchAuthenticationEntryPoint;
+import org.apache.ambari.logsearch.web.filters.LogsearchCorsFilter;
 import org.apache.ambari.logsearch.web.filters.LogsearchKRBAuthenticationFilter;
 import org.apache.ambari.logsearch.web.filters.LogsearchJWTFilter;
 import org.apache.ambari.logsearch.web.filters.LogsearchSecurityContextFormationFilter;
@@ -46,6 +47,7 @@ import org.springframework.security.web.util.matcher.RequestMatcher;
 
 import javax.inject.Inject;
 import javax.inject.Named;
+import javax.servlet.Filter;
 import java.util.List;
 
 import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_SESSION_ID;
@@ -58,6 +60,9 @@ public class SecurityConfig extends WebSecurityConfigurerAdapter {
   private AuthPropsConfig authPropsConfig;
 
   @Inject
+  private LogSearchHttpHeaderConfig logSearchHttpHeaderConfig;
+
+  @Inject
   private SolrServiceLogPropsConfig solrServiceLogPropsConfig;
 
   @Inject
@@ -104,6 +109,7 @@ public class SecurityConfig extends WebSecurityConfigurerAdapter {
       .addFilterAfter(logsearchUserConfigFilter(), LogsearchSecurityContextFormationFilter.class)
       .addFilterAfter(logsearchAuditLogFilter(), LogsearchSecurityContextFormationFilter.class)
       .addFilterAfter(logsearchServiceLogFilter(), LogsearchSecurityContextFormationFilter.class)
+      .addFilterBefore(corsFilter(), LogsearchSecurityContextFormationFilter.class)
       .addFilterBefore(logsearchJwtFilter(), LogsearchSecurityContextFormationFilter.class)
       .logout()
         .logoutUrl("/logout.html")
@@ -112,6 +118,11 @@ public class SecurityConfig extends WebSecurityConfigurerAdapter {
   }
 
   @Bean
+  public LogsearchCorsFilter corsFilter() {
+    return new LogsearchCorsFilter(logSearchHttpHeaderConfig);
+  }
+
+  @Bean
   public LogsearchSecurityContextFormationFilter securityContextFormationFilter() {
     return new LogsearchSecurityContextFormationFilter();
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bd8b7f2/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchCorsFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchCorsFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchCorsFilter.java
new file mode 100644
index 0000000..f5e7bca
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchCorsFilter.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.web.filters;
+
+import org.apache.ambari.logsearch.conf.LogSearchHttpHeaderConfig;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+
+public class LogsearchCorsFilter implements Filter {
+
+  private LogSearchHttpHeaderConfig logSearchHttpHeaderConfig;
+
+  public LogsearchCorsFilter(LogSearchHttpHeaderConfig logSearchHttpHeaderConfig) {
+    this.logSearchHttpHeaderConfig = logSearchHttpHeaderConfig;
+  }
+
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+  }
+
+  @Override
+  public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain)
+    throws IOException, ServletException {
+    HttpServletResponse response = (HttpServletResponse) servletResponse;
+    response.setHeader("Access-Control-Allow-Origin", logSearchHttpHeaderConfig.getAccessControlAllowOrigin());
+    response.setHeader("Access-Control-Allow-Headers", logSearchHttpHeaderConfig.getAccessControlAllowHeaders());
+    response.setHeader("Access-Control-Allow-Credentials", logSearchHttpHeaderConfig.getAccessControlAllowCredentials());
+    response.setHeader("Access-Control-Allow-Methods", logSearchHttpHeaderConfig.getAccessControlAllowMethods());
+    filterChain.doFilter(servletRequest, servletResponse);
+  }
+
+  @Override
+  public void destroy() {
+
+  }
+}
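
For operators, the filter is driven entirely by the four
logsearch.http.header.* properties declared in LogSearchHttpHeaderConfig
above, each with a permissive default. For example, to lock CORS down to a
single origin, a line like the following in the Log Search properties file
would suffice (the host name is a hypothetical value, shown for illustration):

    logsearch.http.header.access-control-allow-origin=https://logsearch.example.com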

http://git-wip-us.apache.org/repos/asf/ambari/blob/9bd8b7f2/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Tour.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Tour.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Tour.js
index fff82ed..f9d093b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Tour.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Tour.js
@@ -56,6 +56,10 @@ define(['require', 'tour'], function (require, TourJs) {
         content: "This filter allows you to filter the log data depending upon the component selection. Include Component is again \"or\" condition and Exclude Component is \"and\" condition between multiple selection.",
         placement: "bottom"
       }, {
+        element: $('[data-id="startStop"]').get(0),
+        content: "Snapshot helps you quickly see all logs generated while you reproduce an issue. Click start, reproduce your issue, click stop and we'll load all logs that were produced during that time frame.",
+        placement: "bottom"
+      }, {
         element: $('#r_Histogram').get(0),
         content: "Histogram displays comparative ratios of log severity during the currently defined time filter.",
         placement: "top"
@@ -64,10 +68,6 @@ define(['require', 'tour'], function (require, TourJs) {
         content: "The Log Data default view displays consolidated for all hosts.",
         placement: "top",
       }, {
-        element: 'a[data-id="timerBtn"]',
-        content: "Snapshot helps you quickly see all logs generated while you reproduce an issue. Click start, reproduce your issue, click stop and we'll load all logs that were produced during that time frame.",
-        placement: "top"
-      }, {
         element: document.querySelectorAll('#r_BubbleTable')[1],
         content: "Expand the Log Data tree view and choose components to further refine your diagnostics.",
         placement: "top",
@@ -146,8 +146,6 @@ define(['require', 'tour'], function (require, TourJs) {
         } else if (tour._state.current_step == 6) {
         } else if (tour._state.current_step == 7) {
           appendFingerAndOverlayDiv(tour._options.showFinger[tour._state.current_step]);
-        } else if (tour._state.current_step == 8) {
-          appendFingerAndOverlayDiv(tour._options.showFinger[tour._state.current_step]);
         } else if (tour._state.current_step == 9) {
           appendFingerAndOverlayDiv(tour._options.showFinger[tour._state.current_step]);
         } else if (tour._state.current_step == 10) {
@@ -257,26 +255,26 @@ define(['require', 'tour'], function (require, TourJs) {
       },
       8: {
         css: {
-          'top': '45px',
-          'left': '122px'
+          'top': '30px',
+          'left': '309px'
         },
-        handDirection: 'up',
+        handDirection: 'down',
         handClass: "up-down"
       },
       9: {
         css: {
           'top': '45px',
-          'left': '100px'
+          'left': '122px'
         },
-        appendIndex: 0,
-        handDirection: 'down',
+        handDirection: 'up',
         handClass: "up-down"
       },
       10: {
         css: {
-          'top': '30px',
-          'left': '309px'
+          'top': '45px',
+          'left': '100px'
         },
+        appendIndex: 0,
         handDirection: 'down',
         handClass: "up-down"
       },
@@ -396,13 +394,13 @@ define(['require', 'tour'], function (require, TourJs) {
         $('#r_BubbleTable input[value="H"]').click();
         removeFingerAndOverlayDiv();
       } else if (tour._state.current_step == 8) {
-        $('#r_BubbleTable li[data-parent="true"]').first().find('span[data-state="collapse"]').first().click();
-        $('#r_BubbleTable li[data-parent="true"]').first().find('a[data-type="C"]').first().removeClass('hidden');
         removeFingerAndOverlayDiv();
       } else if (tour._state.current_step == 9) {
+        $('#r_BubbleTable li[data-parent="true"]').first().find('span[data-state="collapse"]').first().click();
         $('#r_BubbleTable li[data-parent="true"]').first().find('a[data-type="C"]').first().removeClass('hidden');
         removeFingerAndOverlayDiv();
       } else if (tour._state.current_step == 10) {
+        $('#r_BubbleTable li[data-parent="true"]').first().find('a[data-type="C"]').first().removeClass('hidden');
         removeFingerAndOverlayDiv();
       } else if (tour._state.current_step == 11) {
         $('#r_BubbleTable input[value="T"]').click();


[22/50] [abbrv] ambari git commit: AMBARI-19657: Downgrade button does not work after restarting the Ambari server when the upgrade wizard was left open (dili)

Posted by nc...@apache.org.
AMBARI-19657: Downgrade button does not work after restarting the Ambari server when the upgrade wizard was left open (dili)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7b0ee28e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7b0ee28e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7b0ee28e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 7b0ee28ef2a313335ff81ef62574c37ca6fe1347
Parents: a51ede8
Author: Di Li <di...@apache.org>
Authored: Mon Jan 23 15:26:52 2017 -0500
Committer: Di Li <di...@apache.org>
Committed: Mon Jan 23 15:26:52 2017 -0500

----------------------------------------------------------------------
 .../main/admin/stack_and_upgrade_controller.js  | 21 +++++++++++++-------
 1 file changed, 14 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7b0ee28e/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index 4f88d2f..a585615 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -385,13 +385,7 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
       self.loadStackVersionsToModel(true).done(function () {
         self.loadRepoVersionsToModel().done(function() {
           self.loadCompatibleVersions().done(function() {
-            var currentVersion = App.StackVersion.find().findProperty('state', 'CURRENT');
-            if (currentVersion) {
-              self.set('currentVersion', {
-                repository_version: currentVersion.get('repositoryVersion.repositoryVersion'),
-                repository_name: currentVersion.get('repositoryVersion.displayName')
-              });
-            }
+            self.updateCurrentStackVersion();
             dfd.resolve();
           });
         });
@@ -400,6 +394,16 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
     return dfd.promise();
   },
 
+  updateCurrentStackVersion: function(){
+    var currentVersion = App.StackVersion.find().findProperty('state', 'CURRENT');
+    if (currentVersion) {
+      this.set('currentVersion', {
+        repository_version: currentVersion.get('repositoryVersion.repositoryVersion'),
+        repository_name: currentVersion.get('repositoryVersion.displayName')
+      });
+    }
+  },
+
   /**
    * load upgrade tasks by upgrade id
    * @return {$.Deferred}
@@ -656,6 +660,9 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage,
    */
   confirmDowngrade: function (event) {
     var self = this;
+    if(!this.get('currentVersion')){
+      this.updateCurrentStackVersion();
+    }
     var currentVersion = this.get('currentVersion');
     return App.showConfirmationPopup(
       function() {
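
In short: currentVersion is computed once during load() and kept only in
client memory, so it can be missing when the upgrade wizard is left open
across an Ambari server restart. Extracting the lookup into
updateCurrentStackVersion() and calling it lazily from confirmDowngrade()
repopulates the value before the confirmation popup is built, which is what
makes the Downgrade button work again.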


[28/50] [abbrv] ambari git commit: AMBARI-19592 : Create grafana dashboards for Druid Metrics and configure druid to send metrics to AMS. (Nishant Bangarwa via avijayan)

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/ad0f4ecc/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
index 032b019..558087d 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
@@ -145,3 +145,39 @@ HdfsResource = functools.partial(
   immutable_paths=get_not_managed_resources(),
   dfs_type=dfs_type
 )
+
+
+# Ambari Metrics
+metric_emitter_type = "noop"
+metric_collector_host = ""
+metric_collector_port = ""
+metric_collector_protocol = ""
+metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.truststore.location", "")
+metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "")
+metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "")
+
+ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", [])
+has_metric_collector = not len(ams_collector_hosts) == 0
+
+if has_metric_collector:
+    metric_emitter_type = "ambari-metrics-emitter"
+    if 'cluster-env' in config['configurations'] and \
+                    'metrics_collector_vip_host' in config['configurations']['cluster-env']:
+        metric_collector_host = config['configurations']['cluster-env']['metrics_collector_vip_host']
+    else:
+        metric_collector_host = ams_collector_hosts[0]
+    if 'cluster-env' in config['configurations'] and \
+                    'metrics_collector_vip_port' in config['configurations']['cluster-env']:
+        metric_collector_port = config['configurations']['cluster-env']['metrics_collector_vip_port']
+    else:
+        metric_collector_web_address = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "localhost:6188")
+        if metric_collector_web_address.find(':') != -1:
+            metric_collector_port = metric_collector_web_address.split(':')[1]
+        else:
+            metric_collector_port = '6188'
+    if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY":
+        metric_collector_protocol = 'https'
+    else:
+        metric_collector_protocol = 'http'
+    pass
+
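
The endpoint resolution above reduces to a small fallback chain: prefer the
cluster-env VIP host and port when present, otherwise take the first collector
host and parse the port out of the AMS webapp address. A condensed, runnable
sketch (cluster_env, ams_site and ams_collector_hosts stand in for the
corresponding Ambari configuration maps; the sample values are hypothetical):

    # Hypothetical stand-ins for the Ambari configuration dictionaries.
    cluster_env = {}
    ams_site = {'timeline.metrics.service.webapp.address': 'metrics-host.example.com:6188'}
    ams_collector_hosts = ['metrics-host.example.com']

    host = cluster_env.get('metrics_collector_vip_host') or ams_collector_hosts[0]
    if 'metrics_collector_vip_port' in cluster_env:
        port = cluster_env['metrics_collector_vip_port']
    else:
        webapp = ams_site.get('timeline.metrics.service.webapp.address', 'localhost:6188')
        port = webapp.split(':')[1] if ':' in webapp else '6188'
    protocol = 'https' if ams_site.get('timeline.metrics.service.http.policy',
                                       'HTTP_ONLY') == 'HTTPS_ONLY' else 'http'
    print(protocol, host, port)  # -> http metrics-host.example.com 6188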

http://git-wip-us.apache.org/repos/asf/ambari/blob/ad0f4ecc/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
index c9c590d..465f218 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
@@ -105,6 +105,9 @@ class HDP26StackAdvisor(HDP25StackAdvisor):
       if "KAFKA" in servicesList:
           extensions_load_list = self.addToList(extensions_load_list, "druid-kafka-indexing-service")
 
+      if 'AMBARI_METRICS' in servicesList:
+        extensions_load_list = self.addToList(extensions_load_list, "ambari-metrics-emitter")
+
       putCommonProperty('druid.extensions.loadList', extensions_load_list)
 
       # JVM Configs go to env properties
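
Together with the params.py change above this closes the loop: when
AMBARI_METRICS is among the cluster's services the stack advisor appends
"ambari-metrics-emitter" to druid.extensions.loadList, so the extension behind
the metric_emitter_type chosen in params.py is actually loaded by Druid.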


[09/50] [abbrv] ambari git commit: AMBARI-19674. Asset Manager import and validation issues (Madhan Mohan Reddy via pallavkul)

Posted by nc...@apache.org.
AMBARI-19674. Asset Manager import and validation issues (Madhan Mohan Reddy via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/74aff7c6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/74aff7c6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/74aff7c6

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 74aff7c645c987b692188e0a48e627896e6cf70a
Parents: a6fd5cb
Author: pallavkul <pa...@gmail.com>
Authored: Mon Jan 23 18:23:57 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Mon Jan 23 18:23:57 2017 +0530

----------------------------------------------------------------------
 .../org/apache/oozie/ambari/view/Constants.java | 16 +++--
 .../apache/oozie/ambari/view/HDFSFileUtils.java | 19 ++++++
 .../ambari/view/OozieProxyImpersonator.java     | 40 ++++++++++-
 .../oozie/ambari/view/WorkflowFilesService.java | 70 ++++++++++++--------
 .../oozie/ambari/view/assets/AssetRepo.java     | 10 +++
 .../oozie/ambari/view/assets/AssetResource.java |  7 ++
 .../oozie/ambari/view/assets/AssetService.java  |  4 ++
 .../workflowmanager/WorkflowManagerService.java | 46 ++++++-------
 .../view/workflowmanager/WorkflowsRepo.java     | 19 +++++-
 .../resources/ui/app/components/asset-config.js | 18 ++++-
 .../resources/ui/app/components/asset-list.js   | 20 +++++-
 .../ui/app/components/asset-manager.js          | 51 ++++++++++++--
 .../ui/app/components/designer-workspace.js     | 23 +------
 .../ui/app/components/flow-designer.js          | 59 ++++++++---------
 .../resources/ui/app/services/asset-manager.js  | 19 ++++++
 .../src/main/resources/ui/app/styles/app.less   |  4 ++
 .../app/templates/components/asset-config.hbs   |  8 +++
 .../ui/app/templates/components/asset-list.hbs  | 12 +++-
 .../app/templates/components/asset-manager.hbs  | 26 +++++++-
 .../templates/components/designer-workspace.hbs |  2 +-
 .../app/templates/components/flow-designer.hbs  |  4 +-
 21 files changed, 347 insertions(+), 130 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/Constants.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/Constants.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/Constants.java
index 238b002..f7c1936 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/Constants.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/Constants.java
@@ -17,9 +17,15 @@
  */
 package org.apache.oozie.ambari.view;
 
-public class Constants {
-  public static final String STATUS_FAILED = "failed";
-  public static final String STATUS_OK = "ok";
-  public static final String STATUS_KEY = "status";
-  public static final String MESSAGE_KEY = "message";
+public interface Constants {
+  String STATUS_FAILED = "failed";
+  String STATUS_OK = "ok";
+  String STATUS_KEY = "status";
+  String MESSAGE_KEY = "message";
+  String WF_DRAFT_EXTENSION = ".wfdraft";
+  String WF_EXTENSION = ".xml";
+  String DEFAULT_WORKFLOW_FILENAME="workflow.xml";
+  String DEFAULT_DRAFT_FILENAME="workflow"+WF_DRAFT_EXTENSION;
+  String WF_ASSET_EXTENSION = ".wfasset";
+  String DEFAULT_WORKFLOW_ASSET_FILENAME="asset"+WF_ASSET_EXTENSION;
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/HDFSFileUtils.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/HDFSFileUtils.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/HDFSFileUtils.java
index 53b43f9..d06eb07 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/HDFSFileUtils.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/HDFSFileUtils.java
@@ -19,6 +19,7 @@ package org.apache.oozie.ambari.view;
 
 import com.google.common.base.Optional;
 import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.commons.hdfs.UserService;
 import org.apache.ambari.view.commons.hdfs.ViewPropertyHelper;
 import org.apache.ambari.view.utils.hdfs.HdfsApi;
 import org.apache.ambari.view.utils.hdfs.HdfsUtil;
@@ -30,6 +31,7 @@ import org.slf4j.LoggerFactory;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 
 public class HDFSFileUtils {
@@ -118,5 +120,22 @@ public class HDFSFileUtils {
 		}
 
 	}
+	public boolean hdfsCheck()  {
+		try {
+			getHdfsgetApi().getStatus();
+			return true;
+		} catch (Exception e) {
+			throw new RuntimeException(e);
+		}
+	}
 
+	public boolean homeDirCheck(){
+		UserService userservice = new UserService(viewContext, getViewConfigs(viewContext));
+		userservice.homeDir();
+		return true;
+	}
+	private Map<String,String> getViewConfigs(ViewContext context) {
+		Optional<Map<String, String>> props = ViewPropertyHelper.getViewConfigs(context, VIEW_CONF_KEYVALUES);
+		return props.isPresent()? props.get() : new HashMap<String, String>();
+	}
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
index 7596de0..d029c39 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/OozieProxyImpersonator.java
@@ -122,6 +122,16 @@ public class OozieProxyImpersonator {
       viewContext.getInstanceName()));
 
   }
+  @Path("hdfsCheck")
+  public Response hdfsCheck(){
+    hdfsFileUtils.hdfsCheck();
+    return Response.ok().build();
+  }
+  @Path("homeDirCheck")
+  public Response homeDirCheck(){
+    hdfsFileUtils.homeDirCheck();
+    return Response.ok().build();
+  }
 
   @Path("/fileServices")
   public FileServices fileServices() {
@@ -210,7 +220,7 @@ public class OozieProxyImpersonator {
     if (StringUtils.isEmpty(appPath)) {
       throw new RuntimeException("app path can't be empty.");
     }
-    appPath = workflowFilesService.getWorkflowDrafFileName(appPath.trim());
+    appPath = workflowFilesService.getWorkflowDraftFileName(appPath.trim());
     workflowFilesService.createFile(appPath, postBody, overwrite);
     if (PROJ_MANAGER_ENABLED) {
       JobType jobType = StringUtils.isEmpty(jobTypeStr) ? JobType.WORKFLOW : JobType.valueOf(jobTypeStr);
@@ -263,6 +273,30 @@ public class OozieProxyImpersonator {
       return getRespCodeForException(ex);
     }
   }
+  @GET
+  @Path("/readAsset")
+  public Response readAsset(
+          @QueryParam("assetPath") String assetPath) {
+    if (StringUtils.isEmpty(assetPath)) {
+      throw new RuntimeException("assetPath can't be empty.");
+    }
+    try {
+      final InputStream is = workflowFilesService
+              .readAsset(assetPath);
+      StreamingOutput streamer = new StreamingOutput() {
+        @Override
+        public void write(OutputStream os) throws IOException,
+                WebApplicationException {
+          IOUtils.copy(is, os);
+          is.close();
+          os.close();
+        }
+      };
+      return Response.ok(streamer).status(200).build();
+    } catch (IOException e) {
+      return getRespCodeForException(e);
+    }
+  }
 
 
   @GET
@@ -375,7 +409,7 @@ public class OozieProxyImpersonator {
 
   @GET
   @Path("/readWorkflowDetail")
-  public Response isDraftAvailable(
+  public Response getWorkflowDetail(
     @QueryParam("workflowXmlPath") String workflowPath) {
     WorkflowFileInfo workflowDetails = workflowFilesService
       .getWorkflowDetails(workflowPath);
@@ -384,7 +418,7 @@ public class OozieProxyImpersonator {
 
   @GET
   @Path("/readWorkflowXml")
-  public Response readWorkflowXxml(
+  public Response readWorkflowXml(
     @QueryParam("workflowXmlPath") String workflowPath) {
     if (StringUtils.isEmpty(workflowPath)) {
       throw new RuntimeException("workflowXmlPath can't be empty.");

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/WorkflowFilesService.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/WorkflowFilesService.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/WorkflowFilesService.java
index d8bf9ff..289e68b 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/WorkflowFilesService.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/WorkflowFilesService.java
@@ -26,7 +26,7 @@ import org.slf4j.LoggerFactory;
 
 public class WorkflowFilesService {
   private final static Logger LOGGER = LoggerFactory
-    .getLogger(WorkflowFilesService.class);
+          .getLogger(WorkflowFilesService.class);
   private HDFSFileUtils hdfsFileUtils;
 
   public WorkflowFilesService(HDFSFileUtils hdfsFileUtils) {
@@ -35,86 +35,104 @@ public class WorkflowFilesService {
   }
 
   public String createFile(String appPath, String content,
-                                   boolean overwrite) throws IOException {
+                           boolean overwrite) throws IOException {
     return hdfsFileUtils.writeToFile(appPath, content,
-      overwrite);
+            overwrite);
   }
 
   public String createAssetFile(String appPath, String content,
                                 boolean overwrite) throws IOException {
     return hdfsFileUtils.writeToFile(appPath, content,
-      overwrite);
+            overwrite);
   }
 
   public InputStream readDraft(String appPath) throws IOException {
-    return hdfsFileUtils.read(getWorkflowDrafFileName(appPath));
+    return hdfsFileUtils.read(getWorkflowDraftFileName(appPath));
   }
 
   public InputStream readWorkflowXml(String appPath) throws IOException {
     return hdfsFileUtils.read(getWorkflowFileName(appPath));
   }
 
-  public String getWorkflowDrafFileName(String appPath) {
-    if (appPath.endsWith(".draft.json")){
+  public InputStream readAsset(String assetPath) throws IOException {
+    return hdfsFileUtils.read(getAssetFileName(assetPath));
+  }
+
+  public String getWorkflowDraftFileName(String appPath) {
+    if (appPath.endsWith(Constants.WF_DRAFT_EXTENSION)) {
       return appPath;
-    }else{
-      return getWorkflowFileName(appPath).concat(".draft.json");
+    } else if (appPath.endsWith(Constants.WF_EXTENSION)) {
+      String folderPath = appPath.substring(0, appPath.lastIndexOf(Constants.WF_EXTENSION));
+      return folderPath + Constants.WF_DRAFT_EXTENSION;
+    } else if (appPath.endsWith("/")) {
+      return appPath + Constants.DEFAULT_DRAFT_FILENAME;
+    } else {
+      return appPath + "/" + Constants.DEFAULT_DRAFT_FILENAME;
     }
   }
 
   public String getWorkflowFileName(String appPath) {
-    String workflowFile = null;
-    if (appPath.endsWith(".xml")) {
-      workflowFile = appPath;
+    if (appPath.endsWith(Constants.WF_EXTENSION)) {
+      return appPath;
+    } else if (appPath.endsWith(Constants.WF_DRAFT_EXTENSION)) {
+      String folderPath = appPath.substring(0, appPath.lastIndexOf(Constants.WF_DRAFT_EXTENSION));
+      return folderPath + Constants.WF_EXTENSION;
+    } else if (appPath.endsWith("/")) {
+      return appPath + Constants.DEFAULT_WORKFLOW_FILENAME;
     } else {
-      workflowFile = appPath + (appPath.endsWith("/") ? "" : "/")
-        + "workflow.xml";
+      return appPath + "/" + Constants.DEFAULT_WORKFLOW_FILENAME;
     }
-    return workflowFile;
   }
 
   public String getAssetFileName(String appPath) {
     String assetFile = null;
-    if (appPath.endsWith(".xml")) {
+    if (appPath.endsWith(Constants.WF_ASSET_EXTENSION)) {
       assetFile = appPath;
     } else {
       assetFile = appPath + (appPath.endsWith("/") ? "" : "/")
-        + "asset.xml";
+              + Constants.DEFAULT_WORKFLOW_ASSET_FILENAME;
     }
     return assetFile;
   }
 
   public void discardDraft(String workflowPath) throws IOException {
-    hdfsFileUtils.deleteFile(getWorkflowDrafFileName(workflowPath));
-
+    hdfsFileUtils.deleteFile(getWorkflowDraftFileName(workflowPath));
   }
 
   public WorkflowFileInfo getWorkflowDetails(String appPath) {
     WorkflowFileInfo workflowInfo = new WorkflowFileInfo();
     workflowInfo.setWorkflowPath(getWorkflowFileName(appPath));
     boolean draftExists = hdfsFileUtils
-      .fileExists(getWorkflowDrafFileName(appPath));
+            .fileExists(getWorkflowDraftFileName(appPath));
     workflowInfo.setDraftExists(draftExists);
     boolean workflowExists = hdfsFileUtils.fileExists(getWorkflowFileName(appPath));
     FileStatus workflowFileStatus = null;
     if (workflowExists) {
       workflowFileStatus = hdfsFileUtils
-        .getFileStatus(getWorkflowFileName(appPath));
+              .getFileStatus(getWorkflowFileName(appPath));
       workflowInfo.setWorkflowModificationTime(workflowFileStatus
-        .getModificationTime());
+              .getModificationTime());
     }
     if (draftExists) {
       FileStatus draftFileStatus = hdfsFileUtils
-        .getFileStatus(getWorkflowDrafFileName(appPath));
+              .getFileStatus(getWorkflowDraftFileName(appPath));
       workflowInfo.setDraftModificationTime(draftFileStatus
-        .getModificationTime());
+              .getModificationTime());
       if (!workflowExists) {
         workflowInfo.setIsDraftCurrent(true);
       } else {
         workflowInfo.setIsDraftCurrent(draftFileStatus.getModificationTime()
-          - workflowFileStatus.getModificationTime() > 0);
+                - workflowFileStatus.getModificationTime() > 0);
       }
     }
     return workflowInfo;
   }
-}
+  public void deleteWorkflowFile(String fullWorkflowFilePath){
+    try {
+      hdfsFileUtils.deleteFile(fullWorkflowFilePath);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+}
\ No newline at end of file
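
The net effect of the renamed draft and workflow extensions is easiest to see
as input/output pairs. A minimal Python mirror of the new naming rules (a
sketch only; the authoritative logic is the Java above, with the extension
strings inlined from Constants.java):

    WF_EXT, DRAFT_EXT = '.xml', '.wfdraft'

    def workflow_file(path):
        if path.endswith(WF_EXT):
            return path
        if path.endswith(DRAFT_EXT):
            return path[:-len(DRAFT_EXT)] + WF_EXT
        return path.rstrip('/') + '/workflow' + WF_EXT

    def draft_file(path):
        if path.endswith(DRAFT_EXT):
            return path
        if path.endswith(WF_EXT):
            return path[:-len(WF_EXT)] + DRAFT_EXT
        return path.rstrip('/') + '/workflow' + DRAFT_EXT

    print(workflow_file('/user/wf/flow.wfdraft'))  # /user/wf/flow.xml
    print(draft_file('/user/wf'))                  # /user/wf/workflow.wfdraft
    print(draft_file('/user/wf/flow.xml'))         # /user/wf/flow.wfdraft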

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetRepo.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetRepo.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetRepo.java
index df936a4..6e9b7ee 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetRepo.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetRepo.java
@@ -36,4 +36,14 @@ public class AssetRepo extends BaseRepo<ActionAsset> {
       throw new RuntimeException(e);
     }
   }
+
+  public boolean assetNameAvailable(String name) {
+    try {
+      Collection<ActionAsset> assets=dataStore.findAll(ActionAsset.class, " name='" + name + "'");
+      boolean assetExists= assets!=null && !assets.isEmpty();
+      return !assetExists;
+    } catch (PersistenceException e) {
+      throw new RuntimeException(e);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
index 0622971..af86810 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetResource.java
@@ -145,6 +145,13 @@ public class AssetResource {
   }
 
   @GET
+  @Path("/assetNameAvailable")
+  public Response assetNameAvailable(@QueryParam("name") String name){
+    boolean available=assetService.isAssetNameAvailable(name);
+    return Response.ok(available).build();
+  }
+
+  @GET
   @Path("/{id}")
   public Response getAssetDetail(@PathParam("id") String id) {
     try {

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetService.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetService.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetService.java
index 9fe2f9c..4bac2cd 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetService.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/assets/AssetService.java
@@ -112,4 +112,8 @@ public class AssetService {
   public Collection<ActionAsset> getMyAssets() {
     return assetRepo.getMyAsets(viewContext.getUsername());
   }
+
+  public boolean isAssetNameAvailable(String name) {
+    return assetRepo.assetNameAvailable(name);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowManagerService.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowManagerService.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowManagerService.java
index afdee9e..7ce6081 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowManagerService.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowManagerService.java
@@ -17,26 +17,26 @@
  */
 package org.apache.oozie.ambari.view.workflowmanager;
 
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Date;
-
 import org.apache.ambari.view.ViewContext;
 import org.apache.oozie.ambari.view.HDFSFileUtils;
 import org.apache.oozie.ambari.view.JobType;
+import org.apache.oozie.ambari.view.WorkflowFilesService;
 import org.apache.oozie.ambari.view.workflowmanager.model.Workflow;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.Collection;
+
 public class WorkflowManagerService {
   private final static Logger LOGGER = LoggerFactory
-    .getLogger(WorkflowManagerService.class);
+          .getLogger(WorkflowManagerService.class);
   private final WorkflowsRepo workflowsRepository;
-  private final HDFSFileUtils hdfsFileUtils;
+  private final WorkflowFilesService workflowFilesService;
 
   public WorkflowManagerService(ViewContext viewContext) {
     workflowsRepository = new WorkflowsRepo(viewContext.getDataStore());
-    hdfsFileUtils = new HDFSFileUtils(viewContext);
+
+    workflowFilesService = new WorkflowFilesService(new HDFSFileUtils(viewContext));
   }
 
   public void saveWorkflow(String projectId, String path, JobType jobType,
@@ -46,16 +46,24 @@ public class WorkflowManagerService {
       Workflow workflowById = workflowsRepository.findById(projectId);
       if (workflowById == null) {
         throw new RuntimeException("could not find project with id :"
-          + projectId);
+                + projectId);
       }
       setWorkflowAttributes(jobType, userName, name, workflowById);
       workflowsRepository.update(workflowById);
+
     } else {
-      Workflow wf = new Workflow();
-      wf.setId(workflowsRepository.generateId());
-      setWorkflowAttributes(jobType, userName, name, wf);
-      wf.setWorkflowDefinitionPath(path);
-      workflowsRepository.create(wf);
+      String workflowFileName = workflowFilesService.getWorkflowFileName(path);
+      Workflow workflowByPath = workflowsRepository.getWorkflowByPath(workflowFileName);
+      if (workflowByPath != null) {
+        setWorkflowAttributes(jobType, userName, name, workflowByPath);
+        workflowsRepository.update(workflowByPath);
+      } else {
+        Workflow wf = new Workflow();
+        wf.setId(workflowsRepository.generateId());
+        setWorkflowAttributes(jobType, userName, name, wf);
+        wf.setWorkflowDefinitionPath(workflowFileName);
+        workflowsRepository.create(wf);
+      }
     }
   }
 
@@ -70,19 +78,11 @@ public class WorkflowManagerService {
     return workflowsRepository.findAll();
   }
 
-  public Workflow getWorkflowByPath(String path) {
-    return workflowsRepository.getWorkflowByPath(path);
-  }
-
   public void deleteWorkflow(String projectId, Boolean deleteDefinition) {
     Workflow workflow = workflowsRepository.findById(projectId);
     if (deleteDefinition) {
-      try {
-        hdfsFileUtils.deleteFile(workflow.getWorkflowDefinitionPath());
-      } catch (IOException e) {
-        throw new RuntimeException(e);
-      }
+      workflowFilesService.deleteWorkflowFile(workflow.getWorkflowDefinitionPath());
     }
     workflowsRepository.delete(workflow);
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsRepo.java
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsRepo.java b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsRepo.java
index 7787bda..1fc0c5f 100644
--- a/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsRepo.java
+++ b/contrib/views/wfmanager/src/main/java/org/apache/oozie/ambari/view/workflowmanager/WorkflowsRepo.java
@@ -21,9 +21,14 @@ import org.apache.ambari.view.DataStore;
 import org.apache.ambari.view.PersistenceException;
 import org.apache.oozie.ambari.view.repo.BaseRepo;
 import org.apache.oozie.ambari.view.workflowmanager.model.Workflow;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-public class WorkflowsRepo extends BaseRepo<Workflow> {
+import java.util.Collection;
 
+public class WorkflowsRepo extends BaseRepo<Workflow> {
+  private final static Logger LOGGER = LoggerFactory
+          .getLogger(WorkflowsRepo.class);
   public WorkflowsRepo(DataStore dataStore) {
     super(Workflow.class, dataStore);
 
@@ -31,8 +36,16 @@ public class WorkflowsRepo extends BaseRepo<Workflow> {
 
   public Workflow getWorkflowByPath(String path) {
     try {
-      return this.dataStore.find(Workflow.class,
-        "workflowDefinitionPath='" + path + "'");
+      Collection<Workflow> workflows = this.dataStore.findAll(Workflow.class,
+              "workflowDefinitionPath='" + path + "'");
+      if (workflows == null || workflows.isEmpty()) {
+        return null;
+      } else if (workflows.size() > 1) {
+        LOGGER.error("Duplicate workflows found with the same path");
+        throw new RuntimeException("Duplicate workflows");
+      } else {
+        return workflows.iterator().next();
+      }
     } catch (PersistenceException e) {
       throw new RuntimeException(e);
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-config.js
index dcfba34..62d3be3 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-config.js
@@ -24,6 +24,7 @@ const Validations = buildValidations({
 });
 
 export default Ember.Component.extend(Validations, {
+  assetManager : Ember.inject.service('asset-manager'),
   initialize: function(){
     this.$('#asset_config_dialog').modal('show');
     this.$('#asset_config_dialog').modal().on('hidden.bs.modal', function() {
@@ -39,8 +40,21 @@ export default Ember.Component.extend(Validations, {
         this.set('showErrorMessage', true);
         return;
       }
-      this.$('#asset_config_dialog').modal('hide');
-      this.sendAction('saveAssetConfig');
+      this.set("inProgress", true);
+      var assetNameAvailableDefered=this.get("assetManager").assetNameAvailable(this.get("assetModel.name"));
+      assetNameAvailableDefered.promise.then(function(data){
+        this.set("inProgress", false);
+        if (data === "false") {
+          this.set("assetErrorMsg", "Asset name already exists");
+          return;
+        } else {
+          this.$('#asset_config_dialog').modal('hide');
+          this.sendAction('saveAssetConfig');
+        }
+      }.bind(this)).catch(function(data){
+        this.set("inProgress", false);
+        this.set("assetErrorMsg", "There is some problem while checking asset name availability. Please try again.");
+      }.bind(this));
     }
   }
 });

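A note on the flow above: the availability endpoint is fetched with dataType "text", so the component receives the literal strings "true"/"false" rather than booleans, which is why the comparison is against the string "false". A small normalizer keeps that protocol detail in one place (sketch only; the helper name is illustrative, not part of the commit):

    // Normalizes the plain-text "true"/"false" response to a boolean.
    function asBoolean(text) {
      return String(text).trim() === 'true';
    }

    // Usage inside the .then(...) callback shown in the diff above:
    //   if (!asBoolean(data)) {
    //     this.set('assetErrorMsg', 'Asset name already exists');
    //     return;
    //   }
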
http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-list.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-list.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-list.js
index 9ad0494..ae695fe 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-list.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-list.js
@@ -17,9 +17,11 @@
 import Ember from 'ember';
 
 export default Ember.Component.extend({
+  assetManager : Ember.inject.service('asset-manager'),
   currentAssetId: null,
   assetNotSelected: true,
   assetSearchCriteria: "",
+  filteredAssetList:  Ember.A([]),
   fuseSearchOptions: {
     shouldSort: true,
     threshold: 0.1,
@@ -46,7 +48,23 @@ export default Ember.Component.extend({
     this.$('#asset_list_dialog').modal().on('hidden.bs.modal', function() {
       this.sendAction('showAssetList', false);
     }.bind(this));
-    this.initializeFuseSearch();
+
+    self.set("inProgress", true);
+    self.set("assetErrorMsg", "");
+    var fetchAssetsDefered=self.get("assetManager").fetchAssets();
+    fetchAssetsDefered.promise.then(function(response){
+      var assetData = JSON.parse(response).data;
+      if (self.get("assetListType") && self.get("assetListType") !== "") {
+        assetData = assetData.filterBy('type', self.get("assetListType"));
+      }
+      self.set('assetList', assetData);
+      self.initializeFuseSearch();
+      self.set("inProgress", false);
+    }.bind(this)).catch(function(data){
+      self.set("assetErrorMsg", "There is some problem while fetching assets. Please try again.");
+      self.set("inProgress", false);
+    });
+
   }.on('didInsertElement'),
   initializeFuseSearch() {
      this.set('fuse', new Fuse(this.get("assetList"), this.get('fuseSearchOptions')));

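This component (like the asset-manager component below) keeps filteredAssetList in sync imperatively after each fetch or search. As a sketch of an alternative, the list could be derived with a computed property replacing the filteredAssetList: Ember.A([]) declaration added above, assuming fuse is rebuilt whenever assetList changes, as initializeFuseSearch does:

    // Sketch: derive filteredAssetList instead of setting it imperatively.
    filteredAssetList: Ember.computed('assetList.[]', 'assetSearchCriteria', function() {
      var criteria = this.get('assetSearchCriteria');
      if (Ember.isEmpty(criteria)) {
        return this.get('assetList');
      }
      // fuse is re-created in initializeFuseSearch whenever assetList is set.
      return this.get('fuse').search(criteria);
    }),
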
http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-manager.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-manager.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-manager.js
index 4674533..5228324 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-manager.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/asset-manager.js
@@ -17,7 +17,10 @@
 import Ember from 'ember';
 
 export default Ember.Component.extend({
+  assetManager : Ember.inject.service('asset-manager'),
   assetSearchCriteria: "",
+  currentAsset: null,
+  filteredAssetList:  Ember.A([]),
   fuseSearchOptions: {
     shouldSort: true,
     threshold: 0.1,
@@ -45,7 +48,17 @@ export default Ember.Component.extend({
       this.sendAction('showAssetManager', false);
     }.bind(this));
 
-    this.initializeFuseSearch();
+    self.set("inProgress", true);
+    self.set("errorMsg", "");
+    var fetchAssetsDefered=self.get("assetManager").fetchMyAssets();
+    fetchAssetsDefered.promise.then(function(response){
+      self.set('assetList', JSON.parse(response).data);
+      self.initializeFuseSearch();
+      self.set("inProgress", false);
+    }.bind(this)).catch(function(data){
+      self.set("errorMsg", "There is some problem while fetching assets. Please try again.");
+      self.set("inProgress", false);
+    });
   }.on('didInsertElement'),
   initializeFuseSearch() {
      this.set('fuse', new Fuse(this.get("assetList"), this.get('fuseSearchOptions')));
@@ -62,9 +75,39 @@ export default Ember.Component.extend({
     close() {
       this.$('#asset_manager_dialog').modal('hide');
     },
-    deleteAsset(asset) {
-      this.$('#asset_manager_dialog').modal('hide');
-      this.sendAction('deleteAsset', asset);
+    deleteAsset() {
+      var self=this;
+      self.set("inProgress", true);
+      self.set("errorMsg", "");
+      self.set("successMsg", "");
+      var deleteAssetDefered=self.get("assetManager").deleteAsset(self.get("currentAsset").id);
+      deleteAssetDefered.promise.then(function(response){
+        var fetchAssetsDefered=self.get("assetManager").fetchMyAssets();
+        fetchAssetsDefered.promise.then(function(response){
+          self.get("assetList").clear();
+          self.get("assetList").pushObjects(JSON.parse(response).data);
+          if (self.get("assetSearchCriteria") !== "") {
+            self.set('filteredAssetList', self.get('fuse').search(self.get("assetSearchCriteria")));
+          } else {
+            self.set('filteredAssetList', self.get("assetList"));
+          }
+          self.set("successMsg", "Asset got deleted successfully");
+          self.set("inProgress", false);
+        }.bind(this)).catch(function(data){
+          self.set("errorMsg", "There is some problem while fetching assets. Please try again.");
+          self.set("inProgress", false);
+        });
+      }.bind(this)).catch(function(data){
+        self.set("errorMsg", "There is some problem while deleting asset. Please try again.");
+        self.set("inProgress", false);
+      });
+    },
+    showDeleteAssetWarning(asset) {
+      this.set("currentAsset", asset);
+      this.set('showingDeleteAssetWarning', true);
+      Ember.run.later(()=>{
+        this.$('#ConfirmDialog').modal('show');
+      });
     }
   }
 });

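The deleteAsset action above nests the refresh fetch inside the delete callback. Since the service hands back RSVP deferreds, the same flow can be flattened into one chain; a sketch under that assumption (same service and property names as in the component; the search-criteria refiltering step is omitted for brevity):

    deleteAsset() {
      var self = this;
      self.set('inProgress', true);
      self.set('errorMsg', '');
      self.set('successMsg', '');
      self.get('assetManager').deleteAsset(self.get('currentAsset').id).promise
        .then(function() {
          // Refresh the list only after the delete has succeeded.
          return self.get('assetManager').fetchMyAssets().promise;
        })
        .then(function(response) {
          self.get('assetList').clear();
          self.get('assetList').pushObjects(JSON.parse(response).data);
          self.set('successMsg', 'Asset deleted successfully');
        })
        .catch(function() {
          self.set('errorMsg', 'There is some problem while deleting or fetching assets. Please try again.');
        })
        .finally(function() {
          // Runs on both paths, so the spinner is always cleared.
          self.set('inProgress', false);
        });
    }
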
http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
index a04c66d..422253d 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
@@ -19,7 +19,6 @@ import CommonUtils from "../utils/common-utils";
 import Constants from '../utils/constants';
 export default Ember.Component.extend({
   workspaceManager : Ember.inject.service('workspace-manager'),
-  assetManager : Ember.inject.service('asset-manager'),
   xmlAppPath : null,
   appPath : null,
   type : 'wf',
@@ -236,27 +235,7 @@ export default Ember.Component.extend({
       }
     },
     showAssetManager(value) {
-      var self=this;
-      if (value) {
-        var fetchAssetsDefered=self.get("assetManager").fetchMyAssets();
-        fetchAssetsDefered.promise.then(function(response){
-          self.set('assetList', JSON.parse(response).data);
-          self.set('showingAssetManager', value);
-        }.bind(this)).catch(function(data){
-          self.set("errorMsg", "There is some problem while fetching assets. Please try again.");
-        });
-      } else {
-        self.set('showingAssetManager', value);
-      }
-    },
-    deleteAsset(asset) {
-      var self=this;
-      var deleteAssetDefered=self.get("assetManager").deleteAsset(asset.id);
-      deleteAssetDefered.promise.then(function(response){
-        console.log("Asset deleted..");
-      }.bind(this)).catch(function(data){
-        self.set("errorMsg", "There is some problem while deleting asset. Please try again.");
-      });
+      this.set('showingAssetManager', value);
     }
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
index 1822a20..c682ea9 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
@@ -349,6 +349,24 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
     });
     return deferred;
   },
+  getAssetFromHdfs(filePath){
+    var url = Ember.ENV.API_URL + "/readAsset?assetPath="+filePath;
+    var deferred = Ember.RSVP.defer();
+    Ember.$.ajax({
+      url: url,
+      method: 'GET',
+      dataType: "text",
+      beforeSend: function (xhr) {
+        xhr.setRequestHeader("X-XSRF-HEADER", Math.round(Math.random()*100000));
+        xhr.setRequestHeader("X-Requested-By", "Ambari");
+      }
+    }).done(function(data){
+      deferred.resolve(data);
+    }).fail(function(data){
+      deferred.reject(data);
+    });
+    return deferred;
+  },
   importActionSettingsFromString(actionSettings) {
     var x2js = new X2JS();
     var actionSettingsObj = x2js.xml_str2json(actionSettings);
@@ -945,13 +963,13 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       this.set("showingActionSettingsFileBrowser", false);
       if(this.get('actionSettingsFilePath')){
         self.set("errorMsg", "");
-        var actionSettingsXmlDefered=this.getWorkflowFromHdfs(this.get('actionSettingsFilePath'));
+        var actionSettingsXmlDefered=this.getAssetFromHdfs(this.get('actionSettingsFilePath'));
         actionSettingsXmlDefered.promise.then(function(data){
           this.importActionSettingsFromString(data);
         }.bind(this)).catch(function(data){
           console.error(data);
           var stackTraceMsg = self.getStackTrace(data.responseText);
-          self.set("errorMsg", "There is some problem while importing.Please try again.");
+          self.set("errorMsg", "There is some problem while importing asset.Please try again.");
           self.showingErrorMsgInDesigner(data);
         });
       }
@@ -964,13 +982,13 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       this.set("showingImportActionNodeFileBrowser", false);
       if(this.get('actionNodeFilePath')){
         self.set("errorMsg", "");
-        var actionSettingsXmlDefered=this.getWorkflowFromHdfs(this.get('actionNodeFilePath'));
+        var actionSettingsXmlDefered=this.getAssetFromHdfs(this.get('actionNodeFilePath'));
         actionSettingsXmlDefered.promise.then(function(data){
           this.importActionNodeFromString(data);
         }.bind(this)).catch(function(data){
           console.error(data);
           var stackTraceMsg = self.getStackTrace(data.responseText);
-          self.set("errorMsg", "There is some problem while importing.Please try again.");
+          self.set("errorMsg", "There is some problem while importing asset. Please try again.");
           self.showingErrorMsgInDesigner(data);
         });
       }
@@ -1109,22 +1127,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
     },
     showAssetList(value) {
       var self=this;
-      if (value) {
-        self.set("errorMsg", "");
-        var fetchAssetsDefered=self.get("assetManager").fetchAssets();
-        fetchAssetsDefered.promise.then(function(response){
-          var assetData = JSON.parse(response).data;
-          assetData = assetData.filterBy('type', self.flowRenderer.currentCyNode.data().node.actionType);
-          self.set('assetList', assetData);
-          self.set('assetListType', self.flowRenderer.currentCyNode.data().node.actionType);
-          self.set('showingAssetList', value);
-        }.bind(this)).catch(function(data){
-          self.set("errorMsg", "There is some problem while fetching assets. Please try again.");
-          self.showingErrorMsgInDesigner(data);
-        });
-      } else {
-        self.set('showingAssetList', value);
-      }
+      self.set('showingAssetList', value);
+      self.set('assetListType', self.flowRenderer.currentCyNode.data().node.actionType);
     },
     importAsset(asset) {
       var self=this;
@@ -1143,19 +1147,8 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
     },
     showAssetNodeList(value) {
       var self=this;
-      if (value) {
-        self.set("errorMsg", "");
-        var fetchAssetsDefered=self.get("assetManager").fetchAssets();
-        fetchAssetsDefered.promise.then(function(response){
-          self.set('assetList', JSON.parse(response).data);
-          self.set('showingAssetNodeList', value);
-        }.bind(this)).catch(function(data){
-          self.set("errorMsg", "There is some problem while fetching assets. Please try again.");
-          self.showingErrorMsgInDesigner(data);
-        });
-      } else {
-        self.set('showingAssetNodeList', value);
-      }
+      self.set('showingAssetNodeList', value);
+      self.set('assetListType', "");
     },
     importAssetNode(asset) {
       var self=this;

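getAssetFromHdfs above is a near-copy of the existing getWorkflowFromHdfs with a different endpoint. As a sketch, both could delegate to one text-GET helper (the helper name is illustrative; the endpoint path and headers are as in the diff):

    // Shared helper: GET a text resource and expose an RSVP deferred.
    getTextFromHdfs(url) {
      var deferred = Ember.RSVP.defer();
      Ember.$.ajax({
        url: url,
        method: 'GET',
        dataType: 'text',
        beforeSend: function (xhr) {
          xhr.setRequestHeader('X-XSRF-HEADER', Math.round(Math.random() * 100000));
          xhr.setRequestHeader('X-Requested-By', 'Ambari');
        }
      }).done(deferred.resolve).fail(deferred.reject);
      return deferred;
    },
    getAssetFromHdfs(filePath) {
      return this.getTextFromHdfs(Ember.ENV.API_URL + '/readAsset?assetPath=' + filePath);
    },
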
http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/services/asset-manager.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/services/asset-manager.js b/contrib/views/wfmanager/src/main/resources/ui/app/services/asset-manager.js
index 3e87644..45f99e7 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/services/asset-manager.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/services/asset-manager.js
@@ -138,5 +138,24 @@ export default Ember.Service.extend({
       deferred.reject(data);
     });
     return deferred;
+  },
+  assetNameAvailable(assetName) {
+    var url = Ember.ENV.API_URL + "/assets/assetNameAvailable?name=" + assetName;
+    var deferred = Ember.RSVP.defer();
+    Ember.$.ajax({
+      url: url,
+      method: "GET",
+      dataType: "text",
+      contentType: "text/plain;charset=utf-8",
+      beforeSend: function (xhr) {
+        xhr.setRequestHeader("X-XSRF-HEADER", Math.round(Math.random()*100000));
+        xhr.setRequestHeader("X-Requested-By", "workflow-designer");
+      }
+    }).done(function(data){
+      deferred.resolve(data);
+    }).fail(function(data){
+      deferred.reject(data);
+    });
+    return deferred;
   }
 });

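assetNameAvailable above follows the view's usual pattern of returning an Ember.RSVP.defer() handle that callers unwrap via .promise. A promise-returning variant is sketched below; the method name is illustrative, and unlike the version in the diff it also URL-encodes the asset name:

    // Sketch: same endpoint, but returns the promise directly.
    assetNameAvailablePromise(assetName) {
      var url = Ember.ENV.API_URL + "/assets/assetNameAvailable?name=" + encodeURIComponent(assetName);
      return new Ember.RSVP.Promise(function(resolve, reject) {
        Ember.$.ajax({
          url: url,
          method: "GET",
          dataType: "text",
          beforeSend: function (xhr) {
            xhr.setRequestHeader("X-XSRF-HEADER", Math.round(Math.random() * 100000));
            xhr.setRequestHeader("X-Requested-By", "workflow-designer");
          }
        }).done(resolve).fail(reject);
      });
    }

    // Callers can then chain directly:
    //   this.get('assetManager').assetNameAvailablePromise(name)
    //     .then(function(data) { /* data is the text "true" or "false" */ });
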
http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
index d91eb3b..755a6ad 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
@@ -1636,3 +1636,7 @@ input:invalid {
   width: 100%;
   min-height: 175px;
 }
+#asset-delete-confirm-dialog .modal-dialog{
+  width: 650px;
+  top: 60px;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-config.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-config.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-config.hbs
index 3ae4bff..7bc48d7 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-config.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-config.hbs
@@ -25,6 +25,11 @@
         <h4 class="modal-title">Publish Asset</h4>
       </div>
       <div class="modal-body">
+          {{#if (and assetErrorMsg (not (eq assetErrorMsg "")))}}
+            <div id="alert"class="alert alert-danger alert-dismissible" role="alert">
+                {{assetErrorMsg}}
+            </div>
+          {{/if}}
           <form class="form-horizontal">
             <div class="panel panel-default">
               <div class="panel-body">
@@ -52,3 +57,6 @@
     </div>
   </div>
 </div>
+{{#if inProgress}}
+  {{spin-spinner lines=13 length=20 width=10 top=200}}
+{{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-list.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-list.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-list.hbs
index bc5201d..7fd1236 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-list.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-list.hbs
@@ -26,6 +26,11 @@
       </div>
       <div class="modal-body">
         {{input type="text" class="form-control marginBottom10" name="assetSearchCriteria" value=assetSearchCriteria placeholder="Asset Search"}}
+        {{#if (and assetErrorMsg (not (eq assetErrorMsg "")))}}
+          <div id="alert"class="alert alert-danger alert-dismissible" role="alert">
+              {{assetErrorMsg}}
+          </div>
+        {{/if}}
         <div class="panel panel-default mb0 asset-list-panel-header">
           <table class="table asset-list-header-table listing table-striped table-hover table-bordered" cellspacing="0" width="100%">
             <thead>
@@ -38,8 +43,8 @@
             </thead>
           </table>
         </div>
-        <div class="panel panel-default asset-list-panel-body {{if (eq filteredAssetList.length 0) 'no-asset-records'}}">
-          {{#if (eq filteredAssetList.length 0)}}
+        <div class="panel panel-default asset-list-panel-body {{if (and (eq filteredAssetList.length 0) (eq assetErrorMsg "")) 'no-asset-records'}}">
+          {{#if (and (eq filteredAssetList.length 0) (eq assetErrorMsg ""))}}
             No {{#if (not-eq assetListType "")}} {{assetListType}}{{/if}} assets
           {{/if}}
           <table id="asset-list" class="table asset-list listing table-striped table-hover table-bordered" cellspacing="0" width="100%">
@@ -64,3 +69,6 @@
     </div>
   </div>
 </div>
+{{#if inProgress}}
+  {{spin-spinner lines=13 length=20 width=10 top=200}}
+{{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-manager.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-manager.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-manager.hbs
index 7a21af1..6ca85c6 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-manager.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/asset-manager.hbs
@@ -26,6 +26,16 @@
       </div>
       <div class="modal-body">
         {{input type="text" class="form-control marginBottom10" name="assetSearchCriteria" value=assetSearchCriteria placeholder="Asset Search"}}
+        {{#if (and errorMsg (not (eq errorMsg "")))}}
+          <div id="alert" class="alert alert-danger alert-dismissible" role="alert">
+              {{errorMsg}}
+          </div>
+        {{/if}}
+        {{#if (and successMsg (not (eq successMsg "")))}}
+            <div id="alert" class="alert alert-success alert-dismissible" role="alert">
+                {{successMsg}}
+            </div>
+        {{/if}}
         <div class="panel panel-default mb0 asset-list-panel-header">
           <table class="table asset-list-header-table listing table-striped table-hover table-bordered" cellspacing="0" width="100%">
             <thead>
@@ -38,8 +48,8 @@
             </thead>
           </table>
         </div>
-        <div class="panel panel-default asset-list-panel-body {{if (eq filteredAssetList.length 0) 'no-asset-records'}}">
-          {{#if (eq filteredAssetList.length 0)}}
+        <div class="panel panel-default asset-list-panel-body {{if (and (eq filteredAssetList.length 0) (eq errorMsg "")) 'no-asset-records'}}">
+          {{#if (and (eq filteredAssetList.length 0) (eq errorMsg ""))}}
             <span>No assets</span>
           {{/if}}
           <table id="asset-list" class="table asset-list listing table-striped table-hover table-bordered" cellspacing="0" width="100%">
@@ -49,7 +59,7 @@
                   <td class="col-xs-3">{{asset.name}}</td>
                   <td class="col-xs-3">{{asset.type}}</td>
                   <td class="col-xs-3">{{asset.description}}</td>
-                  <td class="col-xs-3"><span {{action 'deleteAsset' asset}}><i class="fa fa-trash-o"></i></span></td>
+                  <td class="col-xs-3"><span {{action 'showDeleteAssetWarning' asset}}><i class="fa fa-trash-o"></i></span></td>
                 </tr>
               {{/each}}
             </tbody>
@@ -63,3 +73,13 @@
     </div>
   </div>
 </div>
+{{#if showingDeleteAssetWarning}}
+  <div id="asset-delete-confirm-dialog">
+    {{#confirmation-dialog title="Confirm asset delete"
+      confirmationMessage="Do you want to delete this asset?"
+      okBtnText="Continue" cancelBtnText="Cancel" onOk="deleteAsset"}}{{/confirmation-dialog}}
+  </div>
+{{/if}}
+{{#if inProgress}}
+  {{spin-spinner lines=13 length=20 width=10 top=200}}
+{{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-workspace.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-workspace.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-workspace.hbs
index 7a6005a..46a3f63 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-workspace.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-workspace.hbs
@@ -126,7 +126,7 @@
     </div>
   </div>
   {{#if showingAssetManager}}
-    {{#asset-manager showAssetManager="showAssetManager" deleteAsset="deleteAsset" assetList=assetList}}{{/asset-manager}}
+    {{#asset-manager showAssetManager="showAssetManager"}}{{/asset-manager}}
   {{/if}}
   {{#if showingWarning}}
   {{#confirmation-dialog title="Confirm workflow reset"

http://git-wip-us.apache.org/repos/asf/ambari/blob/74aff7c6/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
index 38d8eaf..1758946 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
@@ -201,7 +201,7 @@
               </div>
             {{/if}}
           {{/each}}
-          <div class="overlay-node-label">asd</div>
+          <div class="overlay-node-label"></div>
           <div class="overlay-transition-content">
             <div class="decision-condition-label">
               <div class="decision-condition-header">Condition</div>
@@ -306,7 +306,7 @@
   {{#asset-list showAssetList="showAssetList" importAsset="importAsset" assetList=assetList assetListType=assetListType}}{{/asset-list}}
 {{/if}}
 {{#if showingAssetNodeList}}
-  {{#asset-list showAssetList="showAssetNodeList" importAsset="importAssetNode" deleteAsset="deleteAsset" assetList=assetList}}{{/asset-list}}
+  {{#asset-list showAssetList="showAssetNodeList" importAsset="importAssetNode" assetList=assetList}}{{/asset-list}}
 {{/if}}
 {{#if showingPreview}}
   {{#preview-dialog title="Workflow XML Preview" previewXml=previewXml closePreview="closePreview"}}{{/preview-dialog}}


[33/50] [abbrv] ambari git commit: AMBARI-19682. Small fixes for common log rotation (Madhuvanthi Radhakrishnan via smohanty)

Posted by nc...@apache.org.
AMBARI-19682. Small fixes for common log rotation (Madhuvanthi Radhakrishnan via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1b630ebc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1b630ebc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1b630ebc

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1b630ebc6406ac2d81b375d4b503e028f6609c0e
Parents: 2a2b4e9
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Tue Jan 24 13:59:57 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Tue Jan 24 14:00:24 2017 -0800

----------------------------------------------------------------------
 .../AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml   | 8 ++++----
 .../LOGSEARCH/0.5.0/configuration/logfeeder-log4j.xml        | 4 ++--
 .../LOGSEARCH/0.5.0/configuration/logsearch-log4j.xml        | 4 ++--
 .../YARN/2.1.0.2.0/configuration/yarn-log4j.xml              | 4 ++--
 .../ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml        | 4 ++--
 .../HDP/2.3/services/YARN/configuration/yarn-log4j.xml       | 4 ++--
 6 files changed, 14 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1b630ebc/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml
index f5a2640..742cfe4 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-log4j.xml
@@ -24,7 +24,7 @@
     <name>ams_hbase_log_maxfilesize</name>
     <value>256</value>
     <description>The maximum size of backup file before the log is rotated</description>
-    <display-name>HBase Log: backup file size</display-name>
+    <display-name>AMS HBase Log: backup file size</display-name>
     <value-attributes>
       <unit>MB</unit>
     </value-attributes>
@@ -34,7 +34,7 @@
     <name>ams_hbase_log_maxbackupindex</name>
     <value>20</value>
     <description>The number of backup files</description>
-    <display-name>HBase Log: # of backup files</display-name>
+    <display-name>AMS HBase Log: # of backup files</display-name>
     <value-attributes>
       <type>int</type>
       <minimum>0</minimum>
@@ -45,7 +45,7 @@
     <name>ams_hbase_security_log_maxfilesize</name>
     <value>256</value>
     <description>The maximum size of backup file before the log is rotated</description>
-    <display-name>HBase Security Log: backup file size</display-name>
+    <display-name>AMS HBase Security Log: backup file size</display-name>
     <value-attributes>
       <unit>MB</unit>
     </value-attributes>
@@ -55,7 +55,7 @@
     <name>ams_hbase_security_log_maxbackupindex</name>
     <value>20</value>
     <description>The number of backup files</description>
-    <display-name>HBase Security Log: # of backup files</display-name>
+    <display-name>AMS HBase Security Log: # of backup files</display-name>
     <value-attributes>
       <type>int</type>
       <minimum>0</minimum>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b630ebc/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-log4j.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-log4j.xml
index 33749ff..ba33a00 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-log4j.xml
@@ -46,7 +46,7 @@
     <name>logfeeder_json_log_maxfilesize</name>
     <value>10</value>
     <description>The maximum size of backup file before the log is rotated</description>
-    <display-name>Log Feeder Json Log: backup file size</display-name>
+    <display-name>Log Feeder JSON Log: backup file size</display-name>
     <value-attributes>
       <unit>MB</unit>
     </value-attributes>
@@ -56,7 +56,7 @@
     <name>logfeeder_json_log_maxbackupindex</name>
     <value>10</value>
     <description>The number of backup files</description>
-    <display-name>Log Feeder Json Log: # of backup files</display-name>
+    <display-name>Log Feeder JSON Log: # of backup files</display-name>
     <value-attributes>
       <type>int</type>
       <minimum>0</minimum>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b630ebc/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-log4j.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-log4j.xml
index a845741..31f7d57 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-log4j.xml
@@ -47,7 +47,7 @@
     <name>logsearch_json_log_maxfilesize</name>
     <value>10</value>
     <description>The maximum size of backup file before the log is rotated</description>
-    <display-name>Log Search Json Log: backup file size</display-name>
+    <display-name>Log Search JSON Log: backup file size</display-name>
     <value-attributes>
       <unit>MB</unit>
     </value-attributes>
@@ -57,7 +57,7 @@
     <name>logsearch_json_log_maxbackupindex</name>
     <value>10</value>
     <description>The number of backup files</description>
-    <display-name>Log Search Json Log: # of backup files</display-name>
+    <display-name>Log Search JSON Log: # of backup files</display-name>
     <value-attributes>
       <type>int</type>
       <minimum>0</minimum>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b630ebc/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
index 41a3c0e..a49ad04 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-log4j.xml
@@ -24,7 +24,7 @@
     <name>yarn_rm_summary_log_max_backup_size</name>
     <value>256</value>
     <description>The maximum size of backup file before the log is rotated</description>
-    <display-name>Yarn Log: backup file size</display-name>
+    <display-name>YARN Log: backup file size</display-name>
     <value-attributes>
         <unit>MB</unit>
     </value-attributes>
@@ -34,7 +34,7 @@
       <name>yarn_rm_summary_log_number_of_backup_files</name>
       <value>20</value>
       <description>The number of backup files</description>
-      <display-name>Yarn Log: # of backup files</display-name>
+      <display-name>YARN Log: # of backup files</display-name>
     <value-attributes>
         <type>int</type>
         <minimum>0</minimum>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b630ebc/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
index 75dba02..76dff64 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
@@ -24,7 +24,7 @@
     <name>zookeeper_log_max_backup_size</name>
     <value>10</value>
     <description>The maximum size of backup file before the log is rotated</description>
-    <display-name>Zookeeper Log: backup file size</display-name>
+    <display-name>ZooKeeper Log: backup file size</display-name>
     <value-attributes>
       <unit>MB</unit>
     </value-attributes>
@@ -34,7 +34,7 @@
     <name>zookeeper_log_number_of_backup_files</name>
     <value>10</value>
     <description>The number of backup files</description>
-    <display-name>Zookeeper Log: # of backup files</display-name>
+    <display-name>ZooKeeper Log: # of backup files</display-name>
     <value-attributes>
       <type>int</type>
       <minimum>0</minimum>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b630ebc/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
index 7816591..3f5e48e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-log4j.xml
@@ -24,7 +24,7 @@
     <name>yarn_rm_summary_log_max_backup_size</name>
     <value>256</value>
     <description>The maximum size of backup file before the log is rotated</description>
-    <display-name>Yarn Log: backup file size</display-name>
+    <display-name>YARN Log: backup file size</display-name>
     <value-attributes>
         <unit>MB</unit>
     </value-attributes>
@@ -34,7 +34,7 @@
       <name>yarn_rm_summary_log_number_of_backup_files</name>
       <value>20</value>
       <description>The number of backup files</description>
-      <display-name>Yarn Log: # of backup files</display-name>
+      <display-name>YARN Log: # of backup files</display-name>
     <value-attributes>
         <type>int</type>
         <minimum>0</minimum>


[37/50] [abbrv] ambari git commit: AMBARI-19699. Axis units and aggregators are wrong in some graphs of ambari grafana dashboards. (Vivek Subramanian via yusaku)

Posted by nc...@apache.org.
AMBARI-19699. Axis units and aggregators are wrong in some graphs of ambari grafana dashboards. (Vivek Subramanian via yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bb8b44cf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bb8b44cf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bb8b44cf

Branch: refs/heads/branch-dev-patch-upgrade
Commit: bb8b44cff641c49397d1eb72481398036e90f459
Parents: b3a070c
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Tue Jan 24 17:05:15 2017 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Tue Jan 24 17:05:15 2017 -0800

----------------------------------------------------------------------
 .../default/grafana-ambari-server-database.json |  4 +--
 .../default/grafana-ambari-server.json          | 26 ++++++++++----------
 2 files changed, 15 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bb8b44cf/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/default/grafana-ambari-server-database.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/default/grafana-ambari-server-database.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/default/grafana-ambari-server-database.json
index aea7520..229db83 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/default/grafana-ambari-server-database.json
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/default/grafana-ambari-server-database.json
@@ -309,7 +309,7 @@
           "x-axis": true,
           "y-axis": true,
           "y_formats": [
-            "ns",
+            "none",
             "short"
           ]
         }
@@ -757,7 +757,7 @@
           "grid": {
             "leftLogBase": 1,
             "leftMax": null,
-            "leftMin": null,
+            "leftMin": 0,
             "rightLogBase": 1,
             "rightMax": null,
             "rightMin": null,

http://git-wip-us.apache.org/repos/asf/ambari/blob/bb8b44cf/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/default/grafana-ambari-server.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/default/grafana-ambari-server.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/default/grafana-ambari-server.json
index 2cdd6d9..c458931 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/default/grafana-ambari-server.json
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/default/grafana-ambari-server.json
@@ -81,7 +81,7 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "max",
+              "aggregator": "none",
               "alias": "MemHeapMax",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
@@ -93,7 +93,7 @@
               "transform": "none"
             },
             {
-              "aggregator": "max",
+              "aggregator": "none",
               "alias": "MemHeapUsed",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
@@ -105,7 +105,7 @@
               "transform": "none"
             },
             {
-              "aggregator": "max",
+              "aggregator": "none",
               "alias": "MemHeapCommitted",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
@@ -176,7 +176,7 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "max",
+              "aggregator": "none",
               "alias": "MemNonHeapMax",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
@@ -280,7 +280,7 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "avg",
+              "aggregator": "none",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
               "errors": {},
@@ -350,7 +350,7 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "avg",
+              "aggregator": "none",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
               "errors": {},
@@ -420,7 +420,7 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "max",
+              "aggregator": "none",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
               "errors": {},
@@ -490,7 +490,7 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "avg",
+              "aggregator": "none",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
               "errors": {},
@@ -569,7 +569,7 @@
           "steppedLine": false,
           "targets": [
             {
-              "aggregator": "max",
+              "aggregator": "none",
               "alias": "Total",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
@@ -581,7 +581,7 @@
               "transform": "none"
             },
             {
-              "aggregator": "max",
+              "aggregator": "none",
               "alias": "Daemon",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
@@ -593,7 +593,7 @@
               "transform": "none"
             },
             {
-              "aggregator": "max",
+              "aggregator": "none",
               "alias": "Deadlock",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
@@ -605,7 +605,7 @@
               "transform": "none"
             },
             {
-              "aggregator": "max",
+              "aggregator": "none",
               "alias": "Blocked",
               "app": "ambari_server",
               "downsampleAggregator": "avg",
@@ -617,7 +617,7 @@
               "transform": "none"
             },
             {
-              "aggregator": "max",
+              "aggregator": "none",
               "alias": "Runnable",
               "app": "ambari_server",
               "downsampleAggregator": "avg",


[50/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/551f17b4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/551f17b4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/551f17b4

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 551f17b427465f1a3420a1158fa75f6498fd0fd2
Parents: eb2c904 6a81155
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Jan 25 13:56:33 2017 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Jan 25 13:56:33 2017 -0500

----------------------------------------------------------------------
 .../controllers/ambariViews/ViewsListCtrl.js    |    6 +
 .../controllers/groups/GroupsListCtrl.js        |    3 +
 .../remoteClusters/RemoteClustersListCtrl.js    |    3 +
 .../stackVersions/StackVersionsCreateCtrl.js    |   17 -
 .../stackVersions/StackVersionsEditCtrl.js      |   17 +-
 .../stackVersions/StackVersionsListCtrl.js      |    3 +
 .../scripts/controllers/users/UsersListCtrl.js  |    3 +
 .../resources/ui/admin-web/app/styles/main.css  |   11 +-
 .../app/views/ambariViews/listTable.html        |    5 +-
 .../app/views/ambariViews/listUrls.html         |    5 +-
 .../ui/admin-web/app/views/groups/list.html     |    5 +-
 .../app/views/remoteClusters/list.html          |    5 +-
 .../admin-web/app/views/stackVersions/list.html |    5 +-
 .../ui/admin-web/app/views/users/list.html      |    5 +-
 .../src/main/python/ambari_agent/Controller.py  |   20 +-
 .../ambari_agent/CustomServiceOrchestrator.py   |    8 +-
 .../src/main/python/ambari_agent/Facter.py      |   44 +-
 .../main/python/ambari_agent/PythonExecutor.py  |   19 +-
 .../test/python/ambari_agent/TestActionQueue.py |   43 +
 .../test/python/ambari_agent/TestController.py  |   20 +-
 .../test/python/ambari_agent/TestHardware.py    |   47 +
 .../src/main/python/ambari_commons/os_linux.py  |    6 +-
 .../libraries/functions/constants.py            |    2 +
 .../libraries/functions/security_commons.py     |   29 +-
 .../libraries/functions/setup_atlas_hook.py     |   14 +-
 .../libraries/functions/solr_cloud_util.py      |   22 +-
 .../libraries/functions/stack_select.py         |    5 +-
 .../libraries/script/dummy.py                   |   29 +-
 .../HDP/2.0.8/services/HDFS/kerberos.json       |    2 +-
 .../logfeeder/metrics/LogFeederAMSClient.java   |   40 +-
 .../apache/ambari/logfeeder/util/SSLUtil.java   |   78 +-
 .../src/main/scripts/run.sh                     |   78 +-
 .../ambari-logsearch-portal/pom.xml             |   13 +-
 .../configsets/audit_logs/conf/managed-schema   |  100 +-
 .../logsearch/common/ACLPropertiesSplitter.java |   70 +
 .../ambari/logsearch/common/MessageEnums.java   |    3 +
 .../ambari/logsearch/conf/ApiDocConfig.java     |    1 -
 .../conf/LogSearchHttpHeaderConfig.java         |   70 +
 .../ambari/logsearch/conf/SecurityConfig.java   |   68 +-
 .../logsearch/conf/SolrAuditLogPropsConfig.java |   27 +
 .../ambari/logsearch/conf/SolrConfig.java       |   71 +-
 .../conf/SolrConnectionPropsConfig.java         |   29 +
 .../ambari/logsearch/conf/SolrPropsConfig.java  |   12 +
 .../conf/global/SolrAuditLogsState.java         |   68 +
 .../conf/global/SolrCollectionState.java        |   33 +
 .../conf/global/SolrServiceLogsState.java       |   59 +
 .../conf/global/SolrUserConfigState.java        |   60 +
 .../configurer/LogfeederFilterConfigurer.java   |   66 +
 .../configurer/SolrAuditAliasConfigurer.java    |  136 +
 .../configurer/SolrCollectionConfigurer.java    |  230 +
 .../logsearch/configurer/SolrConfigurer.java    |   23 +
 .../ambari/logsearch/dao/AuditSolrDao.java      |   38 +-
 .../logsearch/dao/ServiceLogsSolrDao.java       |   30 +-
 .../ambari/logsearch/dao/SolrAliasDao.java      |  121 -
 .../ambari/logsearch/dao/SolrCollectionDao.java |  313 -
 .../ambari/logsearch/dao/SolrDaoBase.java       |   18 +-
 .../logsearch/dao/SolrSchemaFieldDao.java       |   38 +-
 .../ambari/logsearch/dao/UserConfigSolrDao.java |   71 +-
 .../ambari/logsearch/doc/DocConstants.java      |    7 +
 .../ambari/logsearch/handler/ACLHandler.java    |   97 +
 .../handler/CreateCollectionHandler.java        |  222 +
 .../handler/ListCollectionHandler.java          |   51 +
 .../handler/ReloadCollectionHandler.java        |   45 +
 .../logsearch/handler/SolrZkRequestHandler.java |   26 +
 .../handler/UploadConfigurationHandler.java     |  100 +
 .../ambari/logsearch/rest/StatusResource.java   |   91 +
 .../ambari/logsearch/util/RESTErrorUtil.java    |   10 +-
 .../apache/ambari/logsearch/util/SSLUtil.java   |   65 +-
 .../AbstractLogsearchGlobalStateFilter.java     |  100 +
 .../filters/LogsearchAuditLogsStateFilter.java  |   51 +
 .../web/filters/LogsearchCorsFilter.java        |   59 +
 .../LogsearchServiceLogsStateFilter.java        |   51 +
 .../filters/LogsearchUserConfigStateFilter.java |   52 +
 .../src/main/resources/swagger/swagger.html     |   26 +-
 .../src/main/webapp/scripts/utils/Tour.js       |  820 +-
 .../src/main/webapp/scripts/utils/Utils.js      |    9 +-
 .../logsearch/solr/AmbariSolrCloudClient.java   |   14 -
 .../solr/commands/CreateSaslUsersZkCommand.java |   60 -
 .../solr/commands/SecureSolrZNodeZkCommand.java |   17 +-
 .../ambari/logsearch/solr/util/AclUtils.java    |   30 -
 ambari-logsearch/docker/bin/start.sh            |    6 -
 .../logsearch/logsearch-https.properties        |    3 +
 .../test-config/logsearch/logsearch.properties  |    3 +
 .../ambari-metrics/datasource.js                |  101 +
 ambari-project/pom.xml                          |    4 +-
 ambari-server/conf/unix/create-python-wrap.sh   |   40 +
 ambari-server/src/main/assemblies/server.xml    |    5 +
 .../StackAdvisorBlueprintProcessor.java         |    8 +
 .../stackadvisor/StackAdvisorRequest.java       |   15 +
 .../commands/StackAdvisorCommand.java           |    4 +
 .../ambari/server/cleanup/CleanupDriver.java    |    3 +-
 .../server/configuration/Configuration.java     |   60 +-
 .../controller/AmbariActionExecutionHelper.java |   13 +-
 .../AmbariCustomCommandExecutionHelper.java     |   66 +-
 .../AmbariManagementControllerImpl.java         |  212 +-
 .../BlueprintConfigurationProcessor.java        |   11 +-
 .../internal/ClusterResourceProvider.java       |   17 +-
 .../internal/ConfigGroupResourceProvider.java   |   14 +-
 .../internal/HostResourceProvider.java          |    9 +-
 .../internal/ProvisionClusterRequest.java       |   36 +
 .../server/controller/internal/Stack.java       |    7 +
 .../internal/StackAdvisorResourceProvider.java  |   31 +-
 .../internal/UpgradeResourceProvider.java       |  142 +-
 .../logging/LogSearchDataRetrievalService.java  |   28 +-
 .../AmbariAuthorizationFilter.java              |    2 +-
 .../AmbariLdapAuthenticationProvider.java       |    6 +-
 .../AmbariLdapBindAuthenticator.java            |    9 +-
 .../security/authorization/AmbariLdapUtils.java |   69 +-
 .../security/ldap/AmbariLdapDataPopulator.java  |  105 +-
 .../upgrades/AbstractUpgradeServerAction.java   |   13 +-
 .../users/CsvFilePersisterService.java          |   24 +-
 .../ambari/server/stack/StackManager.java       |    4 +-
 .../org/apache/ambari/server/state/Cluster.java |   14 +
 .../apache/ambari/server/state/Clusters.java    |    3 +-
 .../ambari/server/state/PropertyInfo.java       |    3 +-
 .../ambari/server/state/UpgradeContext.java     |  190 +-
 .../server/state/UpgradeContextFactory.java     |   25 +-
 .../server/state/cluster/ClusterImpl.java       |  106 +-
 .../server/state/cluster/ClustersImpl.java      |    3 +-
 .../state/configgroup/ConfigGroupImpl.java      |    5 +-
 .../quicklinksprofile/AcceptAllFilter.java      |    5 +
 .../state/quicklinksprofile/Component.java      |    3 +
 .../server/state/quicklinksprofile/Filter.java  |    7 +-
 .../quicklinksprofile/QuickLinksProfile.java    |    9 +
 .../QuickLinksProfileBuilder.java               |  142 +
 .../QuickLinksProfileEvaluationException.java   |    4 +
 .../QuickLinksProfileParser.java                |   25 +-
 .../server/state/quicklinksprofile/Service.java |    3 +
 .../ambari/server/topology/TopologyManager.java |   38 +
 .../server/upgrade/AbstractUpgradeCatalog.java  |    2 +-
 .../server/upgrade/UpgradeCatalog250.java       |    8 +-
 .../src/main/package/deb/control/preinst        |   46 +
 .../src/main/package/rpm/preinstall.sh          |   46 +
 .../src/main/python/ambari_server_main.py       |   21 +-
 ambari-server/src/main/python/bootstrap.py      |   31 +
 ambari-server/src/main/python/os_check_type.py  |    2 +-
 .../1.6.1.2.2.0/configuration/accumulo-env.xml  |    1 +
 .../ACCUMULO/1.6.1.2.2.0/kerberos.json          |    2 +-
 .../0.1.0/configuration/infra-solr-env.xml      |    2 +
 .../0.1.0/package/scripts/infra_solr.py         |   13 +
 .../0.1.0/package/scripts/params.py             |   12 +
 .../0.1.0/package/scripts/setup_infra_solr.py   |    9 +
 .../0.1.0/configuration/ams-hbase-log4j.xml     |   10 +-
 .../configuration/ams-hbase-security-site.xml   |    4 +
 .../HDP/grafana-druid-home.json                 |  995 ++
 .../HDP/grafana-druid-ingestion.json            |  776 ++
 .../HDP/grafana-druid-query.json                |  858 ++
 .../default/grafana-ambari-server-database.json |  902 ++
 .../default/grafana-ambari-server-topn.json     |  437 +
 .../default/grafana-ambari-server.json          |   99 +-
 .../configuration/application-properties.xml    |    3 +-
 .../ATLAS/0.1.0.2.3/package/scripts/params.py   |    2 +-
 .../0.1.0.2.3/package/scripts/status_params.py  |    3 +-
 .../configuration/application-properties.xml    |    4 +-
 .../0.7.0.2.5/configuration/atlas-log4j.xml     |    2 +-
 .../ATLAS/0.7.0.2.5/quicklinks/quicklinks.json  |    1 +
 .../DRUID/0.9.2/configuration/druid-common.xml  |   58 +
 .../DRUID/0.9.2/package/scripts/params.py       |   36 +
 .../0.5.0.2.1/configuration/falcon-log4j.xml    |    2 +-
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |    6 +-
 .../0.5.0.2.1/package/scripts/params_linux.py   |    4 +-
 .../FALCON/0.5.0.2.1/quicklinks/quicklinks.json |    1 +
 .../FLUME/1.4.0.2.0/kerberos.json               |   44 +
 .../1.4.0.2.0/package/scripts/flume_check.py    |    6 +-
 .../FLUME/1.4.0.2.0/package/scripts/params.py   |   12 +-
 .../0.96.0.2.0/configuration/hbase-env.xml      |    1 +
 .../0.96.0.2.0/configuration/hbase-log4j.xml    |    2 +-
 .../HBASE/0.96.0.2.0/kerberos.json              |    2 +-
 .../HDFS/2.1.0.2.0/configuration/hadoop-env.xml |    1 +
 .../HDFS/2.1.0.2.0/configuration/hdfs-log4j.xml |    4 +-
 .../HDFS/2.1.0.2.0/kerberos.json                |    2 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |    5 +
 .../HDFS/2.1.0.2.0/package/scripts/utils.py     |   24 +-
 .../2.1.0.2.0/package/scripts/zkfc_slave.py     |   21 +-
 .../package/templates/hdfs_jaas.conf.j2         |   27 +
 .../HDFS/3.0.0.3.0/configuration/hadoop-env.xml |    5 +
 .../HDFS/3.0.0.3.0/configuration/hdfs-log4j.xml |    4 +-
 .../HDFS/3.0.0.3.0/kerberos.json                |    5 +-
 .../3.0.0.3.0/package/scripts/params_linux.py   |  167 +-
 .../package/scripts/setup_ranger_hdfs.py        |   43 +-
 .../HDFS/3.0.0.3.0/package/scripts/utils.py     |   26 +-
 .../3.0.0.3.0/package/scripts/zkfc_slave.py     |   18 +-
 .../package/templates/hdfs_jaas.conf.j2         |   27 +
 .../configuration/hive-exec-log4j.xml           |    2 +-
 .../0.12.0.2.0/configuration/hive-log4j.xml     |    2 +-
 .../0.12.0.2.0/configuration/webhcat-log4j.xml  |    2 +-
 .../HIVE/0.12.0.2.0/package/scripts/hcat.py     |    2 +-
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     |   57 +-
 .../package/scripts/hive_metastore.py           |    5 +-
 .../package/scripts/hive_server_interactive.py  |   66 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |    4 +-
 .../package/scripts/webhcat_server.py           |    2 -
 .../KAFKA/0.8.1/configuration/kafka-env.xml     |    1 +
 .../KAFKA/0.8.1/configuration/kafka-log4j.xml   |    2 +-
 .../KAFKA/0.8.1/package/scripts/params.py       |    4 +-
 .../1.10.3-10/configuration/kerberos-env.xml    |    1 +
 .../0.5.0.2.2/configuration/gateway-log4j.xml   |    2 +-
 .../KNOX/0.5.0.2.2/configuration/knox-env.xml   |    1 +
 .../KNOX/0.5.0.2.2/configuration/ldap-log4j.xml |    2 +-
 .../0.5.0/configuration/logfeeder-env.xml       |    2 +
 .../0.5.0/configuration/logfeeder-log4j.xml     |    4 +-
 .../0.5.0/configuration/logsearch-env.xml       |    2 +
 .../0.5.0/configuration/logsearch-log4j.xml     |    4 +-
 .../configuration/logsearch-properties.xml      |    1 +
 .../LOGSEARCH/0.5.0/metainfo.xml                |   52 +-
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |   49 +-
 .../0.5.0/package/scripts/service_check.py      |    2 +-
 .../0.5.0/package/scripts/setup_logsearch.py    |   38 -
 .../package/templates/input.config-hst.json.j2  |  102 -
 .../0.5.0/properties/logfeeder-env.sh.j2        |    2 +-
 .../0.5.0/properties/logsearch-env.sh.j2        |    2 +-
 .../LOGSEARCH/0.5.0/quicklinks/quicklinks.json  |    1 +
 .../4.0.0.2.0/configuration/oozie-log4j.xml     |    2 +-
 .../4.0.0.2.0/package/scripts/oozie_server.py   |    4 +
 .../4.0.0.2.0/package/scripts/params_linux.py   |    3 +
 .../4.2.0.2.3/configuration/oozie-log4j.xml     |    2 +-
 .../OOZIE/4.2.0.2.3/kerberos.json               |    3 +-
 .../common-services/RANGER/0.4.0/metainfo.xml   |    4 +
 .../RANGER/0.4.0/quicklinks/quicklinks.json     |    1 +
 .../RANGER/0.5.0/quicklinks/quicklinks.json     |    1 +
 .../RANGER/0.6.0/configuration/admin-log4j.xml  |    2 +-
 .../0.6.0/configuration/ranger-admin-site.xml   |    3 +
 .../0.6.0/configuration/ranger-tagsync-site.xml |    1 +
 .../0.6.0/configuration/ranger-ugsync-site.xml  |    1 +
 .../0.6.0/configuration/tagsync-log4j.xml       |    2 +-
 .../0.6.0/configuration/usersync-log4j.xml      |    2 +-
 .../0.7.0/configuration/ranger-ugsync-site.xml  |   42 +
 .../common-services/RANGER/0.7.0/metainfo.xml   |    6 +
 .../RANGER/0.7.0/themes/theme_version_5.json    |   22 +
 .../0.5.0.2.3/configuration/kms-log4j.xml       |    2 +-
 .../0.5.0.2.3/configuration/kms-site.xml        |    2 +
 .../1.2.1/configuration/spark-defaults.xml      |    1 +
 .../SPARK/1.2.1/configuration/spark-env.xml     |    1 +
 .../common-services/SPARK/1.2.1/kerberos.json   |    2 +-
 .../common-services/SPARK/1.4.1/kerberos.json   |    2 +-
 .../2.0.0/configuration/spark2-defaults.xml     |    1 +
 .../SPARK2/2.0.0/configuration/spark2-env.xml   |    1 +
 .../configuration/spark2-logsearch-conf.xml     |   10 +-
 .../common-services/SPARK2/2.0.0/kerberos.json  |    2 +-
 .../configuration/storm-cluster-log4j.xml       |    2 +-
 .../0.10.0/configuration/storm-worker-log4j.xml |    2 +-
 .../STORM/0.9.1/configuration/storm-env.xml     |    4 +
 .../common-services/STORM/0.9.1/kerberos.json   |    2 +-
 .../STORM/0.9.1/package/scripts/params_linux.py |    2 +-
 .../1.0.1/configuration/storm-cluster-log4j.xml |    2 +-
 .../1.0.1/configuration/storm-worker-log4j.xml  |    2 +-
 .../common-services/STORM/1.0.1/kerberos.json   |    2 +-
 .../YARN/2.1.0.2.0/configuration/yarn-log4j.xml |    6 +-
 .../YARN/2.1.0.2.0/kerberos.json                |    1 -
 .../2.1.0.2.0/package/scripts/params_linux.py   |    1 +
 .../package/scripts/resourcemanager.py          |    5 +-
 .../YARN/3.0.0.3.0/configuration/yarn-log4j.xml |    2 +-
 .../YARN/3.0.0.3.0/kerberos.json                |    3 +-
 .../3.0.0.3.0/package/scripts/params_linux.py   |  213 +-
 .../package/scripts/resourcemanager.py          |    2 +-
 .../package/scripts/setup_ranger_yarn.py        |    4 +-
 .../0.6.0.2.5/configuration/zeppelin-env.xml    |    1 +
 .../ZEPPELIN/0.6.0.2.5/kerberos.json            |    2 +-
 .../0.6.0.2.5/package/scripts/master.py         |   22 +-
 .../ZOOKEEPER/3.4.5/configuration/zoo.cfg.xml   |    2 +-
 .../3.4.5/configuration/zookeeper-env.xml       |    1 +
 .../3.4.5/configuration/zookeeper-log4j.xml     |   10 +-
 .../ZOOKEEPER/3.4.6/metainfo.xml                |    2 +-
 .../src/main/resources/configuration-schema.xsd |    1 +
 .../custom_actions/scripts/ru_set_all.py        |    2 +
 .../src/main/resources/properties.json          |    3 +
 .../main/resources/scripts/Ambaripreupload.py   |    1 +
 .../src/main/resources/scripts/stack_advisor.py |   14 +-
 .../2.0.6/hooks/before-ANY/scripts/params.py    |   19 +-
 .../2.0.6/hooks/before-START/scripts/params.py  |    2 +-
 .../resources/stacks/HDP/2.0.6/kerberos.json    |    7 +-
 .../HDP/2.0.6/properties/stack_features.json    |   10 +
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   87 +-
 .../stacks/HDP/2.1/services/OOZIE/metainfo.xml  |    2 +-
 .../services/HDFS/configuration/hdfs-log4j.xml  |    4 +-
 .../stacks/HDP/2.2/services/HDFS/metainfo.xml   |    5 +-
 .../stacks/HDP/2.2/services/HIVE/metainfo.xml   |    4 +-
 .../stacks/HDP/2.2/services/PIG/metainfo.xml    |    2 +-
 .../stacks/HDP/2.2/services/YARN/kerberos.json  |    3 +-
 .../stacks/HDP/2.2/services/YARN/metainfo.xml   |    4 +-
 .../HDP/2.3.ECS/services/ECS/kerberos.json      |    2 +-
 .../HDP/2.3.ECS/services/HBASE/kerberos.json    |    2 +-
 .../HDP/2.3.ECS/services/YARN/kerberos.json     |    3 +-
 .../services/ACCUMULO/kerberos.json             |    2 +-
 .../HDP/2.3/services/ACCUMULO/kerberos.json     |    4 +-
 .../services/ATLAS/quicklinks/quicklinks.json   |    1 +
 .../stacks/HDP/2.3/services/HDFS/metainfo.xml   |    3 +
 .../services/YARN/configuration/yarn-log4j.xml  |    6 +-
 .../stacks/HDP/2.3/services/YARN/kerberos.json  |    3 +-
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |   46 +
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |   29 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml |   29 +
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml |   30 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |    6 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml     |    5 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |   45 +-
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |   44 +
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml |   28 +
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml |   27 +
 .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml     |    9 +-
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |    8 +
 .../stacks/HDP/2.5/services/HBASE/kerberos.json |    2 +-
 .../stacks/HDP/2.5/services/HDFS/kerberos.json  |    2 +-
 .../HIVE/configuration/hive-exec-log4j2.xml     |    2 +-
 .../services/HIVE/configuration/hive-log4j2.xml |    2 +-
 .../HIVE/configuration/llap-cli-log4j2.xml      |    2 +-
 .../HIVE/configuration/llap-daemon-log4j.xml    |    2 +-
 .../stacks/HDP/2.5/services/SPARK/kerberos.json |    4 +-
 .../stacks/HDP/2.5/services/YARN/kerberos.json  |    3 +-
 .../stacks/HDP/2.5/services/stack_advisor.py    |   37 +-
 .../stacks/HDP/2.5/upgrades/config-upgrade.xml  |   47 +
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml |   30 +-
 .../stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |    8 +
 .../ATLAS/configuration/atlas-log4j.xml         |    2 +-
 .../services/HBASE/configuration/hbase-env.xml  |   37 +
 .../services/HDFS/configuration/hadoop-env.xml  |  181 +
 .../stacks/HDP/2.6/services/HDFS/kerberos.json  |  247 +
 .../HIVE/configuration/hive-interactive-env.xml |    2 +-
 .../stacks/HDP/2.6/services/OOZIE/kerberos.json |   70 +
 .../services/SPARK/configuration/livy-conf.xml  |    8 +
 .../services/SPARK/configuration/livy-env.xml   |   96 +
 .../stacks/HDP/2.6/services/SPARK/kerberos.json |    4 +-
 .../SPARK2/configuration/livy2-conf.xml         |    8 +
 .../services/SPARK2/configuration/livy2-env.xml |    2 +
 .../HDP/2.6/services/SPARK2/kerberos.json       |    4 +-
 .../stacks/HDP/2.6/services/SPARK2/metainfo.xml |    2 +-
 .../stacks/HDP/2.6/services/YARN/kerberos.json  |  278 +
 .../stacks/HDP/2.6/services/stack_advisor.py    |    3 +
 .../HDP/3.0/hooks/before-ANY/scripts/params.py  |   16 +-
 .../main/resources/stacks/HDP/3.0/kerberos.json |    7 +-
 .../HDP/3.0/properties/stack_features.json      |    5 +
 .../services/HDFS/configuration/hadoop-env.xml  |    4 +
 .../hooks/before-INSTALL/scripts/conf-select.py |   35 +
 .../before-INSTALL/scripts/distro-select.py     |  145 +
 .../1.0/hooks/before-INSTALL/scripts/hook.py    |   38 +
 .../1.0/hooks/before-INSTALL/scripts/params.py  |   23 +
 .../resources/stacks/PERF/1.0/kerberos.json     |    7 +-
 .../main/resources/stacks/PERF/1.0/metainfo.xml |    6 +-
 .../PERF/1.0/properties/stack_features.json     |    5 +
 .../stacks/PERF/1.0/repos/repoinfo.xml          |    8 +-
 .../PERF/1.0/services/FAKEHBASE/alerts.json     |   35 +
 .../configuration/hbase-alert-config.xml        |   80 +
 .../FAKEHBASE/configuration/hbase-env.xml       |  292 +
 .../FAKEHBASE/configuration/hbase-log4j.xml     |  146 +
 .../configuration/hbase-logsearch-conf.xml      |  111 +
 .../FAKEHBASE/configuration/hbase-policy.xml    |   53 +
 .../FAKEHBASE/configuration/hbase-site.xml      |  555 ++
 .../configuration/ranger-hbase-audit.xml        |  122 +
 .../ranger-hbase-policymgr-ssl.xml              |   66 +
 .../configuration/ranger-hbase-security.xml     |   68 +
 .../PERF/1.0/services/FAKEHBASE/kerberos.json   |  159 +
 .../PERF/1.0/services/FAKEHBASE/metainfo.xml    |  197 +
 .../PERF/1.0/services/FAKEHBASE/metrics.json    | 9374 ++++++++++++++++++
 .../package/alerts/hbase_master_process.py      |   59 +
 .../alerts/hbase_regionserver_process.py        |   59 +
 .../FAKEHBASE/package/scripts/hbase_client.py   |   38 +
 .../FAKEHBASE/package/scripts/hbase_master.py   |   45 +
 .../package/scripts/hbase_regionserver.py       |   45 +
 .../package/scripts/phoenix_queryserver.py      |   42 +
 .../FAKEHBASE/package/scripts/service_check.py  |   30 +
 .../FAKEHBASE/quicklinks/quicklinks.json        |   97 +
 .../1.0/services/FAKEHBASE/themes/theme.json    |  411 +
 .../PERF/1.0/services/FAKEHBASE/widgets.json    |  510 +
 .../PERF/1.0/services/FAKEHDFS/alerts.json      |  120 +
 .../FAKEHDFS/configuration/core-site.xml        |  225 +
 .../FAKEHDFS/configuration/hadoop-env.xml       |  419 +
 .../hadoop-metrics2.properties.xml              |  125 +
 .../FAKEHDFS/configuration/hadoop-policy.xml    |  130 +
 .../configuration/hdfs-alert-config.xml         |   80 +
 .../FAKEHDFS/configuration/hdfs-log4j.xml       |  225 +
 .../configuration/hdfs-logsearch-conf.xml       |  248 +
 .../FAKEHDFS/configuration/hdfs-site.xml        |  633 ++
 .../configuration/ranger-hdfs-audit.xml         |  124 +
 .../ranger-hdfs-plugin-properties.xml           |   88 +
 .../configuration/ranger-hdfs-policymgr-ssl.xml |   67 +
 .../configuration/ranger-hdfs-security.xml      |   65 +
 .../FAKEHDFS/configuration/ssl-client.xml       |   70 +
 .../FAKEHDFS/configuration/ssl-server.xml       |   80 +
 .../PERF/1.0/services/FAKEHDFS/kerberos.json    |  246 +
 .../PERF/1.0/services/FAKEHDFS/metainfo.xml     |  266 +
 .../PERF/1.0/services/FAKEHDFS/metrics.json     | 7905 +++++++++++++++
 .../package/alerts/alert_checkpoint_time.py     |   59 +
 .../alerts/alert_datanode_unmounted_data_dir.py |   59 +
 .../package/alerts/alert_nfs_gateway_process.py |   59 +
 .../package/alerts/alert_snamenode_process.py   |   59 +
 .../package/alerts/alert_upgrade_finalized.py   |   59 +
 .../FAKEHDFS/package/scripts/datanode.py        |   57 +
 .../FAKEHDFS/package/scripts/hdfs_client.py     |   38 +
 .../FAKEHDFS/package/scripts/journalnode.py     |   58 +
 .../FAKEHDFS/package/scripts/namenode.py        |   79 +
 .../FAKEHDFS/package/scripts/nfsgateway.py      |   42 +
 .../services/FAKEHDFS/package/scripts/params.py |   33 +
 .../FAKEHDFS/package/scripts/service_check.py   |   30 +
 .../FAKEHDFS/package/scripts/snamenode.py       |   42 +
 .../FAKEHDFS/package/scripts/zkfc_slave.py      |   38 +
 .../FAKEHDFS/quicklinks/quicklinks.json         |   76 +
 .../1.0/services/FAKEHDFS/themes/theme.json     |  179 +
 .../PERF/1.0/services/FAKEHDFS/widgets.json     |  649 ++
 .../1.0/services/FAKEYARN/YARN_metrics.json     | 3486 +++++++
 .../1.0/services/FAKEYARN/YARN_widgets.json     |  611 ++
 .../PERF/1.0/services/FAKEYARN/alerts.json      |   77 +
 .../configuration-mapred/mapred-env.xml         |   50 +
 .../configuration-mapred/mapred-site.xml        |  134 +
 .../configuration/capacity-scheduler.xml        |   69 +
 .../configuration/ranger-yarn-audit.xml         |  121 +
 .../ranger-yarn-plugin-properties.xml           |   82 +
 .../configuration/ranger-yarn-policymgr-ssl.xml |   66 +
 .../configuration/ranger-yarn-security.xml      |   58 +
 .../configuration/yarn-alert-config.xml         |   80 +
 .../FAKEYARN/configuration/yarn-env.xml         |  201 +
 .../FAKEYARN/configuration/yarn-log4j.xml       |  103 +
 .../FAKEYARN/configuration/yarn-site.xml        |  780 ++
 .../PERF/1.0/services/FAKEYARN/kerberos.json    |  278 +
 .../PERF/1.0/services/FAKEYARN/metainfo.xml     |  357 +
 .../package/alerts/alert_history_process.py     |   59 +
 .../package/alerts/alert_nodemanager_health.py  |   59 +
 .../alerts/alert_resourcemanager_process.py     |   59 +
 .../package/alerts/alert_timeline_process.py    |   59 +
 .../scripts/application_timeline_server.py      |   42 +
 .../FAKEYARN/package/scripts/historyserver.py   |   42 +
 .../package/scripts/mapred_service_check.py     |   30 +
 .../package/scripts/mapreduce2_client.py        |   38 +
 .../FAKEYARN/package/scripts/nodemanager.py     |   42 +
 .../FAKEYARN/package/scripts/resourcemanager.py |   48 +
 .../FAKEYARN/package/scripts/service_check.py   |   30 +
 .../FAKEYARN/package/scripts/yarn_client.py     |   38 +
 .../FAKEYARN/quicklinks-mapred/quicklinks.json  |   76 +
 .../FAKEYARN/quicklinks/quicklinks.json         |   76 +
 .../services/FAKEYARN/themes-mapred/theme.json  |  132 +
 .../1.0/services/FAKEYARN/themes/theme.json     |  250 +
 .../PERF/1.0/services/FAKEZOOKEEPER/alerts.json |   20 +
 .../configuration/zk-alert-config.xml           |   80 +
 .../1.0/services/FAKEZOOKEEPER/kerberos.json    |   39 +
 .../1.0/services/FAKEZOOKEEPER/metainfo.xml     |   69 +
 .../package/alerts/alert_zk_server_process.py   |   59 +
 .../package/scripts/service_check.py            |   30 +
 .../package/scripts/zookeeper_client.py         |   38 +
 .../package/scripts/zookeeper_server.py         |   42 +
 .../PERF/1.0/services/GRUMPY/kerberos.json      |    4 +-
 .../PERF/1.0/services/GRUMPY/metainfo.xml       |    1 +
 .../PERF/1.0/services/HAPPY/kerberos.json       |    4 +-
 .../stacks/PERF/1.0/services/HAPPY/metainfo.xml |    1 +
 .../stacks/PERF/1.0/services/HBASE/alerts.json  |   35 -
 .../HBASE/configuration/hbase-alert-config.xml  |   80 -
 .../services/HBASE/configuration/hbase-env.xml  |  292 -
 .../HBASE/configuration/hbase-log4j.xml         |  146 -
 .../configuration/hbase-logsearch-conf.xml      |  111 -
 .../HBASE/configuration/hbase-policy.xml        |   53 -
 .../services/HBASE/configuration/hbase-site.xml |  573 --
 .../HBASE/configuration/ranger-hbase-audit.xml  |  122 -
 .../ranger-hbase-policymgr-ssl.xml              |   66 -
 .../configuration/ranger-hbase-security.xml     |   68 -
 .../PERF/1.0/services/HBASE/kerberos.json       |  159 -
 .../stacks/PERF/1.0/services/HBASE/metainfo.xml |  197 -
 .../stacks/PERF/1.0/services/HBASE/metrics.json | 9374 ------------------
 .../package/alerts/hbase_master_process.py      |   59 -
 .../alerts/hbase_regionserver_process.py        |   59 -
 .../HBASE/package/scripts/hbase_client.py       |   38 -
 .../HBASE/package/scripts/hbase_master.py       |   45 -
 .../HBASE/package/scripts/hbase_regionserver.py |   45 -
 .../package/scripts/phoenix_queryserver.py      |   42 -
 .../HBASE/package/scripts/service_check.py      |   30 -
 .../services/HBASE/quicklinks/quicklinks.json   |   97 -
 .../PERF/1.0/services/HBASE/themes/theme.json   |  411 -
 .../stacks/PERF/1.0/services/HBASE/widgets.json |  510 -
 .../stacks/PERF/1.0/services/HDFS/alerts.json   |  120 -
 .../services/HDFS/configuration/core-site.xml   |  225 -
 .../services/HDFS/configuration/hadoop-env.xml  |  419 -
 .../hadoop-metrics2.properties.xml              |  125 -
 .../HDFS/configuration/hadoop-policy.xml        |  130 -
 .../HDFS/configuration/hdfs-alert-config.xml    |   80 -
 .../services/HDFS/configuration/hdfs-log4j.xml  |  225 -
 .../HDFS/configuration/hdfs-logsearch-conf.xml  |  248 -
 .../services/HDFS/configuration/hdfs-site.xml   |  633 --
 .../HDFS/configuration/ranger-hdfs-audit.xml    |  124 -
 .../ranger-hdfs-plugin-properties.xml           |   88 -
 .../configuration/ranger-hdfs-policymgr-ssl.xml |   67 -
 .../HDFS/configuration/ranger-hdfs-security.xml |   65 -
 .../services/HDFS/configuration/ssl-client.xml  |   70 -
 .../services/HDFS/configuration/ssl-server.xml  |   80 -
 .../stacks/PERF/1.0/services/HDFS/kerberos.json |  246 -
 .../stacks/PERF/1.0/services/HDFS/metainfo.xml  |  266 -
 .../stacks/PERF/1.0/services/HDFS/metrics.json  | 7905 ---------------
 .../package/alerts/alert_checkpoint_time.py     |   59 -
 .../alerts/alert_datanode_unmounted_data_dir.py |   59 -
 .../package/alerts/alert_nfs_gateway_process.py |   59 -
 .../package/alerts/alert_snamenode_process.py   |   59 -
 .../package/alerts/alert_upgrade_finalized.py   |   59 -
 .../services/HDFS/package/scripts/datanode.py   |   42 -
 .../HDFS/package/scripts/hdfs_client.py         |   38 -
 .../HDFS/package/scripts/journalnode.py         |   42 -
 .../services/HDFS/package/scripts/namenode.py   |   58 -
 .../services/HDFS/package/scripts/nfsgateway.py |   42 -
 .../HDFS/package/scripts/service_check.py       |   30 -
 .../services/HDFS/package/scripts/snamenode.py  |   42 -
 .../services/HDFS/package/scripts/zkfc_slave.py |   38 -
 .../services/HDFS/quicklinks/quicklinks.json    |   76 -
 .../PERF/1.0/services/HDFS/themes/theme.json    |  179 -
 .../stacks/PERF/1.0/services/HDFS/widgets.json  |  649 --
 .../PERF/1.0/services/SLEEPY/kerberos.json      |    4 +-
 .../PERF/1.0/services/SLEEPY/metainfo.xml       |    1 +
 .../stacks/PERF/1.0/services/SNOW/kerberos.json |    4 +-
 .../stacks/PERF/1.0/services/SNOW/metainfo.xml  |    1 +
 .../PERF/1.0/services/YARN/YARN_metrics.json    | 3486 -------
 .../PERF/1.0/services/YARN/YARN_widgets.json    |  611 --
 .../stacks/PERF/1.0/services/YARN/alerts.json   |   77 -
 .../YARN/configuration-mapred/mapred-env.xml    |   50 -
 .../YARN/configuration-mapred/mapred-site.xml   |  134 -
 .../YARN/configuration/capacity-scheduler.xml   |   69 -
 .../YARN/configuration/ranger-yarn-audit.xml    |  121 -
 .../ranger-yarn-plugin-properties.xml           |   82 -
 .../configuration/ranger-yarn-policymgr-ssl.xml |   66 -
 .../YARN/configuration/ranger-yarn-security.xml |   58 -
 .../YARN/configuration/yarn-alert-config.xml    |   80 -
 .../services/YARN/configuration/yarn-env.xml    |  201 -
 .../services/YARN/configuration/yarn-log4j.xml  |  103 -
 .../services/YARN/configuration/yarn-site.xml   |  796 --
 .../stacks/PERF/1.0/services/YARN/kerberos.json |  278 -
 .../stacks/PERF/1.0/services/YARN/metainfo.xml  |  355 -
 .../package/alerts/alert_history_process.py     |   59 -
 .../package/alerts/alert_nodemanager_health.py  |   59 -
 .../alerts/alert_resourcemanager_process.py     |   59 -
 .../package/alerts/alert_timeline_process.py    |   59 -
 .../scripts/application_timeline_server.py      |   42 -
 .../YARN/package/scripts/historyserver.py       |   42 -
 .../package/scripts/mapred_service_check.py     |   30 -
 .../YARN/package/scripts/mapreduce2_client.py   |   38 -
 .../YARN/package/scripts/nodemanager.py         |   42 -
 .../YARN/package/scripts/resourcemanager.py     |   48 -
 .../YARN/package/scripts/service_check.py       |   30 -
 .../YARN/package/scripts/yarn_client.py         |   38 -
 .../YARN/quicklinks-mapred/quicklinks.json      |   76 -
 .../services/YARN/quicklinks/quicklinks.json    |   76 -
 .../1.0/services/YARN/themes-mapred/theme.json  |  132 -
 .../PERF/1.0/services/YARN/themes/theme.json    |  250 -
 .../PERF/1.0/services/ZOOKEEPER/alerts.json     |   20 -
 .../ZOOKEEPER/configuration/zk-alert-config.xml |   80 -
 .../PERF/1.0/services/ZOOKEEPER/kerberos.json   |   39 -
 .../PERF/1.0/services/ZOOKEEPER/metainfo.xml    |   54 -
 .../package/alerts/alert_zk_server_process.py   |   59 -
 .../ZOOKEEPER/package/scripts/service_check.py  |   30 -
 .../package/scripts/zookeeper_client.py         |   38 -
 .../package/scripts/zookeeper_server.py         |   42 -
 .../stacks/PERF/1.0/upgrades/config-upgrade.xml |   34 +
 .../1.0/upgrades/nonrolling-upgrade-2.0.xml     |  443 +
 .../stacks/PERF/1.0/upgrades/upgrade-2.0.xml    |  331 +
 .../main/resources/stacks/PERF/2.0/metainfo.xml |   23 +
 .../stacks/PERF/2.0/repos/repoinfo.xml          |   33 +
 .../resources/stacks/PERF/PythonExecutor.sed    |   19 +
 .../resources/stacks/PERF/install_packages.sed  |   25 +
 .../src/main/resources/stacks/stack_advisor.py  |   29 +
 .../ambari/server/agent/AgentResourceTest.java  |    4 +
 .../server/configuration/ConfigurationTest.java |   12 -
 .../AmbariCustomCommandExecutionHelperTest.java |   46 +
 .../server/controller/KerberosHelperTest.java   |  112 +-
 .../ActiveWidgetLayoutResourceProviderTest.java |   17 +-
 .../internal/ProvisionClusterRequestTest.java   |   66 +
 .../QuickLinkArtifactResourceProviderTest.java  |    1 -
 .../StackAdvisorResourceProviderTest.java       |   32 +
 .../UserAuthorizationResourceProviderTest.java  |    9 +-
 .../internal/UserResourceProviderTest.java      |   19 +-
 .../LogSearchDataRetrievalServiceTest.java      |   27 +-
 .../server/security/AmbariLdapUtilsTest.java    |  118 +-
 .../AmbariAuthorizationFilterTest.java          |   10 +
 .../AmbariLdapBindAuthenticatorTest.java        |   23 +-
 .../ldap/AmbariLdapDataPopulatorTest.java       |    9 +-
 .../CsvFilePersisterServiceFunctionalTest.java  |   91 +
 .../ambari/server/state/ConfigHelperTest.java   |   98 +-
 .../ambari/server/state/UpgradeHelperTest.java  |  167 +-
 .../cluster/ClusterEffectiveVersionTest.java    |    2 +
 .../KerberosDescriptorUpdateHelperTest.java     |   22 +-
 .../QuickLinksProfileBuilderTest.java           |  243 +
 .../QuickLinksProfileParserTest.java            |   11 +-
 .../stack/upgrade/StageWrapperBuilderTest.java  |   46 +-
 .../server/topology/TopologyManagerTest.java    |   88 +-
 .../server/upgrade/UpgradeCatalog250Test.java   |    6 +-
 ambari-server/src/test/python/TestBootstrap.py  |    4 +-
 .../stacks/2.0.6/FLUME/test_service_check.py    |    1 +
 .../stacks/2.0.6/HIVE/test_hcat_client.py       |    4 +-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |    9 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   |  121 +-
 .../stacks/2.1/HIVE/test_hive_metastore.py      |   14 +-
 .../stacks/2.3/ATLAS/test_metadata_server.py    |   46 +-
 .../stacks/2.4/LOGSEARCH/test_logfeeder.py      |    3 +
 .../stacks/2.4/LOGSEARCH/test_logsearch.py      |   33 -
 .../stacks/2.5/ATLAS/test_atlas_server.py       |   19 +-
 .../stacks/2.5/RANGER/test_ranger_admin.py      |    4 +-
 .../stacks/2.5/ZEPPELIN/test_zeppelin_master.py |  129 +-
 .../stacks/2.5/common/test_stack_advisor.py     |    4 +-
 .../python/stacks/2.5/configs/hsi_default.json  |    3 +-
 .../test/python/stacks/2.5/configs/hsi_ha.json  |    3 +-
 .../2.5/configs/ranger-admin-default.json       |    2 +-
 .../2.5/configs/ranger-admin-secured.json       |    2 +-
 .../stacks/2.6/RANGER/test_ranger_admin.py      |    4 +-
 .../2.6/configs/ranger-admin-default.json       |    2 +-
 .../2.6/configs/ranger-admin-secured.json       |    2 +-
 .../inconsistent_quicklinks_profile_3.json      |    9 +
 .../test_kerberos_descriptor_2_1_3.json         |   14 +-
 .../test_kerberos_descriptor_no_hdfs.json       |    4 +-
 .../test_kerberos_descriptor_simple.json        |    4 +-
 .../HDP/2.0.8/services/HDFS/kerberos.json       |    2 +-
 ambari-web/app/controllers/installer.js         |   12 +-
 .../rangerAdmin/step4_controller.js             |   82 +-
 .../main/admin/stack_and_upgrade_controller.js  |   21 +-
 ambari-web/app/controllers/main/host/details.js |  202 +-
 ambari-web/app/controllers/main/service/item.js |   60 +-
 .../main/service/reassign/step4_controller.js   |   29 +-
 .../app/controllers/wizard/step7_controller.js  |   22 +-
 ambari-web/app/messages.js                      |    2 +-
 .../host/details/addDeleteComponentPopup.hbs    |   34 +-
 .../templates/main/service/add_host_popup.hbs   |   33 -
 .../info/delete_service_warning_popup.hbs       |    4 +-
 ambari-web/app/utils/date/date.js               |   29 +-
 .../configs/config_category_container_view.js   |    2 +
 .../configs/service_configs_by_category_view.js |    4 +-
 .../views/common/modal_popups/alert_popup.js    |   12 +-
 .../app/views/common/quick_view_link_view.js    |    5 +-
 .../admin/stack_upgrade/upgrade_history_view.js |    4 +-
 .../manage_alert_groups_controller_test.js      |    6 +-
 .../test/controllers/main/host/details_test.js  |  144 +-
 .../service/reassign/step4_controller_test.js   |   78 +-
 .../test/controllers/wizard/step7_test.js       |   18 +
 ambari-web/test/utils/config_test.js            |    2 +-
 ambari-web/test/utils/date/date_test.js         |   17 +
 ambari-web/test/utils/object_utils_test.js      |    2 +-
 .../stack_upgrade/upgrade_history_view_test.js  |   69 +
 .../main/resources/stacks/HDF/2.0/kerberos.json |    7 +-
 .../HDF/2.0/properties/stack_features.json      |    5 +
 contrib/utils/perf/deploy-gce-perf-cluster.py   |    4 +
 .../src/main/resources/ui/app/adapters/query.js |    6 +
 .../resources/ui/app/components/query-editor.js |    8 +
 .../ui/app/components/query-result-log.js       |   28 +
 .../main/resources/ui/app/models/worksheet.js   |    5 +-
 .../resources/ui/app/routes/queries/query.js    |  150 +-
 .../src/main/resources/ui/app/services/jobs.js  |   17 +-
 .../src/main/resources/ui/app/services/query.js |   14 +-
 .../src/main/resources/ui/app/styles/app.scss   |    9 +
 .../templates/components/query-result-log.hbs   |   23 +
 .../ui/app/templates/queries/query.hbs          |   72 +-
 .../hive20/src/main/resources/ui/bower.json     |    1 +
 .../hive20/src/main/resources/ui/package.json   |    1 +
 ...HiveHistoryQueryMigrationImplementation.java |   24 +-
 .../HiveSavedQueryMigrationImplementation.java  |   24 +-
 .../pigjob/PigJobMigrationImplementation.java   |   23 +-
 .../PigScriptMigrationImplementation.java       |   24 +-
 .../pigudf/PigUdfMigrationImplementation.java   |   24 +-
 .../app/routes/check-configuration.js           |   12 +-
 .../view/utils/hdfs/ConfigurationBuilder.java   |    4 +-
 contrib/views/wfmanager/pom.xml                 |    1 +
 .../org/apache/oozie/ambari/view/Constants.java |   16 +-
 .../apache/oozie/ambari/view/HDFSFileUtils.java |   19 +
 .../ambari/view/OozieProxyImpersonator.java     |   40 +-
 .../oozie/ambari/view/WorkflowFilesService.java |   70 +-
 .../oozie/ambari/view/assets/AssetRepo.java     |   10 +
 .../oozie/ambari/view/assets/AssetResource.java |    7 +
 .../oozie/ambari/view/assets/AssetService.java  |    4 +
 .../workflowmanager/WorkflowManagerService.java |   46 +-
 .../view/workflowmanager/WorkflowsRepo.java     |   19 +-
 .../resources/ui/app/components/asset-config.js |   18 +-
 .../resources/ui/app/components/asset-list.js   |   20 +-
 .../ui/app/components/asset-manager.js          |   51 +-
 .../resources/ui/app/components/coord-config.js |    4 +
 .../ui/app/components/date-with-expr.js         |    1 +
 .../ui/app/components/designer-workspace.js     |   23 +-
 .../ui/app/components/flow-designer.js          |   70 +-
 .../ui/app/components/version-settings.js       |    3 +-
 .../ui/app/components/workflow-action-editor.js |   77 +-
 .../ui/app/components/workflow-actions.js       |    5 +
 .../ui/app/domain/action-type-resolver.js       |   10 +-
 .../ui/app/domain/actionjob_hanlder.js          |   23 +-
 .../resources/ui/app/domain/mapping-utils.js    |   13 +-
 .../resources/ui/app/domain/node-handler.js     |   10 +-
 .../main/resources/ui/app/domain/workflow.js    |    3 +-
 .../ui/app/routes/design/dashboardtab.js        |    2 +-
 .../resources/ui/app/services/asset-manager.js  |   19 +
 .../src/main/resources/ui/app/styles/app.less   |    8 +
 .../app/templates/components/asset-config.hbs   |    8 +
 .../ui/app/templates/components/asset-list.hbs  |   12 +-
 .../app/templates/components/asset-manager.hbs  |   26 +-
 .../app/templates/components/coord-config.hbs   |    2 +-
 .../templates/components/designer-workspace.hbs |    2 +-
 .../app/templates/components/flow-designer.hbs  |   29 +-
 .../ui/app/templates/components/sla-info.hbs    |   12 +
 .../components/workflow-action-editor.hbs       |   16 +
 .../templates/components/workflow-actions.hbs   |    3 +
 .../main/resources/ui/app/utils/common-utils.js |    4 +
 .../main/resources/ui/app/utils/constants.js    |   31 +-
 .../views/wfmanager/src/main/resources/view.xml |    6 +-
 docs/pom.xml                                    |    7 +-
 689 files changed, 48220 insertions(+), 36781 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/551f17b4/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 9ebb6e8,9ecb774..96726e4
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@@ -223,36 -219,6 +221,10 @@@ public class UpgradeResourceProvider ex
        Arrays.asList(UPGRADE_REQUEST_ID, UPGRADE_CLUSTER_NAME));
    private static final Set<String> PROPERTY_IDS = new HashSet<>();
  
-   public static final String COMMAND_PARAM_VERSION = VERSION;
-   public static final String COMMAND_PARAM_CLUSTER_NAME = "clusterName";
-   public static final String COMMAND_PARAM_DIRECTION = "upgrade_direction";
-   private static final String COMMAND_PARAM_UPGRADE_PACK = "upgrade_pack";
-   public static final String COMMAND_PARAM_REQUEST_ID = "request_id";
- 
-   private static final String COMMAND_PARAM_UPGRADE_TYPE = "upgrade_type";
-   private static final String COMMAND_PARAM_TASKS = "tasks";
-   private static final String COMMAND_PARAM_STRUCT_OUT = "structured_out";
-   private static final String COMMAND_DOWNGRADE_FROM_VERSION = "downgrade_from_version";
- 
- 
-   /**
-    * The original "current" stack of the cluster before the upgrade started.
-    * This is the same regardless of whether the current direction is
-    * {@link Direction#UPGRADE} or {@link Direction#DOWNGRADE}.
-    */
-   public static final String COMMAND_PARAM_ORIGINAL_STACK = "original_stack";
- 
-   /**
-    * The target upgrade stack before the upgrade started. This is the same
-    * regardless of whether the current direction is {@link Direction#UPGRADE} or
-    * {@link Direction#DOWNGRADE}.
-    */
-   public static final String COMMAND_PARAM_TARGET_STACK = "target_stack";
- 
 +  /**
 +   * The list of supported services put on a command.
 +   */
 +  public static final String COMMAND_PARAM_SUPPORTED_SERVICES = "supported_services";
  
    private static final String DEFAULT_REASON_TEMPLATE = "Aborting upgrade %s";
  
@@@ -748,45 -717,7 +723,37 @@@
      Set<String> supportedServices = new HashSet<>();
      UpgradeScope scope = UpgradeScope.COMPLETE;
  
 +    switch (direction) {
 +      case UPGRADE:
-         sourceStackId = cluster.getCurrentStackVersion();
++        StackId sourceStackId = cluster.getCurrentStackVersion();
 +
 +        RepositoryVersionEntity targetRepositoryVersion = s_repoVersionDAO.findByStackNameAndVersion(
 +            sourceStackId.getStackName(), version);
 +
 +        // !!! Consult the version definition and add the service names to supportedServices
 +        if (targetRepositoryVersion.getType() != RepositoryType.STANDARD) {
 +          try {
 +            VersionDefinitionXml vdf = targetRepositoryVersion.getRepositoryXml();
 +            supportedServices.addAll(vdf.getAvailableServiceNames());
 +
 +            // !!! better not be, but just in case
 +            if (!supportedServices.isEmpty()) {
 +              scope = UpgradeScope.PARTIAL;
 +            }
 +
 +          } catch (Exception e) {
 +            String msg = String.format("Could not parse version definition for %s.  Upgrade will not proceed.", version);
 +            LOG.error(msg, e);
 +            throw new AmbariException(msg);
 +          }
 +        }
 +
-         targetStackId = targetRepositoryVersion.getStackId();
 +        break;
 +      case DOWNGRADE:
-         sourceStackId = cluster.getCurrentStackVersion();
-         targetStackId = cluster.getDesiredStackVersion();
 +        break;
 +    }
 +
      upgradeContext.setResolver(resolver);
-     upgradeContext.setSourceAndTargetStacks(sourceStackId, targetStackId);
-     upgradeContext.setVersion(version);
      upgradeContext.setSupportedServices(supportedServices);
      upgradeContext.setScope(scope);
  
@@@ -1596,15 -1505,8 +1544,9 @@@
  
      Cluster cluster = context.getCluster();
  
-     Map<String, String> commandParams = getNewParameterMap(request);
-     commandParams.put(COMMAND_PARAM_CLUSTER_NAME, cluster.getClusterName());
-     commandParams.put(COMMAND_PARAM_VERSION, context.getVersion());
-     commandParams.put(COMMAND_PARAM_DIRECTION, context.getDirection().name().toLowerCase());
-     commandParams.put(COMMAND_PARAM_ORIGINAL_STACK, context.getOriginalStackId().getStackId());
-     commandParams.put(COMMAND_PARAM_TARGET_STACK, context.getTargetStackId().getStackId());
-     commandParams.put(COMMAND_DOWNGRADE_FROM_VERSION, context.getDowngradeFromVersion());
-     commandParams.put(COMMAND_PARAM_UPGRADE_PACK, upgradePack.getName());
+     Map<String, String> commandParams = getNewParameterMap(request, context);
+     commandParams.put(UpgradeContext.COMMAND_PARAM_UPGRADE_PACK, upgradePack.getName());
 +    commandParams.put(COMMAND_PARAM_SUPPORTED_SERVICES, StringUtils.join(context.getSupportedServices(), ','));
  
      // Notice that this does not apply any params because the input does not specify a stage.
      // All of the other actions do use additional params.

http://git-wip-us.apache.org/repos/asf/ambari/blob/551f17b4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AbstractUpgradeServerAction.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AbstractUpgradeServerAction.java
index a0b3980,0000000..5d73fac
mode 100644,000000..100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AbstractUpgradeServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/AbstractUpgradeServerAction.java
@@@ -1,73 -1,0 +1,74 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.ambari.server.serveraction.upgrades;
 +
 +import java.util.Collections;
 +import java.util.Set;
 +
 +import org.apache.ambari.server.controller.internal.UpgradeResourceProvider;
 +import org.apache.ambari.server.serveraction.AbstractServerAction;
 +import org.apache.ambari.server.state.Clusters;
++import org.apache.ambari.server.state.UpgradeContext;
 +import org.apache.ambari.server.state.stack.upgrade.Direction;
 +import org.apache.commons.lang.StringUtils;
 +import org.apache.hadoop.metrics2.sink.relocated.google.common.collect.Sets;
 +
 +import com.google.inject.Inject;
 +
 +/**
 + * Abstract class that reads values from command params in a consistent way.
 + */
 +public abstract class AbstractUpgradeServerAction extends AbstractServerAction {
 +
-   public static final String CLUSTER_NAME_KEY = UpgradeResourceProvider.COMMAND_PARAM_CLUSTER_NAME;
-   public static final String UPGRADE_DIRECTION_KEY = UpgradeResourceProvider.COMMAND_PARAM_DIRECTION;
-   public static final String VERSION_KEY = UpgradeResourceProvider.COMMAND_PARAM_VERSION;
-   protected static final String REQUEST_ID = UpgradeResourceProvider.COMMAND_PARAM_REQUEST_ID;
++  public static final String CLUSTER_NAME_KEY = UpgradeContext.COMMAND_PARAM_CLUSTER_NAME;
++  public static final String UPGRADE_DIRECTION_KEY = UpgradeContext.COMMAND_PARAM_DIRECTION;
++  public static final String VERSION_KEY = UpgradeContext.COMMAND_PARAM_VERSION;
++  protected static final String REQUEST_ID = UpgradeContext.COMMAND_PARAM_REQUEST_ID;
 +
 +  /**
 +   * The original "current" stack of the cluster before the upgrade started.
 +   * This is the same regardless of whether the current direction is
 +   * {@link Direction#UPGRADE} or {@link Direction#DOWNGRADE}.
 +   */
-   protected static final String ORIGINAL_STACK_KEY = UpgradeResourceProvider.COMMAND_PARAM_ORIGINAL_STACK;
++  protected static final String ORIGINAL_STACK_KEY = UpgradeContext.COMMAND_PARAM_ORIGINAL_STACK;
 +
 +  /**
 +   * The target upgrade stack before the upgrade started. This is the same
 +   * regardless of whether the current direction is {@link Direction#UPGRADE} or
 +   * {@link Direction#DOWNGRADE}.
 +   */
-   protected static final String TARGET_STACK_KEY = UpgradeResourceProvider.COMMAND_PARAM_TARGET_STACK;
++  protected static final String TARGET_STACK_KEY = UpgradeContext.COMMAND_PARAM_TARGET_STACK;
 +
 +  protected static final String SUPPORTED_SERVICES_KEY = UpgradeResourceProvider.COMMAND_PARAM_SUPPORTED_SERVICES;
 +
 +  @Inject
 +  protected Clusters m_clusters;
 +
 +  /**
 +   * @return the set of supported services
 +   */
 +  protected Set<String> getSupportedServices() {
 +    String services = getCommandParameterValue(SUPPORTED_SERVICES_KEY);
 +    if (StringUtils.isBlank(services)) {
 +      return Collections.emptySet();
 +    } else {
 +      return Sets.newHashSet(StringUtils.split(services, ','));
 +    }
 +  }
 +
 +}

http://git-wip-us.apache.org/repos/asf/ambari/blob/551f17b4/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/551f17b4/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/551f17b4/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------


[32/50] [abbrv] ambari git commit: AMBARI-19693. Permission issues with conf.server/hive-site.xml across several hive components (Dmytro Grinenko via smohanty)

Posted by nc...@apache.org.
AMBARI-19693. Permission issues with conf.server/hive-site.xml across several hive components (Dmytro Grinenko via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2a2b4e94
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2a2b4e94
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2a2b4e94

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2a2b4e94feacfff56097293810867ce627b1778a
Parents: f4f7571
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Tue Jan 24 13:46:21 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Tue Jan 24 13:47:54 2017 -0800

----------------------------------------------------------------------
 .../common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py      | 2 +-
 .../src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py        | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2a2b4e94/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
index eecbcff..faa8c7e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py
@@ -51,7 +51,7 @@ def hcat():
 
   Directory(params.hive_conf_dir,
             create_parents = True,
-            owner=params.webhcat_user,
+            owner=params.hive_user,
             group=params.user_group,
   )
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2a2b4e94/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
index c09cd06..ff7e728 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
@@ -34,7 +34,7 @@ class TestHcatClient(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
     self.assertResourceCalled('Directory', '/usr/hdp/current/hive-server2/conf',
-                              owner = 'hcat',
+                              owner = 'hive',
                               group = 'hadoop',
                               create_parents = True,
     )
@@ -74,7 +74,7 @@ class TestHcatClient(RMFTestCase):
     )
     self.assertResourceCalled('Directory', '/usr/hdp/current/hive-server2/conf',
                               create_parents = True,
-                              owner = 'hcat',
+                              owner = 'hive',
                               group = 'hadoop',
     )
     self.assertResourceCalled('Directory', '/etc/hive-hcatalog/conf',


[39/50] [abbrv] ambari git commit: AMBARI-19667. Hive View 2.0: Editor should be stretchable by dragging (Abhishek Kumar via pallavkul)

Posted by nc...@apache.org.
AMBARI-19667. Hive View 2.0: Editor should be stretchable by dragging (Abhishek Kumar via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0de31d6a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0de31d6a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0de31d6a

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 0de31d6a59c2ec847fc060dffb9e69431eee4ebc
Parents: 72e02ab
Author: pallavkul <pa...@gmail.com>
Authored: Wed Jan 25 09:14:58 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Wed Jan 25 09:14:58 2017 +0530

----------------------------------------------------------------------
 .../src/main/resources/ui/app/components/query-editor.js    | 8 ++++++++
 .../views/hive20/src/main/resources/ui/app/styles/app.scss  | 9 +++++++++
 contrib/views/hive20/src/main/resources/ui/bower.json       | 1 +
 contrib/views/hive20/src/main/resources/ui/package.json     | 1 +
 4 files changed, 19 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0de31d6a/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js b/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
index 27d43d5..7bfe223 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/query-editor.js
@@ -74,6 +74,14 @@ export default Ember.Component.extend({
       });
     });
 
+    this.$('.CodeMirror').resizable({
+      handles: 's',
+
+      resize: function () {
+        Ember.run.debounce(this, updateSize, 150);
+      }
+    }).find('.ui-resizable-s').addClass('grip fa fa-reorder');
+
 
   }.on('didInsertElement'),
 
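A note on the hunk above: jQuery UI's resizable widget (pulled in via the bower.json and package.json changes below) fires its resize callback continuously while the user drags, so the handler funnels the burst through Ember.run.debounce, which re-measures the editor at most once, 150ms after the last event in the burst. A minimal standalone sketch of the same pattern; refreshEditor is a hypothetical stand-in for the component's updateSize helper, which this hunk does not show:

  // Hypothetical stand-in for the component's updateSize helper.
  function refreshEditor() {
    // e.g. ask the CodeMirror instance to re-measure itself against
    // its (now taller or shorter) wrapper element
  }

  Ember.$('.CodeMirror').resizable({
    handles: 's',                      // south edge only: vertical dragging
    resize: function () {
      // Coalesce the drag events: refreshEditor runs once, 150ms after
      // the last resize callback in the burst.
      Ember.run.debounce(this, refreshEditor, 150);
    }
  }).find('.ui-resizable-s')           // the handle element jQuery UI creates
    .addClass('grip fa fa-reorder');   // styled by the .grip rule in app.scss
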

http://git-wip-us.apache.org/repos/asf/ambari/blob/0de31d6a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
index 5ae65d1..7ab6992 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
@@ -221,6 +221,15 @@ pre {
   overflow-y: scroll;
 }
 
+.grip {
+  height: 20px;
+  border: solid #ddd; border-width: 0 1px 1px;
+  background-color: #f5f5f5;
+  color: #bbb;
+  text-align: center;
+  font-size: inherit;
+}
+
 .hv-dropdown {
   position: absolute;
   .dropdown-menu {

http://git-wip-us.apache.org/repos/asf/ambari/blob/0de31d6a/contrib/views/hive20/src/main/resources/ui/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/bower.json b/contrib/views/hive20/src/main/resources/ui/bower.json
index 4eadee7..fcd11cf 100644
--- a/contrib/views/hive20/src/main/resources/ui/bower.json
+++ b/contrib/views/hive20/src/main/resources/ui/bower.json
@@ -6,6 +6,7 @@
     "ember-qunit-notifications": "0.1.0",
     "font-awesome": "~4.5.0",
     "codemirror": "~5.15.0",
+    "jquery-ui": "~1.12.1",
     "bootstrap-treeview": "~1.2.0"
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0de31d6a/contrib/views/hive20/src/main/resources/ui/package.json
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/package.json b/contrib/views/hive20/src/main/resources/ui/package.json
index ed7a299..d04a092 100644
--- a/contrib/views/hive20/src/main/resources/ui/package.json
+++ b/contrib/views/hive20/src/main/resources/ui/package.json
@@ -34,6 +34,7 @@
     "ember-cli-htmlbars": "^1.0.3",
     "ember-cli-htmlbars-inline-precompile": "^0.3.1",
     "ember-cli-inject-live-reload": "^1.4.0",
+    "ember-cli-jquery-ui": "0.0.20",
     "ember-cli-jshint": "^1.0.0",
     "ember-cli-moment-shim": "3.0.1",
     "ember-cli-qunit": "^2.0.0",


[02/50] [abbrv] ambari git commit: AMBARI-19622. Need abilities to add a custom action node and import a workflow xml with custom action (Padma Priya via pallavkul)

Posted by nc...@apache.org.
AMBARI-19622. Need abilities to add a custom action node and import a workflow xml with custom action (Padma Priya via pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3e5185ac
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3e5185ac
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3e5185ac

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 3e5185acaea24bfeb28f4b10da6d3a94c90dcacd
Parents: 9abe8da
Author: pallavkul <pa...@gmail.com>
Authored: Mon Jan 23 15:00:03 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Mon Jan 23 15:00:03 2017 +0530

----------------------------------------------------------------------
 .../ui/app/components/flow-designer.js          | 11 +++
 .../ui/app/components/workflow-action-editor.js | 77 ++++++++++++++++----
 .../ui/app/components/workflow-actions.js       |  5 ++
 .../ui/app/domain/action-type-resolver.js       | 10 ++-
 .../ui/app/domain/actionjob_hanlder.js          | 22 +++++-
 .../resources/ui/app/domain/node-handler.js     | 10 +--
 .../src/main/resources/ui/app/styles/app.less   |  4 +
 .../app/templates/components/flow-designer.hbs  | 25 +++++++
 .../components/workflow-action-editor.hbs       | 16 ++++
 .../templates/components/workflow-actions.hbs   |  3 +
 .../main/resources/ui/app/utils/common-utils.js |  4 +
 .../main/resources/ui/app/utils/constants.js    |  3 +-
 12 files changed, 169 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
index 8bbe831..1822a20 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
@@ -775,6 +775,17 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
       this.set('showCreateKillNode', false);
     },
     addNode(type){
+      if(type === 'custom'){
+        this.$('#customTypeModal').modal('show');
+      }else{
+        this.send('addAction', type);
+      }
+    },
+    createCustomAction(type){
+      this.send('addAction', type);
+      this.set('customActionType', '');
+    },
+    addAction(type){
       this.createSnapshot();
       var currentTransition=this.get("currentTransition");
       this.get("workflow").addNode(this.findTransition(this.get("workflow").startNode, currentTransition.sourceNodeId, currentTransition.targetNode.id),type);

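Worth noting in the hunk above: inside an Ember component, this.send(name, ...) looks the named action up on the component's own actions hash first, which is what lets addNode defer the actual insertion to addAction once the custom-type modal has collected a type. A stripped-down sketch of that local re-dispatch, with illustrative action names rather than the ones from the hunk:

  import Ember from 'ember';

  export default Ember.Component.extend({
    actions: {
      outer(arg) {
        this.send('inner', arg);  // resolved against this component's
      },                          // own actions hash before bubbling
      inner(arg) {
        // shared path, reachable from outer() and from the template
      }
    }
  });
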
http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
index 8a3c7cf..f2d3ba8 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
@@ -17,6 +17,7 @@
 
 import Ember from 'ember';
 import Constants from '../utils/constants';
+import CommonUtils from '../utils/common-utils';
 import {SlaInfo} from '../domain/sla-info';
 
 export default Ember.Component.extend( Ember.Evented,{
@@ -40,6 +41,7 @@ export default Ember.Component.extend( Ember.Evented,{
   clonedActionModel : {},
   showingFileBrowser : false,
   childComponents : new Map(),
+  errors : Ember.A([]),
   isActionNode : Ember.computed('nodeType',function(){
     if(this.get('nodeType') === 'action'){
       return true;
@@ -58,13 +60,43 @@ export default Ember.Component.extend( Ember.Evented,{
     return this.get('actionIcons')[this.get('actionType')];
   }),
   saveClicked : false,
-  containsUnsupportedProperties : Ember.computed('actionModel.unsupportedProperties', function(){
-    return this.get('actionModel.unsupportedProperties') ? !Ember.isEmpty(Object.keys(this.get('actionModel.unsupportedProperties'))) : false;
-  }),
-  unsupportedPropertiesXml : Ember.computed('actionModel.unsupportedProperties', function(){
-    if(this.get('containsUnsupportedProperties')){
-      var x2js = new X2JS();
+  unsupportedPropertiesXml : Ember.computed('actionModel.unsupportedProperties', {
+    get(key){
+      let x2js = new X2JS();
       return vkbeautify.xml(x2js.json2xml_str(this.get('actionModel.unsupportedProperties')));
+    },
+    set(key, value) {
+      let x2js = new X2JS();
+      var temp = x2js.xml_str2json(vkbeautify.xmlmin(`<unsupportedProperties>${value}</unsupportedProperties>`));
+      this.set('actionModel.unsupportedProperties', temp.unsupportedProperties);
+      Object.keys(this.get('actionModel.unsupportedProperties')).forEach(key =>{
+        this.set(`actionModel.${key}`, this.get(`actionModel.unsupportedProperties.${key}`));
+      });
+      return value;
+    }
+  }),
+  actionXml : Ember.computed('actionModel', {
+    get(key) {
+      let x2js = new X2JS();
+      var startTag = `<${this.get('actionType')}`;
+      Object.keys(this.get('actionModel')).forEach(key => {
+        if(key.startsWith('_')){
+          startTag = `${startTag} ${key.substr(1)}="${this.get('actionModel')[key]}"`;
+        }
+      });
+      startTag = `${startTag}>`;
+      return vkbeautify.xml(`${startTag}${x2js.json2xml_str(this.get('actionModel'))}</${this.get('actionType')}>`);
+    },
+    set(key, value) {
+      let x2js = new X2JS();
+      this.set('errors', Ember.A([]));
+      let temp = x2js.xml_str2json(vkbeautify.xmlmin(value));
+      if(temp){
+        this.set('actionModel', temp[this.get('actionType')]);
+      }else{
+        this.get('errors').pushObject({message:'Action Xml is syntactically incorrect'});
+      }
+      return value;
     }
   }),
   fileBrowser : Ember.inject.service('file-browser'),
@@ -87,15 +119,30 @@ export default Ember.Component.extend( Ember.Evented,{
       errorNode : errorNode
     });
     this.set('transition',transition);
-    if (Ember.isBlank(this.get("actionModel.jobTracker"))){
-      this.set('actionModel.jobTracker',Constants.rmDefaultValue);
-    }
-    if (Ember.isBlank(this.get("actionModel.nameNode"))){
-      this.set('actionModel.nameNode','${nameNode}');
+    if(CommonUtils.isSupportedAction(this.get('actionType'))){
+      if (Ember.isBlank(this.get("actionModel.jobTracker"))){
+        this.set('actionModel.jobTracker',Constants.rmDefaultValue);
+      }
+      if (Ember.isBlank(this.get("actionModel.nameNode"))){
+        this.set('actionModel.nameNode','${nameNode}');
+      }
     }
-    if(this.get('nodeType') === 'action' && this.get('actionModel.slaInfo') === undefined){
+    if(this.get('nodeType') === 'action' && CommonUtils.isSupportedAction(this.get('actionType')) && this.get('actionModel.slaInfo') === undefined){
       this.set('actionModel.slaInfo', SlaInfo.create({}));
     }
+    if(!CommonUtils.isSupportedAction(this.get('actionType')) && !this.get('actionModel.slaInfo')){
+      this.set('customSlaInfo',  SlaInfo.create({}));
+    }else{
+      this.set('customSlaInfo',  this.get('actionModel.slaInfo'));
+      this.set('customSlaEnabled', this.get('actionModel.slaEnabled'));
+      delete this.get('actionModel').slaInfo;
+      delete this.get('actionModel').slaEnabled;
+    }
+    if(this.get('actionModel.unsupportedProperties') && !Ember.isEmpty(Object.keys(this.get('actionModel.unsupportedProperties')))){
+      this.set('containsUnsupportedProperties', true);
+    }else{
+      this.set('containsUnsupportedProperties', false);
+    }
   }.on('init'),
   initialize : function(){
     this.$('#action_properties_dialog').modal({
@@ -150,10 +197,14 @@ export default Ember.Component.extend( Ember.Evented,{
     },
     save () {
       var isChildComponentsValid = this.validateChildrenComponents();
-      if(this.get('validations.isInvalid') || !isChildComponentsValid) {
+      if(this.get('validations.isInvalid') || !isChildComponentsValid || this.get('errors').length > 0) {
         this.set('showErrorMessage', true);
         return;
       }
+      if(!CommonUtils.isSupportedAction(this.get('actionType'))){
+        this.set('actionModel.slaInfo',  this.get('customSlaInfo'));
+        this.set('actionModel.slaEnabled', this.get('customSlaEnabled'));
+      }
       this.processMultivaluedComponents();
       this.processStaticProps();
       this.$('#action_properties_dialog').modal('hide');

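The notable pattern in this change is the switch from read-only computed properties to Ember computed properties with an explicit get/set pair, so that edits made in the raw XML textarea flow back into actionModel. A minimal standalone sketch of that pattern (it round-trips JSON text here rather than the x2js XML conversion the real code performs):

    import Ember from 'ember';

    export default Ember.Component.extend({
      errors: Ember.A([]),
      model: null,
      modelText: Ember.computed('model', {
        get() {
          // reading derives the editable text from the model
          return JSON.stringify(this.get('model'));
        },
        set(key, value) {
          // writing parses the edited text back into the model,
          // collecting a validation error instead of throwing
          this.set('errors', Ember.A([]));
          try {
            this.set('model', JSON.parse(value));
          } catch (e) {
            this.get('errors').pushObject({message: 'Input is syntactically incorrect'});
          }
          return value; // the returned value becomes the property's cached value
        }
      })
    });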
http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js
index 7c78eea..2f8cdaa 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-actions.js
@@ -16,10 +16,15 @@
 */
 
 import Ember from 'ember';
+import Constants from '../utils/constants';
+
 export default Ember.Component.extend({
   clipboardHasContents : Ember.computed.oneWay('clipboard', function(){
     return !Ember.isEmpty(this.get('clipboard'));
   }),
+  initialize : function(){
+    this.set('customActionEnabled', Constants.customActionEnabled);
+  }.on('init'),
   actions : {
     addAction : function(type){
       this.$(".dr_action").css("background-color", "#fff");

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/domain/action-type-resolver.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/action-type-resolver.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/action-type-resolver.js
index c25b953..8cbcfaf 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/action-type-resolver.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/action-type-resolver.js
@@ -55,7 +55,15 @@ var ActionTypeResolver=Ember.Object.extend({
     return resolvedType;
   },
   getActionJobHandler(jobType){
-    return this.actionJobHandlerMap.get(jobType);
+    if(this.actionJobHandlerMap.has(jobType)) {
+      return this.actionJobHandlerMap.get(jobType);
+    }else{
+      var customActionJobHandler = actionJobHandler.CustomActionJobHandler.create({
+        actionType : jobType
+      });
+      this.actionJobHandlerMap.set(jobType,customActionJobHandler);
+      return customActionJobHandler;
+    }
   }
 });
 

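getActionJobHandler now falls back to building a CustomActionJobHandler for unknown job types and caching it for reuse. The get-or-create idiom in isolation (the factory body is a stand-in):

    // Look up a handler, creating and caching one on first use.
    var handlerMap = new Map();

    function getHandler(jobType) {
      if (!handlerMap.has(jobType)) {
        handlerMap.set(jobType, {actionType: jobType}); // stand-in for CustomActionJobHandler.create(...)
      }
      return handlerMap.get(jobType);
    }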
http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
index 4cc89ef..34a9a4a 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
@@ -337,6 +337,26 @@ var MapRedActionJobHandler=ActionJobHandler.extend({
   }
 });
 
+var CustomActionJobHandler=ActionJobHandler.extend({
+  actionType:'',
+  mapping:null,
+  init(){
+    this.mapping=[];
+  },
+  handleImport(actionNode,json){
+    actionNode.set('domain', json);
+  },
+  handle(nodeDomain,nodeObj,nodeName){
+    var customDomain = {};
+    Object.keys(nodeDomain).forEach(key =>{
+      if(key !== 'slaInfo' && key !== 'slaEnabled' && key!=='credentials'){
+        customDomain[key] = nodeDomain[key];
+      }
+    });
+    nodeObj[this.get("actionType")] = customDomain;
+  }
+});
+
 var FSActionJobHandler=ActionJobHandler.extend({
   actionType:"fs",
   mapping:null,
@@ -544,4 +564,4 @@ var FSActionJobHandler=ActionJobHandler.extend({
     });
   }
 });
-export{ActionJobHandler,JavaActionJobHandler,PigActionJobHandler,HiveActionJobHandler,SqoopActionJobHandler,ShellActionJobHandler, EmailActionJobHandler,SparkActionJobHandler,MapRedActionJobHandler, Hive2ActionJobHandler, SubWFActionJobHandler, DistCpJobHandler, SshActionJobHandler, FSActionJobHandler};
+export{ActionJobHandler,JavaActionJobHandler,PigActionJobHandler,HiveActionJobHandler,SqoopActionJobHandler,ShellActionJobHandler, EmailActionJobHandler,SparkActionJobHandler,MapRedActionJobHandler, Hive2ActionJobHandler, SubWFActionJobHandler, DistCpJobHandler, SshActionJobHandler, FSActionJobHandler, CustomActionJobHandler};

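CustomActionJobHandler.handle serializes a custom node by copying every property of the node's domain except the Ambari-managed SLA and credential fields, which are written to the workflow XML separately. The filtering on its own, with made-up sample data:

    var nodeDomain = {_name: 'myStep', arg: 'x', slaEnabled: true, credentials: 'hive-cred'};
    var excluded = ['slaInfo', 'slaEnabled', 'credentials'];
    var customDomain = {};
    Object.keys(nodeDomain).forEach(function (key) {
      if (excluded.indexOf(key) < 0) {
        customDomain[key] = nodeDomain[key];
      }
    });
    // customDomain is {_name: 'myStep', arg: 'x'} -- only workflow XML content remains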
http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/domain/node-handler.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/node-handler.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/node-handler.js
index 6bc305a..28ea527 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/node-handler.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/node-handler.js
@@ -125,16 +125,16 @@ var ActionNodeHandler= NodeHandler.extend({
       return actionNode;
     }
     var actionJobHandler=this.get("actionTypeResolver").getActionJobHandler(actionType);
-    if (!actionJobHandler){
-      console.error("cannot handle unsupported action type:"+actionType+" for "+nodeJson._name);//TODO error handling...
-      return actionNode;
+    if(actionJobHandler){
+      actionJobHandler.handleImport(actionNode,nodeJson[actionType]);
     }
-    actionJobHandler.handleImport(actionNode,nodeJson[actionType]);
     if (nodeJson.info && nodeJson.info.__prefix==="sla") {
       actionNode.domain.slaEnabled=true;
       this.slaMapper.handleImport(actionNode.domain,nodeJson.info,"slaInfo");
     }
-    actionNode.domain.credentials=nodeJson._cred;
+    if(nodeJson._cred){
+      actionNode.domain.credentials=nodeJson._cred;
+    }
     return actionNode;
   },
   handleImportTransitions(node,json,nodeMap){

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
index 05bdb5a..d91eb3b 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
@@ -1632,3 +1632,7 @@ input:invalid {
   padding: 3px;
   overflow-y: auto;
 }
+.custom-action-xml{
+  width: 100%;
+  min-height: 175px;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
index 95c8c3b..38d8eaf 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
@@ -377,3 +377,28 @@
 {{#if showKillNodeManager}}
   {{#killnode-manager killNodes=workflow.killNodes killNode=killNode createKillnodeError=createKillnodeError createKillNode="createKillNode" deleteNode="deleteNode" addKillNodeMode=addKillNodeMode editMode=editMode closeKillNodeManager="closeKillNodeManager"}}{{/killnode-manager}}
 {{/if}}
+
+<div id="customTypeModal" class="modal fade" role="dialog">
+  <div class="modal-dialog">
+    <div class="modal-content">
+      <div class="modal-header">
+        <button type="button" class="close" data-dismiss="modal">&times;</button>
+        <h4 class="modal-title">Custom Action</h4>
+      </div>
+      <div class="modal-body">
+        <form class="form-horizontal">
+          <div class="form-group">
+            <label for="customActionType" class="control-label col-xs-2">Type</label>
+            <div class="col-xs-7">
+              {{input type="text" id="customActionType" class="form-control" name="custom-action-type" value=customActionType placeholder="Custom Action Type"}}
+            </div>
+          </div>
+        </form>
+      </div>
+      <div class="modal-footer">
+        <button type="button" class="btn btn-default" data-dismiss="modal">Close</button>
+        <button type="button" class="btn btn-primary" data-dismiss="modal" {{action 'createCustomAction' customActionType}}>OK</button>
+      </div>
+    </div>
+  </div>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-action-editor.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-action-editor.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-action-editor.hbs
index b36578d..bb089c0 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-action-editor.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-action-editor.hbs
@@ -64,6 +64,22 @@
                   {{#fs-action actionModel=actionModel transition=transition killNodes=killNodes openFileBrowser="openFileBrowser" register="registerChild" addKillNode="addKillNode" currentNode=currentNode credentials=credentials}}{{/fs-action}}
                 {{else if (eq actionType 'sub-workflow')}}
                   {{#sub-workflow actionModel=actionModel transition=transition killNodes=killNodes openFileBrowser="openFileBrowser" register="registerChild" addKillNode="addKillNode" currentNode=currentNode credentials=credentials}}{{/sub-workflow}}
+                {{else}}
+                    <div class="panel panel-default">
+                      <div class="panel-heading">Action XML</div>
+                      <div class="panel-body handlerPanel">
+                        {{designer-errors errors=errors}}
+                        {{textarea class="custom-action-xml" value=actionXml}}
+                      </div>
+                    </div>
+                    <div class="panel panel-default">
+                      <div class="panel-heading">Transition</div>
+                      <div class="panel-body handlerPanel">
+                        {{#transition-config transition=transition killNodes=killNodes currentNode=currentNode}}{{/transition-config}}
+                      </div>
+                    </div>
+                    {{#action-credential-config credentials=credentials actionCredentials=actionModel.credentials}}{{/action-credential-config}}
+                    {{#sla-info slaInfo=customSlaInfo slaEnabled=customSlaEnabled}}{{/sla-info}}
               {{/if}}
               {{#if containsUnsupportedProperties}}
                 <div id="unsupported-props" class=" panel panel-default">

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs
index 6d672b4..badf320 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs
@@ -55,6 +55,9 @@
           <li class="dr_action disabled hide" data-name="Stream" data-type="stream"> <i class="fa fa-exchange"></i> Stream </li>
           <li {{action 'addAction' 'email'}} class="dr_action  enabled" data-name="Email" data-type="email"> <i class="fa fa-envelope"></i> Email </li>
           <li {{action 'addAction' 'fs'}} class="dr_action  enabled" data-name="fs" data-type="fs"> <i class="fa fa-folder-o"></i> FS </li>
+          {{#if customActionEnabled}}
+            <li {{action 'addAction' 'custom'}} class="dr_action  enabled" data-name="custom" data-type="custom"> <i class="fa fa-magic" aria-hidden="true"></i> Custom </li>
+          {{/if}}
       </ul>
     </div>
     <div class="clearfix"></div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/utils/common-utils.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/utils/common-utils.js b/contrib/views/wfmanager/src/main/resources/ui/app/utils/common-utils.js
index e3be7da..8cc40d6 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/utils/common-utils.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/utils/common-utils.js
@@ -16,6 +16,7 @@
  */
 
 import Ember from 'ember';
+import Constants from '../utils/constants';
 export default Ember.Object.create({
   extractSchemaVersion(xmlns){
     return xmlns.substring(xmlns.lastIndexOf(":")+1);
@@ -25,5 +26,8 @@ export default Ember.Object.create({
   },
   setTestContext(context){
     window.flowDesignerTestContext=context;
+  },
+  isSupportedAction(actionType){
+    return Object.values(Constants.actions).findBy('name', actionType) ? true : false;
   }
 });

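Note that Object.values is an ES2017 addition (so it relies on the app's polyfills in older browsers) and findBy comes from Ember's array prototype extensions. Hypothetical usage, assuming Constants.actions contains entries such as {name: 'hive'}:

    CommonUtils.isSupportedAction('hive');      // true  -- gets a dedicated action editor
    CommonUtils.isSupportedAction('my-action'); // false -- falls back to the raw Action XML editor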
http://git-wip-us.apache.org/repos/asf/ambari/blob/3e5185ac/contrib/views/wfmanager/src/main/resources/ui/app/utils/constants.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/utils/constants.js b/contrib/views/wfmanager/src/main/resources/ui/app/utils/constants.js
index fc20359..9126819 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/utils/constants.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/utils/constants.js
@@ -88,5 +88,6 @@ export default Ember.Object.create({
   persistWorkInProgressInterval : 30000,
   elConstants : [
     '${YEAR}', '${MONTH}', '${DAY}', '${HOUR}', '${MINUTE}'
-  ]
+  ],
+  customActionEnabled : false
 });

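Since the Custom action entry in the palette is gated on this constant, enabling the feature is a one-line edit to the view's UI source rather than a runtime setting (a sketch of the change):

    // contrib/views/wfmanager/src/main/resources/ui/app/utils/constants.js
    customActionEnabled : true  // workflow-actions.hbs only renders the Custom item when true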

[35/50] [abbrv] ambari git commit: AMBARI-19658. LogSearch Integration Cache Timeout should be configurable. (rnettleton)

Posted by nc...@apache.org.
AMBARI-19658. LogSearch Integration Cache Timeout should be configurable. (rnettleton)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/60462002
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/60462002
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/60462002

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 604620021908995d7b4581176ceecf6c44ffea26
Parents: 78fefdd
Author: Bob Nettleton <rn...@hortonworks.com>
Authored: Tue Jan 24 17:15:07 2017 -0500
Committer: Bob Nettleton <rn...@hortonworks.com>
Committed: Tue Jan 24 17:15:29 2017 -0500

----------------------------------------------------------------------
 .../server/configuration/Configuration.java     | 26 +++++++++++++++++-
 .../logging/LogSearchDataRetrievalService.java  | 28 +++++++++++++++++---
 .../LogSearchDataRetrievalServiceTest.java      | 27 ++++++++++++++++++-
 3 files changed, 76 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/60462002/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index df1b627..73c70dc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -2601,7 +2601,6 @@ public class Configuration {
   public static final ConfigurationProperty<Integer> LOGSEARCH_PORTAL_READ_TIMEOUT = new ConfigurationProperty<>(
     "logsearch.portal.read.timeout", 5000);
 
-
   /**
    * Global disable flag for AmbariServer Metrics.
    */
@@ -2609,6 +2608,18 @@ public class Configuration {
   public static final ConfigurationProperty<Boolean> AMBARISERVER_METRICS_DISABLE = new ConfigurationProperty<>(
     "ambariserver.metrics.disable", false);
 
+  /**
+   * The time, in hours, that the Ambari Server will hold Log File metadata in its internal cache before making
+   *   a request to the LogSearch Portal to get the latest metadata.
+   *
+   * The logging metadata (in this case, log file names) is generally quite static, so the default can
+   *   safely be quite long.
+   *
+   */
+  @Markdown(description = "The time, in hours, that the Ambari Server will hold Log File metadata in its internal cache before making a request to the LogSearch Portal to get the latest metadata.")
+  public static final ConfigurationProperty<Integer> LOGSEARCH_METADATA_CACHE_EXPIRE_TIMEOUT = new ConfigurationProperty<>(
+    "logsearch.metadata.cache.expire.timeout", 24);
+
   private static final Logger LOG = LoggerFactory.getLogger(
     Configuration.class);
 
@@ -5406,6 +5417,19 @@ public class Configuration {
     return NumberUtils.toInt(getProperty(LOGSEARCH_PORTAL_READ_TIMEOUT));
   }
 
+
+  /**
+   *
+   * Get the max time, in hours, to hold data in the LogSearch
+   *   metadata cache prior to expiring the cache and re-loading
+   *   the data from the LogSearch Portal service.
+   *
+   * @return max number of hours that the LogSearch metadata is cached
+   */
+  public int getLogSearchMetadataCacheExpireTimeout() {
+    return NumberUtils.toInt(getProperty(LOGSEARCH_METADATA_CACHE_EXPIRE_TIMEOUT));
+  }
+
   /**
    * Generates a markdown table which includes:
    * <ul>

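For operators, the net effect is a new ambari.properties key: setting, for example, logsearch.metadata.cache.expire.timeout=48 holds the cached log file metadata for 48 hours, instead of the 24-hour default, before the next request to the LogSearch Portal.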
http://git-wip-us.apache.org/repos/asf/ambari/blob/60462002/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalService.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalService.java
index ce6094c..6b484a4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalService.java
@@ -23,6 +23,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.ambari.server.AmbariService;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.AmbariServer;
 import org.apache.commons.collections.CollectionUtils;
@@ -77,6 +78,11 @@ public class LogSearchDataRetrievalService extends AbstractService {
   @Inject
   private Injector injector;
 
+  @Inject
+  private Configuration ambariServerConfiguration;
+
+
+
   /**
    * A Cache of host+component names to a set of log files associated with
    *  that Host/Component combination.  This data is retrieved from the
@@ -114,10 +120,17 @@ public class LogSearchDataRetrievalService extends AbstractService {
   protected void doStart() {
 
     LOG.debug("Initializing caches");
+
+    // obtain the max cache expire time from the ambari configuration
+    final int maxTimeoutForCacheInHours =
+      ambariServerConfiguration.getLogSearchMetadataCacheExpireTimeout();
+
+    LOG.debug("Caches configured with a max expire timeout of " + maxTimeoutForCacheInHours + " hours.");
+
     // initialize the log file name cache
-    logFileNameCache = CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.HOURS).build();
+    logFileNameCache = CacheBuilder.newBuilder().expireAfterWrite(maxTimeoutForCacheInHours, TimeUnit.HOURS).build();
     // initialize the log file tail URI cache
-    logFileTailURICache = CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.HOURS).build();
+    logFileTailURICache = CacheBuilder.newBuilder().expireAfterWrite(maxTimeoutForCacheInHours, TimeUnit.HOURS).build();
 
     // initialize the Executor
     executor = Executors.newSingleThreadExecutor();
@@ -126,7 +139,7 @@ public class LogSearchDataRetrievalService extends AbstractService {
   @Override
   protected void doStop() {
     LOG.debug("Invalidating LogSearch caches");
-    // invalidate the cache
+    // invalidate the caches
     logFileNameCache.invalidateAll();
 
     logFileTailURICache.invalidateAll();
@@ -230,6 +243,15 @@ public class LogSearchDataRetrievalService extends AbstractService {
   }
 
   /**
+   * Package-level setter to facilitate simpler unit testing
+   *
+   * @param ambariServerConfiguration
+   */
+  void setConfiguration(Configuration ambariServerConfiguration) {
+    this.ambariServerConfiguration = ambariServerConfiguration;
+  }
+
+  /**
    * This protected method allows for simpler unit tests.
    *
    * @return the Set of current Requests that are not yet completed

http://git-wip-us.apache.org/repos/asf/ambari/blob/60462002/ambari-server/src/test/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalServiceTest.java
index 033d698..4296004 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/logging/LogSearchDataRetrievalServiceTest.java
@@ -29,6 +29,7 @@ import java.util.Set;
 import java.util.concurrent.Executor;
 
 
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.easymock.EasyMockSupport;
 import org.junit.Test;
@@ -60,13 +61,18 @@ public class LogSearchDataRetrievalServiceTest {
     LoggingRequestHelper helperMock =
       mockSupport.createMock(LoggingRequestHelper.class);
 
+    Configuration configurationMock =
+      mockSupport.createMock(Configuration.class);
+
     expect(helperFactoryMock.getHelper(null, expectedClusterName)).andReturn(helperMock);
     expect(helperMock.createLogFileTailURI("http://localhost", expectedComponentName, expectedHostName)).andReturn(expectedResultURI);
+    expect(configurationMock.getLogSearchMetadataCacheExpireTimeout()).andReturn(1).atLeastOnce();
 
     mockSupport.replayAll();
 
     LogSearchDataRetrievalService retrievalService = new LogSearchDataRetrievalService();
     retrievalService.setLoggingRequestHelperFactory(helperFactoryMock);
+    retrievalService.setConfiguration(configurationMock);
     // call the initialization routine called by the Google framework
     retrievalService.doStart();
 
@@ -86,16 +92,23 @@ public class LogSearchDataRetrievalServiceTest {
 
     EasyMockSupport mockSupport = new EasyMockSupport();
 
-    LoggingRequestHelperFactory helperFactoryMock = mockSupport.createMock(LoggingRequestHelperFactory.class);
+    LoggingRequestHelperFactory helperFactoryMock =
+      mockSupport.createMock(LoggingRequestHelperFactory.class);
+
+    Configuration configurationMock =
+      mockSupport.createMock(Configuration.class);
 
     // return null, to simulate the case where LogSearch Server is
     // not available for some reason
     expect(helperFactoryMock.getHelper(null, expectedClusterName)).andReturn(null);
 
+    expect(configurationMock.getLogSearchMetadataCacheExpireTimeout()).andReturn(1).atLeastOnce();
+
     mockSupport.replayAll();
 
     LogSearchDataRetrievalService retrievalService = new LogSearchDataRetrievalService();
     retrievalService.setLoggingRequestHelperFactory(helperFactoryMock);
+    retrievalService.setConfiguration(configurationMock);
     // call the initialization routine called by the Google framework
     retrievalService.doStart();
 
@@ -122,6 +135,9 @@ public class LogSearchDataRetrievalServiceTest {
     Injector injectorMock =
       mockSupport.createMock(Injector.class);
 
+    Configuration configurationMock =
+      mockSupport.createMock(Configuration.class);
+
     // expect the executor to be called to execute the LogSearch request
     executorMock.execute(isA(LogSearchDataRetrievalService.LogSearchFileNameRequestRunnable.class));
     // executor should only be called once
@@ -129,11 +145,14 @@ public class LogSearchDataRetrievalServiceTest {
 
     expect(injectorMock.getInstance(LoggingRequestHelperFactory.class)).andReturn(helperFactoryMock);
 
+    expect(configurationMock.getLogSearchMetadataCacheExpireTimeout()).andReturn(1).atLeastOnce();
+
     mockSupport.replayAll();
 
     LogSearchDataRetrievalService retrievalService = new LogSearchDataRetrievalService();
     retrievalService.setLoggingRequestHelperFactory(helperFactoryMock);
     retrievalService.setInjector(injectorMock);
+    retrievalService.setConfiguration(configurationMock);
     // call the initialization routine called by the Google framework
     retrievalService.doStart();
     retrievalService.setExecutor(executorMock);
@@ -164,10 +183,16 @@ public class LogSearchDataRetrievalServiceTest {
 
     Executor executorMock = mockSupport.createMock(Executor.class);
 
+    Configuration configurationMock =
+      mockSupport.createMock(Configuration.class);
+
+    expect(configurationMock.getLogSearchMetadataCacheExpireTimeout()).andReturn(1).atLeastOnce();
+
     mockSupport.replayAll();
 
     LogSearchDataRetrievalService retrievalService = new LogSearchDataRetrievalService();
     retrievalService.setLoggingRequestHelperFactory(helperFactoryMock);
+    retrievalService.setConfiguration(configurationMock);
     // call the initialization routine called by the Google framework
     retrievalService.doStart();
     // there should be no expectations set on this mock


[42/50] [abbrv] ambari git commit: AMBARI-19696 Move HS2 does not install dependent components on the target host. (atkach)

Posted by nc...@apache.org.
AMBARI-19696 Move HS2 does not install dependent components on the target host. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/dab389d9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/dab389d9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/dab389d9

Branch: refs/heads/branch-dev-patch-upgrade
Commit: dab389d94634e6c21935a7034a0389a70390176e
Parents: b9200e0
Author: Andrii Tkach <at...@apache.org>
Authored: Tue Jan 24 21:44:43 2017 +0200
Committer: Andrii Tkach <at...@apache.org>
Committed: Wed Jan 25 12:42:27 2017 +0200

----------------------------------------------------------------------
 .../main/service/reassign/step4_controller.js   | 29 +++++++-
 .../manage_alert_groups_controller_test.js      |  6 +-
 .../service/reassign/step4_controller_test.js   | 78 ++++++++++++++++----
 3 files changed, 92 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/dab389d9/ambari-web/app/controllers/main/service/reassign/step4_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/reassign/step4_controller.js b/ambari-web/app/controllers/main/service/reassign/step4_controller.js
index b383da7..2e9d431 100644
--- a/ambari-web/app/controllers/main/service/reassign/step4_controller.js
+++ b/ambari-web/app/controllers/main/service/reassign/step4_controller.js
@@ -70,6 +70,8 @@ App.ReassignMasterWizardStep4Controller = App.HighAvailabilityProgressPageContro
 
   hostComponents: [],
 
+  dependentHostComponents: [],
+
   dbPropertyMap: {
     'HIVE_SERVER': {
       type: 'hive-site',
@@ -89,16 +91,35 @@ App.ReassignMasterWizardStep4Controller = App.HighAvailabilityProgressPageContro
    * load step info
    */
   loadStep: function () {
-    if (this.get('content.reassign.component_name') === 'NAMENODE' && App.get('isHaEnabled')) {
+    var componentName = this.get('content.reassign.component_name');
+    if (componentName === 'NAMENODE' && App.get('isHaEnabled')) {
       this.set('hostComponents', ['NAMENODE', 'ZKFC']);
     } else {
-      this.set('hostComponents', [this.get('content.reassign.component_name')]);
+      this.set('hostComponents', [componentName]);
     }
+    this.setDependentHostComponents(componentName);
     this.set('serviceName', [this.get('content.reassign.service_id')]);
     this._super();
   },
 
   /**
+   * Set dependent host-components to <code>dependentHostComponents</code>
+   * @param {string} componentName
+   */
+  setDependentHostComponents: function(componentName) {
+    var installedComponents = App.Host.find(this.get('content.reassignHosts.target'))
+      .get('hostComponents')
+      .mapProperty('componentName');
+    var dependenciesToInstall = App.StackServiceComponent.find(componentName)
+      .get('dependencies')
+      .mapProperty('componentName')
+      .filter(function(component) {
+        return !installedComponents.contains(component);
+      });
+    this.set('dependentHostComponents', dependenciesToInstall);
+  },
+
+  /**
    * concat host-component names into string
    * @return {String}
    */
@@ -213,7 +234,7 @@ App.ReassignMasterWizardStep4Controller = App.HighAvailabilityProgressPageContro
   },
 
   createHostComponents: function () {
-    var hostComponents = this.get('hostComponents');
+    var hostComponents = this.get('hostComponents').concat(this.get('dependentHostComponents'));
     var hostName = this.get('content.reassignHosts.target');
     this.set('multiTaskCounter', hostComponents.length);
     for (var i = 0; i < hostComponents.length; i++) {
@@ -245,7 +266,7 @@ App.ReassignMasterWizardStep4Controller = App.HighAvailabilityProgressPageContro
   },
 
   installHostComponents: function () {
-    var hostComponents = this.get('hostComponents');
+    var hostComponents = this.get('hostComponents').concat(this.get('dependentHostComponents'));
     var hostName = this.get('content.reassignHosts.target');
     this.set('multiTaskCounter', hostComponents.length);
     for (var i = 0; i < hostComponents.length; i++) {

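setDependentHostComponents reduces to a set difference between the stack-defined dependencies of the component being moved and what is already installed on the target host. The same computation in isolation (component names are made up for illustration):

    var installed = ['HIVE_METASTORE', 'MYSQL_SERVER'];   // already on the target host
    var dependencies = ['HIVE_METASTORE', 'TEZ_CLIENT'];  // stack dependencies of the moved component
    var dependenciesToInstall = dependencies.filter(function (name) {
      return installed.indexOf(name) < 0;
    });
    // dependenciesToInstall === ['TEZ_CLIENT'] -- only the missing components get created and installed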
http://git-wip-us.apache.org/repos/asf/ambari/blob/dab389d9/ambari-web/test/controllers/main/alerts/manage_alert_groups_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/alerts/manage_alert_groups_controller_test.js b/ambari-web/test/controllers/main/alerts/manage_alert_groups_controller_test.js
index 2c58017..7a2f35b 100644
--- a/ambari-web/test/controllers/main/alerts/manage_alert_groups_controller_test.js
+++ b/ambari-web/test/controllers/main/alerts/manage_alert_groups_controller_test.js
@@ -21,7 +21,11 @@ var App = require('app');
 var manageAlertGroupsController;
 
 function getController() {
-  return App.ManageAlertGroupsController.create({});
+  return App.ManageAlertGroupsController.create({
+    selectedAlertGroup: Em.Object.create({
+      name: ''
+    })
+  });
 }
 
 describe('App.ManageAlertGroupsController', function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/dab389d9/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js b/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
index aac15b8..6bf381a 100644
--- a/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
+++ b/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
@@ -367,17 +367,24 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
     });
     afterEach(function () {
       controller.createComponent.restore();
+      controller.set('dependentHostComponents', []);
     });
 
-    it('One host-component', function () {
+    it('createComponent should be called for each host-component', function () {
       controller.set('hostComponents', ['COMP1']);
+      controller.set('dependentHostComponents', ['COMP2']);
       controller.set('content.reassignHosts.target', 'host1');
       controller.set('content.reassign.service_id', 'SERVICE1');
 
       controller.createHostComponents();
 
-      expect(controller.get('multiTaskCounter')).to.equal(1);
-      expect(controller.createComponent.calledWith('COMP1', 'host1', 'SERVICE1')).to.be.true;
+      expect(controller.get('multiTaskCounter')).to.equal(2);
+      expect(controller.createComponent.getCall(0).args).to.be.eql([
+        'COMP1', 'host1', 'SERVICE1'
+      ]);
+      expect(controller.createComponent.getCall(1).args).to.be.eql([
+        'COMP2', 'host1', 'SERVICE1'
+      ]);
     });
   });
 
@@ -427,6 +434,7 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
     });
     afterEach(function () {
       controller.updateComponent.restore();
+      controller.set('dependentHostComponents', []);
     });
 
     it('No host-components', function () {
@@ -437,15 +445,21 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
       expect(controller.get('multiTaskCounter')).to.equal(0);
       expect(controller.updateComponent.called).to.be.false;
     });
-    it('One host-component', function () {
+    it('updateComponent should be called for each host-component', function () {
       controller.set('hostComponents', ['COMP1']);
+      controller.set('dependentHostComponents', ['COMP2']);
       controller.set('content.reassignHosts.target', 'host1');
       controller.set('content.reassign.service_id', 'SERVICE1');
 
       controller.installHostComponents();
 
-      expect(controller.get('multiTaskCounter')).to.equal(1);
-      expect(controller.updateComponent.calledWith('COMP1', 'host1', 'SERVICE1', 'Install', 1)).to.be.true;
+      expect(controller.get('multiTaskCounter')).to.equal(2);
+      expect(controller.updateComponent.getCall(0).args).to.be.eql([
+        'COMP1', 'host1', 'SERVICE1', 'Install', 2
+      ]);
+      expect(controller.updateComponent.getCall(1).args).to.be.eql([
+        'COMP2', 'host1', 'SERVICE1', 'Install', 2
+      ]);
     });
   });
 
@@ -473,24 +487,21 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
   });
 
   describe('#loadStep()', function () {
-    var isHaEnabled = true;
-
     beforeEach(function () {
       controller.set('content.reassign.service_id', 'service1');
       sinon.stub(controller, 'onTaskStatusChange', Em.K);
       sinon.stub(controller, 'initializeTasks', Em.K);
-      sinon.stub(App, 'get', function () {
-        return isHaEnabled;
-      });
+      sinon.stub(controller, 'setDependentHostComponents');
+      this.mockGet = sinon.stub(App, 'get').returns(true);
     });
     afterEach(function () {
+      controller.setDependentHostComponents.restore();
       controller.onTaskStatusChange.restore();
       controller.initializeTasks.restore();
-      App.get.restore();
+      this.mockGet.restore();
     });
 
     it('reassign component is NameNode and HA enabled', function () {
-      isHaEnabled = true;
       controller.set('content.reassign.component_name', 'NAMENODE');
 
       controller.loadStep();
@@ -498,7 +509,7 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
       expect(controller.get('serviceName')).to.eql(['service1']);
     });
     it('reassign component is NameNode and HA disabled', function () {
-      isHaEnabled = false;
+      this.mockGet.returns(false);
       controller.set('content.reassign.component_name', 'NAMENODE');
 
       controller.loadStep();
@@ -506,7 +517,6 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
       expect(controller.get('serviceName')).to.eql(['service1']);
     });
     it('reassign component is JOBTRACKER and HA enabled', function () {
-      isHaEnabled = true;
       controller.set('content.reassign.component_name', 'JOBTRACKER');
 
       controller.loadStep();
@@ -514,13 +524,18 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
       expect(controller.get('serviceName')).to.eql(['service1']);
     });
     it('reassign component is RESOURCEMANAGER and HA enabled', function () {
-      isHaEnabled = true;
       controller.set('content.reassign.component_name', 'RESOURCEMANAGER');
 
       controller.loadStep();
       expect(controller.get('hostComponents')).to.eql(['RESOURCEMANAGER']);
       expect(controller.get('serviceName')).to.eql(['service1']);
     });
+    it('setDependentHostComponents should be called', function () {
+      controller.set('content.reassign.component_name', 'RESOURCEMANAGER');
+
+      controller.loadStep();
+      expect(controller.setDependentHostComponents.calledOnce).to.be.true;
+    });
   });
 
   describe('#saveConfigsToServer()', function () {
@@ -1065,4 +1080,35 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
       });
     });
   });
+
+  describe('#setDependentHostComponents', function() {
+    beforeEach(function() {
+      sinon.stub(App.Host, 'find').returns(Em.Object.create({
+        hostComponents: [
+          Em.Object.create({
+            componentName: 'C1'
+          })
+        ]
+      }));
+      sinon.stub(App.StackServiceComponent, 'find').returns(Em.Object.create({
+        dependencies: [
+          Em.Object.create({
+            componentName: 'C1'
+          }),
+          Em.Object.create({
+            componentName: 'C2'
+          })
+        ]
+      }));
+    });
+    afterEach(function() {
+      App.Host.find.restore();
+      App.StackServiceComponent.find.restore();
+    });
+
+    it('should set dependentHostComponents', function() {
+      controller.setDependentHostComponents();
+      expect(controller.get('dependentHostComponents')).to.be.eql(['C2']);
+    });
+  });
 });


[31/50] [abbrv] ambari git commit: AMBARI-19681: Credential store should add hadoop credential provider path property to all affected configuration types

Posted by nc...@apache.org.
AMBARI-19681: Credential store should add hadoop credential provider path property to all affected configuration types


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f4f7571d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f4f7571d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f4f7571d

Branch: refs/heads/branch-dev-patch-upgrade
Commit: f4f7571d8f9a7709f030cd2587052492e40e8527
Parents: d77f3a5
Author: Nahappan Somasundaram <ns...@hortonworks.com>
Authored: Tue Jan 24 09:53:58 2017 -0800
Committer: Nahappan Somasundaram <ns...@hortonworks.com>
Committed: Tue Jan 24 12:34:25 2017 -0800

----------------------------------------------------------------------
 .../ambari_agent/CustomServiceOrchestrator.py   |  8 ++----
 .../libraries/functions/security_commons.py     | 29 +++++++++-----------
 2 files changed, 15 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f4f7571d/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index 02f4212..8f1848c 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -266,7 +266,6 @@ class CustomServiceOrchestrator():
     serviceName = commandJson['serviceName']
 
     # Gather the password values and remove them from the configuration
-    provider_paths = [] # A service may depend on multiple configs
     configtype_credentials = self.getConfigTypeCredentials(commandJson)
     for config_type, credentials in configtype_credentials.items():
       config = commandJson['configurations'][config_type]
@@ -274,7 +273,6 @@ class CustomServiceOrchestrator():
       if os.path.exists(file_path):
         os.remove(file_path)
       provider_path = 'jceks://file{file_path}'.format(file_path=file_path)
-      provider_paths.append(provider_path)
       logger.info('provider_path={0}'.format(provider_path))
       for alias, pwd in credentials.items():
         logger.debug("config={0}".format(config))
@@ -286,10 +284,8 @@ class CustomServiceOrchestrator():
         cmd_result = subprocess.call(cmd)
         logger.info('cmd_result = {0}'.format(cmd_result))
         os.chmod(file_path, 0644) # group and others should have read access so that the service user can read
-
-    if provider_paths:
-      # Add JCEKS provider paths instead
-      config[self.CREDENTIAL_PROVIDER_PROPERTY_NAME] = ','.join(provider_paths)
+      # Add JCEKS provider path instead
+      config[self.CREDENTIAL_PROVIDER_PROPERTY_NAME] = provider_path
 
     return cmd_result
 

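The key change is that the provider-path assignment moved inside the per-config-type loop: previously the comma-joined list of all provider paths was written only to whichever config the loop variable last pointed at, whereas now each affected configuration type receives its own jceks://file... provider path.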
http://git-wip-us.apache.org/repos/asf/ambari/blob/f4f7571d/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py b/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
index cca244d..96d60da 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
@@ -46,22 +46,19 @@ def update_credential_provider_path(config, config_type, dest_provider_path, fil
   """
   # Get the path to the provider <config_type>.jceks
   if HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME in config:
-    provider_paths = config[HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME].split(',')
-    for path_index in range(len(provider_paths)):
-      provider_path = provider_paths[path_index]
-      if config_type == os.path.splitext(os.path.basename(provider_path))[0]:
-        src_provider_path = provider_path[len('jceks://file'):]
-        File(dest_provider_path,
-             owner = file_owner,
-             group = file_group,
-             mode = 0640,
-             content = StaticFile(src_provider_path)
-             )
-        provider_paths[path_index] = 'jceks://file{0}'.format(dest_provider_path)
-        # make a copy of the config dictionary since it is read-only
-        config_copy = config.copy()
-        config_copy[HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME] = ','.join(provider_paths)
-        return config_copy
+    provider_path = config[HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME]
+    src_provider_path = provider_path[len('jceks://file'):]
+    File(dest_provider_path,
+        owner = file_owner,
+        group = file_group,
+        mode = 0640,
+        content = StaticFile(src_provider_path)
+    )
+    # make a copy of the config dictionary since it is read-only
+    config_copy = config.copy()
+    # overwrite the provider path with the path specified
+    config_copy[HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME] = 'jceks://file{0}'.format(dest_provider_path)
+    return config_copy
   return config
 
 def validate_security_config_properties(params, configuration_rules):


[12/50] [abbrv] ambari git commit: AMBARI-19640. Hue - Ambari Migration is not working in Kerberized envirionment. (Ishan Bhatt via gauravn7)

Posted by nc...@apache.org.
AMBARI-19640. Hue - Ambari Migration is not working in Kerberized envirionment. (Ishan Bhatt via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c32eb48e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c32eb48e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c32eb48e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: c32eb48ed8421d0445ae7cace9421b9eb1bcc41a
Parents: aaa9931
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Mon Jan 23 19:38:01 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Mon Jan 23 19:38:01 2017 +0530

----------------------------------------------------------------------
 ...HiveHistoryQueryMigrationImplementation.java | 24 ++++++++++++++++----
 .../HiveSavedQueryMigrationImplementation.java  | 24 ++++++++++++++++----
 .../pigjob/PigJobMigrationImplementation.java   | 23 +++++++++++++++----
 .../PigScriptMigrationImplementation.java       | 24 ++++++++++++++++----
 .../pigudf/PigUdfMigrationImplementation.java   | 24 ++++++++++++++++----
 5 files changed, 95 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c32eb48e/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
index e827b09..f154b39 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
@@ -50,6 +50,7 @@ import java.net.URI;
 public class HiveHistoryQueryMigrationImplementation {
 
   static final Logger logger = Logger.getLogger(HiveHistoryQueryMigrationImplementation.class);
+  final String USER_DIRECTORY = "/user";
 
   public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
 
@@ -449,10 +450,16 @@ public class HiveHistoryQueryMigrationImplementation {
         public Boolean run() throws Exception {
 
           URI uri = new URI(dir);
-          FileSystem fs = FileSystem.get(uri, conf, username);
-
+          FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
           Boolean b = fs.mkdirs(src);
+
+          String[] subDirs = dir.split("/");
+          String dirPath = USER_DIRECTORY;
+          for(int i=2;i<subDirs.length;i++) {
+            dirPath += "/"+subDirs[i];
+            fs.setOwner(new Path(dirPath), username, username);
+          }
           return b;
         }
       });
@@ -483,9 +490,16 @@ public class HiveHistoryQueryMigrationImplementation {
 
         public Boolean run() throws Exception {
           URI uri = new URI(dir);
-          FileSystem fs = FileSystem.get(uri, conf, username);
+          FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
           Boolean b = fs.mkdirs(src);
+
+          String[] subDirs = dir.split("/");
+          String dirPath = USER_DIRECTORY;
+          for(int i=2;i<subDirs.length;i++) {
+            dirPath += "/"+subDirs[i];
+            fs.setOwner(new Path(dirPath), username, username);
+          }
           return b;
         }
       });
@@ -540,7 +554,7 @@ public class HiveHistoryQueryMigrationImplementation {
           }
           in.close();
           out.close();
-          fileSystem.setOwner(path, username, "hadoop");
+          fileSystem.setOwner(path, username, username);
           fileSystem.close();
           return null;
         }
@@ -600,7 +614,7 @@ public class HiveHistoryQueryMigrationImplementation {
           }
           in.close();
           out.close();
-          fileSystem.setOwner(path, username, "hadoop");
+          fileSystem.setOwner(path, username, username);
           fileSystem.close();
           return null;
         }

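The pattern repeated across these migration classes: rather than impersonating the end user with the three-argument FileSystem.get(uri, conf, username) -- which creates a simple remote user and so cannot authenticate in a Kerberized cluster -- the code opens the FileSystem under the view's own login identity, then transfers ownership of each created sub-directory and file to the target user via setOwner, also replacing the previously hard-coded "hadoop" group with the user's own group.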
http://git-wip-us.apache.org/repos/asf/ambari/blob/c32eb48e/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
index 584978a..c08455d 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
@@ -52,6 +52,7 @@ import java.net.URI;
 public class HiveSavedQueryMigrationImplementation {
 
   static final Logger logger = Logger.getLogger(HiveSavedQueryMigrationImplementation.class);
+  final String USER_DIRECTORY = "/user";
 
   private static String readAll(Reader rd) throws IOException {
     StringBuilder sb = new StringBuilder();
@@ -607,10 +608,16 @@ public class HiveSavedQueryMigrationImplementation {
         public Boolean run() throws Exception {
 
           URI uri = new URI(dir);
-          FileSystem fs = FileSystem.get(uri, conf, username);
-
+          FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
           Boolean b = fs.mkdirs(src);
+
+          String[] subDirs = dir.split("/");
+          String dirPath = USER_DIRECTORY;
+          for(int i=2;i<subDirs.length;i++) {
+            dirPath += "/"+subDirs[i];
+            fs.setOwner(new Path(dirPath), username, username);
+          }
           return b;
         }
       });
@@ -640,9 +647,16 @@ public class HiveSavedQueryMigrationImplementation {
 
         public Boolean run() throws Exception {
           URI uri = new URI(dir);
-          FileSystem fs = FileSystem.get(uri, conf, username);
+          FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
           Boolean b = fs.mkdirs(src);
+
+          String[] subDirs = dir.split("/");
+          String dirPath = USER_DIRECTORY;
+          for(int i=2;i<subDirs.length;i++) {
+            dirPath += "/"+subDirs[i];
+            fs.setOwner(new Path(dirPath), username, username);
+          }
           return b;
         }
       });
@@ -695,7 +709,7 @@ public class HiveSavedQueryMigrationImplementation {
           }
           in.close();
           out.close();
-          fileSystem.setOwner(path, username, "hadoop");
+          fileSystem.setOwner(path, username, username);
           fileSystem.close();
           return null;
         }
@@ -757,7 +771,7 @@ public class HiveSavedQueryMigrationImplementation {
           }
           in.close();
           out.close();
-          fileSystem.setOwner(path, username, "hadoop");
+          fileSystem.setOwner(path, username, username);
           fileSystem.close();
           return null;
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c32eb48e/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
index ec39d55..1cb0471 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
@@ -56,6 +56,7 @@ import org.apache.ambari.view.huetoambarimigration.migration.configuration.Confi
 public class PigJobMigrationImplementation {
 
   static final Logger logger = Logger.getLogger(PigJobMigrationImplementation.class);
+  final String USER_DIRECTORY = "/user";
 
   private static String readAll(Reader rd) throws IOException {
     StringBuilder sb = new StringBuilder();
@@ -414,9 +415,16 @@ public class PigJobMigrationImplementation {
           conf.set("hadoop.job.ugi", "hdfs");
 
           URI uri = new URI(dir);
-          FileSystem fs = FileSystem.get(uri, conf, username);
+          FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
           fs.mkdirs(src);
+
+          String[] subDirs = dir.split("/");
+          String dirPath = USER_DIRECTORY;
+          for(int i=2;i<subDirs.length;i++) {
+            dirPath += "/"+subDirs[i];
+            fs.setOwner(new Path(dirPath), username, username);
+          }
           return null;
         }
       });
@@ -447,9 +455,16 @@ public class PigJobMigrationImplementation {
 
         public Boolean run() throws Exception {
           URI uri = new URI(dir);
-          FileSystem fs = FileSystem.get(uri, conf, username);
+          FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
           Boolean b = fs.mkdirs(src);
+
+          String[] subDirs = dir.split("/");
+          String dirPath = USER_DIRECTORY;
+          for(int i=2;i<subDirs.length;i++) {
+            dirPath += "/"+subDirs[i];
+            fs.setOwner(new Path(dirPath), username, username);
+          }
           return b;
         }
       });
@@ -510,7 +525,7 @@ public class PigJobMigrationImplementation {
           }
           in1.close();
           out.close();
-          fileSystemAmbari.setOwner(path, username, "hadoop");
+          fileSystemAmbari.setOwner(path, username, username);
           fileSystemHue.close();
           fileSystemAmbari.close();
           return null;
@@ -580,7 +595,7 @@ public class PigJobMigrationImplementation {
           }
           in1.close();
           out.close();
-          fileSystemAmbari.setOwner(path, username, "hadoop");
+          fileSystemAmbari.setOwner(path, username, username);
           fileSystemHue.close();
           fileSystemAmbari.close();
           return null;

http://git-wip-us.apache.org/repos/asf/ambari/blob/c32eb48e/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
index 0459a30..82461ca 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
@@ -53,6 +53,7 @@ import java.net.URI;
 public class PigScriptMigrationImplementation {
 
   static final Logger logger = Logger.getLogger(PigScriptMigrationImplementation.class);
+  final String USER_DIRECTORY = "/user";
 
   private static String readAll(Reader rd) throws IOException {
     StringBuilder sb = new StringBuilder();
@@ -448,9 +449,16 @@ public class PigScriptMigrationImplementation {
 
         public Boolean run() throws Exception {
           URI uri = new URI(dir);
-          FileSystem fs = FileSystem.get(uri, conf, username);
+          FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
           Boolean b = fs.mkdirs(src);
+
+          String[] subDirs = dir.split("/");
+          String dirPath = USER_DIRECTORY;
+          for(int i=2;i<subDirs.length;i++) {
+            dirPath += "/"+subDirs[i];
+            fs.setOwner(new Path(dirPath), username, username);
+          }
           return b;
         }
       });
@@ -481,10 +489,16 @@ public class PigScriptMigrationImplementation {
         public Boolean run() throws Exception {
 
           URI uri = new URI(dir);
-          FileSystem fs = FileSystem.get(uri, conf, username);
-
+          FileSystem fs = FileSystem.get(conf);
           Path src = new Path(dir);
           Boolean b = fs.mkdirs(src);
+
+          String[] subDirs = dir.split("/");
+          String dirPath = USER_DIRECTORY;
+          for(int i=2;i<subDirs.length;i++) {
+            dirPath += "/"+subDirs[i];
+            fs.setOwner(new Path(dirPath), username, username);
+          }
           return b;
         }
       });
@@ -541,7 +555,7 @@ public class PigScriptMigrationImplementation {
           }
           in.close();
           out.close();
-          fileSystem.setOwner(path, username, "hadoop");
+          fileSystem.setOwner(path, username, username);
           fileSystem.close();
           return null;
         }
@@ -600,7 +614,7 @@ public class PigScriptMigrationImplementation {
           }
           in.close();
           out.close();
-          fileSystem.setOwner(path, username, "hadoop");
+          fileSystem.setOwner(path, username, username);
           fileSystem.close();
           return null;
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c32eb48e/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationImplementation.java
index ce51bf1..a7728a8 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationImplementation.java
@@ -47,6 +47,7 @@ import java.net.URI;
 
 public class PigUdfMigrationImplementation {
     static final Logger logger = Logger.getLogger(PigUdfMigrationImplementation.class);
+    final String USER_DIRECTORY = "/user";
 
     private static String readAll(Reader rd) throws IOException {
         StringBuilder sb = new StringBuilder();
@@ -215,10 +216,16 @@ public class PigUdfMigrationImplementation {
 
                 public Boolean run() throws Exception {
                     URI uri = new URI(dir);
-                    FileSystem fs = FileSystem.get(uri, conf, dir);
+                    FileSystem fs = FileSystem.get(conf);
                     Path src = new Path(dir);
                     Boolean b = fs.mkdirs(src);
-                    fs.setOwner(src,username,"hadoop");
+
+                    String[] subDirs = dir.split("/");
+                    String dirPath = USER_DIRECTORY;
+                    for(int i=2;i<subDirs.length;i++) {
+                        dirPath += "/"+subDirs[i];
+                        fs.setOwner(new Path(dirPath), username, username);
+                    }
                     return b;
                 }
             });
@@ -252,9 +259,16 @@ public class PigUdfMigrationImplementation {
                 public Void run() throws Exception {
 
                     URI uri = new URI(dir);
-                    FileSystem fs = FileSystem.get(uri, conf, username);
+                    FileSystem fs = FileSystem.get(conf);
                     Path src = new Path(dir);
                     fs.mkdirs(src);
+
+                    String[] subDirs = dir.split("/");
+                    String dirPath = USER_DIRECTORY;
+                    for(int i=2;i<subDirs.length;i++) {
+                        dirPath += "/"+subDirs[i];
+                        fs.setOwner(new Path(dirPath), username, username);
+                    }
                     return null;
                 }
             });
@@ -308,7 +322,7 @@ public class PigUdfMigrationImplementation {
                     }
                     in1.close();
                     out.close();
-                    fileSystemAmbari.setOwner(path, username, "hadoop");
+                    fileSystemAmbari.setOwner(path, username, username);
                     fileSystemHue.close();
                     fileSystemAmbari.close();
                     return null;
@@ -371,7 +385,7 @@ public class PigUdfMigrationImplementation {
                     }
                     in1.close();
                     out.close();
-                    fileSystemAmbari.setOwner(path, username, "hadoop");
+                    fileSystemAmbari.setOwner(path, username, username);
                     fileSystemHue.close();
                     fileSystemAmbari.close();
                     return null;
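
Across all four migration classes in this commit the change is the same: stop obtaining the filesystem via the impersonating FileSystem.get(uri, conf, username) overload, create the directory as the service user, and then walk each path component under /user, chowning it to the migrating user with the user's own name as the group rather than the hard-coded "hadoop". A minimal sketch of that pattern, assuming Hadoop's FileSystem API (class and method names below are illustrative, not the view's actual code):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class UserDirOwnershipSketch {

        private static final String USER_DIRECTORY = "/user";

        // Create dir, then chown every component beneath /user to the
        // migrating user (user == group, not the fixed "hadoop" group).
        public static void mkdirsOwnedBy(Configuration conf, String dir, String username)
                throws IOException {
            FileSystem fs = FileSystem.get(conf);
            fs.mkdirs(new Path(dir));

            // dir looks like /user/<name>/..., so split("/") yields an empty
            // element at index 0 and "user" at index 1; start at index 2.
            String[] subDirs = dir.split("/");
            String dirPath = USER_DIRECTORY;
            for (int i = 2; i < subDirs.length; i++) {
                dirPath += "/" + subDirs[i];
                fs.setOwner(new Path(dirPath), username, username);
            }
        }
    }
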


[40/50] [abbrv] ambari git commit: AMBARI-19438 Add enable delta sync property for Ranger (mugdha)

Posted by nc...@apache.org.
AMBARI-19438 Add enable delta sync property for Ranger (mugdha)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ff4babbb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ff4babbb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ff4babbb

Branch: refs/heads/branch-dev-patch-upgrade
Commit: ff4babbb776b6f7f53003a499d8677c9d79b5404
Parents: 0de31d6
Author: Mugdha Varadkar <mu...@apache.org>
Authored: Wed Jan 25 12:20:00 2017 +0530
Committer: Mugdha Varadkar <mu...@apache.org>
Committed: Wed Jan 25 12:24:41 2017 +0530

----------------------------------------------------------------------
 .../0.7.0/configuration/ranger-ugsync-site.xml  | 42 ++++++++++++++++++++
 .../common-services/RANGER/0.7.0/metainfo.xml   |  6 +++
 .../RANGER/0.7.0/themes/theme_version_5.json    | 22 ++++++++++
 3 files changed, 70 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ff4babbb/ambari-server/src/main/resources/common-services/RANGER/0.7.0/configuration/ranger-ugsync-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.7.0/configuration/ranger-ugsync-site.xml b/ambari-server/src/main/resources/common-services/RANGER/0.7.0/configuration/ranger-ugsync-site.xml
new file mode 100644
index 0000000..775b5ca
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.7.0/configuration/ranger-ugsync-site.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>ranger.usersync.ldap.deltasync</name>
+    <display-name>Incremental Sync</display-name>
+    <value>false</value>
+    <description>Enable Incremental Sync</description>
+    <value-attributes>
+      <type>value-list</type>
+      <overridable>false</overridable>
+      <entries>
+        <entry>
+          <value>true</value>
+          <label>Yes</label>
+        </entry>
+        <entry>
+          <value>false</value>
+          <label>No</label>
+        </entry>
+      </entries>
+      <selection-cardinality>1</selection-cardinality>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ff4babbb/ambari-server/src/main/resources/common-services/RANGER/0.7.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.7.0/metainfo.xml b/ambari-server/src/main/resources/common-services/RANGER/0.7.0/metainfo.xml
index bc01321..e784056 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.7.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.7.0/metainfo.xml
@@ -27,6 +27,12 @@
       <comment>Comprehensive security for Hadoop</comment>
       <extends>common-services/RANGER/0.6.0</extends>
       <version>0.7.0</version>
+      <themes>
+        <theme>
+          <fileName>theme_version_5.json</fileName>
+          <default>true</default>
+        </theme>
+      </themes>
       <configuration-dependencies>
         <config-type>ranger-solr-configuration</config-type>
       </configuration-dependencies>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ff4babbb/ambari-server/src/main/resources/common-services/RANGER/0.7.0/themes/theme_version_5.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.7.0/themes/theme_version_5.json b/ambari-server/src/main/resources/common-services/RANGER/0.7.0/themes/theme_version_5.json
new file mode 100644
index 0000000..87fccbe
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.7.0/themes/theme_version_5.json
@@ -0,0 +1,22 @@
+{
+  "configuration": {
+    "placement": {
+      "configuration-layout": "default",
+      "configs": [
+        {
+          "config": "ranger-ugsync-site/ranger.usersync.ldap.deltasync",
+          "subsection-name": "subsection-ranger-user-row2-col1",
+          "subsection-tab-name": "ldap-common-configs"
+        }
+      ]
+    },
+    "widgets": [
+      {
+        "config": "ranger-ugsync-site/ranger.usersync.ldap.deltasync",
+        "widget": {
+          "type": "toggle"
+        }
+      }
+    ]
+  }
+}
\ No newline at end of file
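
The new property is a plain boolean that the theme above renders as a toggle. For orientation, a hedged sketch of how a consumer might read the flag once it lands in ranger-ugsync-site; the Properties-based accessor is illustrative only, since this commit declares the property and its widget but not the consuming code:

    import java.util.Properties;

    public class DeltaSyncFlagSketch {
        // Defaults to false, matching the <value> declared in the XML above.
        public static boolean isDeltaSyncEnabled(Properties ugsyncSite) {
            return Boolean.parseBoolean(
                ugsyncSite.getProperty("ranger.usersync.ldap.deltasync", "false"));
        }
    }
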


[24/50] [abbrv] ambari git commit: AMBARI-19686. Add condition for detecting Blueprint call so that 'llap' named queue can be created on 1st BP invocation, if deemed required.

Posted by nc...@apache.org.
AMBARI-19686. Add condition for detecting Blueprint call so that 'llap' named queue can be created on 1st BP invocation, if deemed required.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/796f52a4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/796f52a4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/796f52a4

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 796f52a47acb8a04c4e8a7e225863cd848460ed7
Parents: 9c952c3
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Mon Jan 23 17:46:59 2017 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Mon Jan 23 17:46:59 2017 -0800

----------------------------------------------------------------------
 .../src/main/resources/stacks/HDP/2.5/services/stack_advisor.py  | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/796f52a4/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 04ada3e..d2c0459 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -778,7 +778,9 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
       # Check if it's 1st invocation after enabling Hive Server Interactive (config: enable_hive_interactive).
       changed_configs_has_enable_hive_int = self.isConfigPropertiesChanged(services, "hive-interactive-env", ['enable_hive_interactive'], False)
       llap_named_queue_selected_in_curr_invocation = None
-      if changed_configs_has_enable_hive_int \
+      # Check if its : 1. 1st invocation from UI ('enable_hive_interactive' in changed-configurations)
+      # OR 2. 1st invocation from BP (services['changed-configurations'] should be empty in this case)
+      if (changed_configs_has_enable_hive_int or  0 == len(services['changed-configurations']))\
         and services['configurations']['hive-interactive-env']['properties']['enable_hive_interactive']:
         if len(leafQueueNames) == 1 or (len(leafQueueNames) == 2 and llap_queue_name in leafQueueNames):
           llap_named_queue_selected_in_curr_invocation = True
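
The one-line condition change is easy to misread in diff form. Restated as a hedged Java sketch (parameter names are illustrative; the real logic is the Python above): the 'llap' queue handling now fires either when enable_hive_interactive appears in changed-configurations (the UI path) or when changed-configurations is empty, which is how a first Blueprint invocation presents itself:

    import java.util.List;

    public class FirstInvocationCheckSketch {
        static boolean shouldConsiderLlapQueue(boolean enableHiveIntChanged,
                                               List<?> changedConfigurations,
                                               boolean hiveInteractiveEnabled) {
            boolean uiFirstCall = enableHiveIntChanged;                   // UI sends the toggle as a changed config
            boolean blueprintFirstCall = changedConfigurations.isEmpty(); // BP sends no changed configs at all
            return (uiFirstCall || blueprintFirstCall) && hiveInteractiveEnabled;
        }
    }
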


[45/50] [abbrv] ambari git commit: AMBARI-19703. "Add Config Property" causes JS-error (onechiporenko)

Posted by nc...@apache.org.
AMBARI-19703. "Add Config Property" causes JS-error (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8642a463
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8642a463
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8642a463

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 8642a4634b230755f5c285ed340a7636abe54b8f
Parents: 75bf160
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Wed Jan 25 11:58:52 2017 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Wed Jan 25 15:53:37 2017 +0200

----------------------------------------------------------------------
 .../app/views/common/configs/service_configs_by_category_view.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8642a463/ambari-web/app/views/common/configs/service_configs_by_category_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/service_configs_by_category_view.js b/ambari-web/app/views/common/configs/service_configs_by_category_view.js
index 4148197..25f7dfb 100644
--- a/ambari-web/app/views/common/configs/service_configs_by_category_view.js
+++ b/ambari-web/app/views/common/configs/service_configs_by_category_view.js
@@ -664,8 +664,8 @@ App.ServiceConfigsByCategoryView = Em.View.extend(App.UserPref, App.ConfigOverri
           var controller = (App.router.get('currentState.name') == 'configs')
               ? App.router.get('mainServiceInfoConfigsController')
               : App.router.get('wizardStep7Controller');
-          this.get('mainView').onClose();
-          controller.set('filter', event.view.get('serviceConfigObj.name'));
+          controller.set('filter', this.get('serviceConfigObj.name'));
+          this.get('parentView').onClose();
         }
       })
     });
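
The fix reads as an ordering and handle problem: the popup was closed before the filter value was read, and mainView/event.view appear to be the wrong references in this context (parentView and the view's own serviceConfigObj are used instead). A minimal Java analogue of the corrected order, with illustrative interfaces:

    public class CloseOrderingSketch {
        interface PopupView { String selectedConfigName(); void close(); }
        interface ConfigsController { void setFilter(String name); }

        // Read state off the view first, then dispose of it; reversing the
        // two statements reproduces the class of bug fixed above.
        static void applyFilterThenClose(PopupView view, ConfigsController controller) {
            controller.setFilter(view.selectedConfigName());
            view.close();
        }
    }
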


[07/50] [abbrv] ambari git commit: AMBARI-19665. Add basic auth support for Log Search Swagger UI (oleewere)

Posted by nc...@apache.org.
AMBARI-19665. Add basic auth support for Log Search Swagger UI (oleewere)

Change-Id: I369d3e994e178a98a1a925bb32fdb400232f71d4


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b6b43785
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b6b43785
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b6b43785

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b6b43785ea2f335734a647771cb468ccf89da06b
Parents: 9bd8b7f
Author: oleewere <ol...@gmail.com>
Authored: Mon Jan 23 11:32:08 2017 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Mon Jan 23 13:33:54 2017 +0100

----------------------------------------------------------------------
 .../ambari/logsearch/conf/ApiDocConfig.java     |  1 -
 .../src/main/resources/swagger/swagger.html     | 26 +++++++++-----------
 2 files changed, 11 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b6b43785/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java
index 0ddad65..86c1edd 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java
@@ -45,7 +45,6 @@ public class ApiDocConfig {
   public BeanConfig swaggerConfig() throws UnknownHostException {
     BeanConfig beanConfig = new BeanConfig();
     beanConfig.setSchemes(new String[]{"http", "https"});
-    beanConfig.setHost(InetAddress.getLocalHost().getHostAddress() + ":61888"); // TODO: port from property
     beanConfig.setBasePath("/api/v1");
     beanConfig.setTitle("Log Search REST API");
     beanConfig.setDescription("Log aggregation, analysis, and visualization.");

http://git-wip-us.apache.org/repos/asf/ambari/blob/b6b43785/ambari-logsearch/ambari-logsearch-portal/src/main/resources/swagger/swagger.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/swagger/swagger.html b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/swagger/swagger.html
index e1b052a..33346d3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/swagger/swagger.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/swagger/swagger.html
@@ -17,7 +17,7 @@
 -->
 <html>
 <head>
-    <title>Swagger UI</title>
+    <title>Log Search REST API</title>
     <link rel="icon" type="image/png" href="images/favicon-32x32.png" sizes="32x32" />
     <link rel="icon" type="image/png" href="images/favicon-16x16.png" sizes="16x16" />
     <link href='css/typography.css' media='screen' rel='stylesheet' type='text/css'/>
@@ -43,7 +43,7 @@
             if (url && url.length > 1) {
                 url = decodeURIComponent(url[1]);
             } else {
-                var urlPrefix = location.protocol+'//'+location.hostname+(location.port ? ':'+location.port: '');
+                var urlPrefix = location.protocol +'//'+ location.hostname+(location.port ? ':'+location.port: '');
                 url = urlPrefix + "/api/v1/swagger.yaml";
             }
             window.swaggerUi = new SwaggerUi({
@@ -74,21 +74,16 @@
             });
 
             function addApiKeyAuthorization(){
-                var key = encodeURIComponent($('#input_apiKey')[0].value);
-                if(key && key.trim() != "") {
-                    var apiKeyAuth = new SwaggerClient.ApiKeyAuthorization("api_key", key, "query");
-                    window.swaggerUi.api.clientAuthorizations.add("api_key", apiKeyAuth);
-                    log("added key " + key);
+                var username = encodeURIComponent($('#input_username')[0].value);
+                var password = encodeURIComponent($('#input_password')[0].value);
+                if (username && username.trim() != "" && password && password != "") {
+                    var apiKeyAuth = new SwaggerClient.PasswordAuthorization("Authorization", username, password);
+                    window.swaggerUi.api.clientAuthorizations.add("key", apiKeyAuth);
+                    log("added authorization header: " + 'Basic ' + btoa(username + ':' + password));
                 }
             }
 
-            $('#input_apiKey').change(addApiKeyAuthorization);
-
-            // if you have an apiKey you would like to pre-populate on the page for demonstration purposes...
-            /*
-             var apiKey = "myApiKeyXXXX123456789";
-             $('#input_apiKey').val(apiKey);
-             */
+            $('#input_username, #input_password').change(addApiKeyAuthorization);
 
             window.swaggerUi.load();
 
@@ -107,7 +102,8 @@
         <a id="logo" href="http://swagger.io">swagger</a>
         <form id='api_selector'>
             <div class='input'><input placeholder="http://example.com/api" id="input_baseUrl" name="baseUrl" type="text"/></div>
-            <div class='input'><input placeholder="api_key" id="input_apiKey" name="apiKey" type="text"/></div>
+            <div class="input"><input placeholder="username" id="input_username" name="username" type="text" size="10"></div>
+            <div class="input"><input placeholder="password" id="input_password" name="password" type="password" size="10"></div>
             <div class='input'><a id="explore" href="#">Explore</a></div>
         </form>
     </div>
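
What the page now does, in miniature, is standard HTTP Basic authentication: base64-encode "username:password" and send it in the Authorization header (SwaggerClient.PasswordAuthorization handles this for the UI). A self-contained sketch of the header construction:

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class BasicAuthHeaderSketch {
        static String build(String username, String password) {
            String token = Base64.getEncoder().encodeToString(
                (username + ":" + password).getBytes(StandardCharsets.UTF_8));
            return "Basic " + token; // e.g. "Basic YWRtaW46c2VjcmV0" for admin:secret
        }
    }
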


[44/50] [abbrv] ambari git commit: AMBARI-19711 Unable to register HDP version 2.6.0.0 on cluster with HDP 2.5.3.0 (atkach)

Posted by nc...@apache.org.
AMBARI-19711 Unable to register HDP version 2.6.0.0 on cluster with HDP 2.5.3.0 (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/75bf160c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/75bf160c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/75bf160c

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 75bf160ce8dfd2fdd148f0dc7f857336c57a8b81
Parents: 18e58ff
Author: Andrii Tkach <at...@apache.org>
Authored: Wed Jan 25 14:31:16 2017 +0200
Committer: Andrii Tkach <at...@apache.org>
Committed: Wed Jan 25 14:37:01 2017 +0200

----------------------------------------------------------------------
 .../controllers/stackVersions/StackVersionsCreateCtrl.js       | 6 ++++++
 1 file changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/75bf160c/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index 3c16963..b3c27dc 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@ -284,6 +284,12 @@ angular.module('ambariAdminConsole')
     $scope.editVersionDisabled = true;
     delete $scope.updateObj.href;
     $scope.updateObj.operating_systems = [];
+    angular.forEach($scope.osList, function (os) {
+      os.OperatingSystems.ambari_managed_repositories = !$scope.useRedhatSatellite;
+      if (os.selected) {
+        $scope.updateObj.operating_systems.push(os);
+      }
+    });
 
     var skip = $scope.skipValidation || $scope.useRedhatSatellite;
     return Stack.validateBaseUrls(skip, $scope.osList, $scope.upgradeStack).then(function (invalidUrls) {
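
The registration failure traces to operating_systems being reset to an empty array and, apparently, left empty at validation time. The fix copies every selected OS back in, flipping ambari_managed_repositories off whenever the Red Hat Satellite option is on. The same select-and-copy step as a hedged Java sketch (types are illustrative stand-ins for the Angular scope objects):

    import java.util.ArrayList;
    import java.util.List;

    public class SelectedOsCollectorSketch {
        static class Os {
            boolean selected;
            boolean ambariManagedRepositories;
        }

        static List<Os> collect(List<Os> osList, boolean useRedhatSatellite) {
            List<Os> selectedOses = new ArrayList<>();
            for (Os os : osList) {
                // Repos are Ambari-managed exactly when Satellite is not in use.
                os.ambariManagedRepositories = !useRedhatSatellite;
                if (os.selected) {
                    selectedOses.add(os);
                }
            }
            return selectedOses;
        }
    }
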


[43/50] [abbrv] ambari git commit: AMBARI-19692. LDAP regression in Ambari 2.4: Login alias is not resolved during authentication (oleewere)

Posted by nc...@apache.org.
AMBARI-19692. LDAP regression in Ambari 2.4: Login alias is not resolved during authentication (oleewere)

Change-Id: I91da4344bc8cbfdb4863c973312c75ac21464066


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/18e58ffb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/18e58ffb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/18e58ffb

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 18e58ffb3759819155261f7e3e68491f330ecd02
Parents: dab389d
Author: oleewere <ol...@gmail.com>
Authored: Tue Jan 24 15:24:59 2017 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Wed Jan 25 12:07:36 2017 +0100

----------------------------------------------------------------------
 .../authorization/AmbariLdapAuthenticationProvider.java        | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/18e58ffb/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java
index 552be1e..a35e7eb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariLdapAuthenticationProvider.java
@@ -194,18 +194,18 @@ public class AmbariLdapAuthenticationProvider implements AuthenticationProvider
   }
 
   private Integer getUserId(Authentication authentication) {
-    String userName = authentication.getName();
+    String userName = AuthorizationHelper.resolveLoginAliasToUserName(authentication.getName());
 
     UserEntity userEntity = userDAO.findLdapUserByName(userName);
 
     // lookup is case insensitive, so no need for string comparison
     if (userEntity == null) {
-      LOG.info("user not found ");
+      LOG.info("user not found ('{}')", userName);
       throw new InvalidUsernamePasswordCombinationException();
     }
 
     if (!userEntity.getActive()) {
-      LOG.debug("User account is disabled");
+      LOG.debug("User account is disabled ('{}')", userName);
 
       throw new InvalidUsernamePasswordCombinationException();
     }
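
The regression is a one-line ordering issue: the raw login name was handed straight to the DAO, so a user signing in with a login alias never matched the stored LDAP account and hit "user not found". Resolving the alias first restores the earlier behavior. A hedged sketch of the corrected flow (the interfaces below are stand-ins for AuthorizationHelper and UserDAO, not their real signatures):

    public class AliasAwareLookupSketch {
        interface AliasResolver { String resolveLoginAliasToUserName(String loginName); }
        interface UserDao { Object findLdapUserByName(String userName); }

        static Object findUser(AliasResolver resolver, UserDao dao, String loginName) {
            // Resolve the alias *before* the lookup; passing loginName through
            // unresolved is what broke alias-based logins.
            String userName = resolver.resolveLoginAliasToUserName(loginName);
            return dao.findLdapUserByName(userName);
        }
    }
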