Posted to commits@ambari.apache.org by nc...@apache.org on 2017/02/17 22:06:00 UTC

[01/50] [abbrv] ambari git commit: AMBARI-20003. Not able to preview the workflow xml. (Padma Priya N via gauravn7)

Repository: ambari
Updated Branches:
  refs/heads/branch-feature-AMBARI-12556 341cb1247 -> 36620ba89


AMBARI-20003. Not able to preview the workflow xml. (Padma Priya N via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/45ef011e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/45ef011e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/45ef011e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 45ef011e455d92c23f66c8e9e18bcb8cc0fd742d
Parents: 2ed7158
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Wed Feb 15 17:05:21 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Wed Feb 15 17:05:49 2017 +0530

----------------------------------------------------------------------
 .../src/main/resources/ui/app/components/sqoop-action.js     | 8 ++++----
 .../src/main/resources/ui/app/domain/actionjob_hanlder.js    | 4 ++--
 .../resources/ui/app/templates/components/sqoop-action.hbs   | 2 +-
 3 files changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/45ef011e/contrib/views/wfmanager/src/main/resources/ui/app/components/sqoop-action.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/sqoop-action.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/sqoop-action.js
index 13eb6e5..2756700 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/sqoop-action.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/sqoop-action.js
@@ -22,7 +22,7 @@ export default Ember.Component.extend({
     if(this.get('isArg')){
       this.set("actionModel.command", undefined);
     }else{
-      this.set("actionModel.args",  Ember.A([]));
+      this.set("actionModel.arg",  Ember.A([]));
     }
   }),
   initialize : function(){
@@ -33,10 +33,10 @@ export default Ember.Component.extend({
     if(this.get('actionModel.jobXml') === undefined){
       this.set("actionModel.jobXml", Ember.A([]));
     }
-    if(this.get('actionModel.args') === undefined && !this.get('actionModel.command')){
-      this.set("actionModel.args", Ember.A([]));
+    if(this.get('actionModel.arg') === undefined && !this.get('actionModel.command')){
+      this.set("actionModel.arg", Ember.A([]));
       this.set('isArg', false);
-    }else if(this.get('actionModel.args') && this.get('actionModel.args').length > 0){
+    }else if(this.get('actionModel.arg') && this.get('actionModel.arg').length > 0){
       this.set('isArg', true);
     }else{
       this.set('isArg', false);

http://git-wip-us.apache.org/repos/asf/ambari/blob/45ef011e/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
index 2a82c24..691cc26 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
@@ -185,13 +185,13 @@ var SqoopActionJobHandler=ActionJobHandler.extend({
       {xml:"job-xml",domain:"jobXml",occurs:"many",domainProperty:"value"},
       {xml:"configuration",customHandler:this.configurationMapper},
       {xml:"command",domain:"command"},
-      {xml:"argument",domain:"args",occurs:"many",domainProperty:"value"},
+      {xml:"arg",domain:"arg",occurs:"many",domainProperty:"value"},
       {xml:"file",domain:"files",occurs:"many",domainProperty:"value"},
       {xml:"archive",domain:"archives",occurs:"many",domainProperty:"value"}
     ];
   },
   validate(nodeDomain){
-    if (Ember.isBlank(nodeDomain.command) && nodeDomain.args.length<1){
+    if (Ember.isBlank(nodeDomain.command) && nodeDomain.arg.length < 1){
       return [{message : "Either command or arguments have to be set."}];
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/45ef011e/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sqoop-action.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sqoop-action.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sqoop-action.hbs
index fb59149..db69864 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sqoop-action.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sqoop-action.hbs
@@ -27,7 +27,7 @@
     </div>
   </div>
   {{#if isArg}}
-    {{#arg-config args=actionModel.args register="register" title="arg"}}{{/arg-config}}
+    {{#arg-config args=actionModel.arg register="register" title="arg"}}{{/arg-config}}
   {{else}}
     <div class="form-group padding10">
       <label for="inputEmail" class="control-label col-xs-2">Command</label>


[23/50] [abbrv] ambari git commit: AMBARI-20031. Adopt optimal default config values for HBase (Ted Yu via smohanty)

Posted by nc...@apache.org.
AMBARI-20031. Adopt optimal default config values for HBase (Ted Yu via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6d82e215
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6d82e215
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6d82e215

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 6d82e215e22f41012156dc610ee07a10996a34f7
Parents: 4aa0f62
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Wed Feb 15 21:49:00 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Wed Feb 15 21:53:46 2017 -0800

----------------------------------------------------------------------
 .../services/HBASE/configuration/hbase-site.xml | 43 ++++++++++++++++++++
 1 file changed, 43 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6d82e215/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-site.xml
new file mode 100644
index 0000000..715023b
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HBASE/configuration/hbase-site.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+  <property>
+    <name>hbase.regionserver.executor.openregion.threads</name>
+    <value>20</value>
+    <description>The number of threads region server uses to open regions
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>hbase.master.namespace.init.timeout</name>
+    <value>2400000</value>
+    <description>The number of milliseconds master waits for hbase:namespace table to be initialized
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>hbase.master.wait.on.regionservers.timeout</name>
+    <value>30000</value>
+    <description>The number of milliseconds master waits for region servers to report in
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>
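
The three timeouts above are ordinary hbase-site.xml properties, so HBase reads them through the standard Hadoop Configuration API once Ambari renders the file to the cluster hosts. A minimal sketch of reading them back, not part of this commit; the class name is hypothetical and the fallback values passed to getInt/getLong are illustrative, not HBase's shipped defaults:
----------------------------------------------------------------------
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class HbaseSiteTimeoutsSketch {
  public static void main(String[] args) {
    // HBaseConfiguration.create() layers hbase-site.xml from the classpath on
    // top of hbase-default.xml, so the stack values above take effect once
    // the rendered file is on the classpath.
    Configuration conf = HBaseConfiguration.create();

    // The fallback arguments below are illustrative only.
    int openRegionThreads = conf.getInt("hbase.regionserver.executor.openregion.threads", 3);
    long namespaceInitTimeoutMs = conf.getLong("hbase.master.namespace.init.timeout", 300000L);
    long waitOnRegionServersMs = conf.getLong("hbase.master.wait.on.regionservers.timeout", 4500L);

    System.out.println("openregion.threads = " + openRegionThreads);
    System.out.println("namespace.init.timeout = " + namespaceInitTimeoutMs + " ms");
    System.out.println("wait.on.regionservers.timeout = " + waitOnRegionServersMs + " ms");
  }
}
----------------------------------------------------------------------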


[24/50] [abbrv] ambari git commit: AMBARI-20021 : ambari views : removed exclusion of httpcore and httpclient libraries and added them to dependencyManagement at version 4.4 (nitirajrathore)

Posted by nc...@apache.org.
AMBARI-20021 : ambari views : removed exclusion of httpcore and httpclient libraries and added them to dependencyManagement at version 4.4 (nitirajrathore)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f75eebad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f75eebad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f75eebad

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: f75eebadf656bde4ceca469fcd8f05ae641efc78
Parents: 6d82e21
Author: Nitiraj Singh Rathore <ni...@gmail.com>
Authored: Thu Feb 16 14:37:23 2017 +0530
Committer: Nitiraj Singh Rathore <ni...@gmail.com>
Committed: Thu Feb 16 14:37:23 2017 +0530

----------------------------------------------------------------------
 contrib/views/commons/pom.xml   |  8 -----
 contrib/views/files/pom.xml     |  8 -----
 contrib/views/hive-next/pom.xml | 26 ---------------
 contrib/views/hive20/pom.xml    | 62 +++++++++++-------------------------
 contrib/views/pig/pom.xml       |  8 -----
 contrib/views/pom.xml           | 11 +++++++
 contrib/views/utils/pom.xml     | 16 ----------
 contrib/views/wfmanager/pom.xml |  8 -----
 8 files changed, 30 insertions(+), 117 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f75eebad/contrib/views/commons/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/commons/pom.xml b/contrib/views/commons/pom.xml
index 27fed13..74e4aa1 100644
--- a/contrib/views/commons/pom.xml
+++ b/contrib/views/commons/pom.xml
@@ -74,14 +74,6 @@
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>com.fasterxml.jackson.core</groupId>
           <artifactId>jackson-core</artifactId>
         </exclusion>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f75eebad/contrib/views/files/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/files/pom.xml b/contrib/views/files/pom.xml
index eaad803..e55de89 100644
--- a/contrib/views/files/pom.xml
+++ b/contrib/views/files/pom.xml
@@ -58,14 +58,6 @@
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>com.fasterxml.jackson.core</groupId>
           <artifactId>jackson-core</artifactId>
         </exclusion>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f75eebad/contrib/views/hive-next/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/pom.xml b/contrib/views/hive-next/pom.xml
index 09d0329..2c93cc5 100644
--- a/contrib/views/hive-next/pom.xml
+++ b/contrib/views/hive-next/pom.xml
@@ -133,14 +133,6 @@
           <artifactId>jasper-compiler</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>com.fasterxml.jackson.core</groupId>
           <artifactId>jackson-core</artifactId>
         </exclusion>
@@ -183,14 +175,6 @@
           <groupId>tomcat</groupId>
           <artifactId>jasper-compiler</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -207,16 +191,6 @@
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
       <version>0.9.0</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
     <dependency>
       <groupId>junit</groupId>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f75eebad/contrib/views/hive20/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/pom.xml b/contrib/views/hive20/pom.xml
index 168ff98..faa995c 100644
--- a/contrib/views/hive20/pom.xml
+++ b/contrib/views/hive20/pom.xml
@@ -134,14 +134,6 @@
           <artifactId>jasper-compiler</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>com.fasterxml.jackson.core</groupId>
           <artifactId>jackson-core</artifactId>
         </exclusion>
@@ -168,33 +160,6 @@
       <version>2.0</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-jdbc</artifactId>
-      <version>${hive-version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-runtime</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.fasterxml.jackson.core</groupId>
-          <artifactId>jackson-databind</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-compiler</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
       <groupId>commons-cli</groupId>
       <artifactId>commons-cli</artifactId>
       <version>1.2</version>
@@ -209,14 +174,6 @@
       <artifactId>libthrift</artifactId>
       <version>0.9.0</version>
       <exclusions>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -247,6 +204,25 @@
       <version>2.0.0.0-SNAPSHOT</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-jdbc</artifactId>
+      <version>${hive-version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-runtime</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.fasterxml.jackson.core</groupId>
+          <artifactId>jackson-databind</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>tomcat</groupId>
+          <artifactId>jasper-compiler</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
       <groupId>commons-validator</groupId>
       <artifactId>commons-validator</artifactId>
       <version>1.4.0</version>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f75eebad/contrib/views/pig/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pig/pom.xml b/contrib/views/pig/pom.xml
index 3b71985..9ee9206 100644
--- a/contrib/views/pig/pom.xml
+++ b/contrib/views/pig/pom.xml
@@ -111,14 +111,6 @@
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>com.fasterxml.jackson.core</groupId>
           <artifactId>jackson-core</artifactId>
         </exclusion>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f75eebad/contrib/views/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pom.xml b/contrib/views/pom.xml
index ee26174..958c4b6 100644
--- a/contrib/views/pom.xml
+++ b/contrib/views/pom.xml
@@ -178,6 +178,17 @@
   <dependencyManagement>
     <dependencies>
       <dependency>
+        <groupId>org.apache.httpcomponents</groupId>
+        <artifactId>httpclient</artifactId>
+        <version>4.4</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.httpcomponents</groupId>
+        <artifactId>httpcore</artifactId>
+        <version>4.4</version>
+      </dependency>
+
+      <dependency>
         <groupId>org.glassfish.jersey.containers</groupId>
         <artifactId>jersey-container-servlet</artifactId>
         <version>2.6</version>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f75eebad/contrib/views/utils/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/utils/pom.xml b/contrib/views/utils/pom.xml
index c045f50..0bf8eb2 100644
--- a/contrib/views/utils/pom.xml
+++ b/contrib/views/utils/pom.xml
@@ -82,14 +82,6 @@
           <artifactId>jasper-runtime</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>com.fasterxml.jackson.core</groupId>
           <artifactId>jackson-core</artifactId>
         </exclusion>
@@ -188,17 +180,9 @@
       <version>${hadoop.version}</version>
       <exclusions>
         <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpclient</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>com.fasterxml.jackson.core</groupId>
           <artifactId>jackson-core</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>org.apache.httpcomponents</groupId>
-          <artifactId>httpcore</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f75eebad/contrib/views/wfmanager/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/pom.xml b/contrib/views/wfmanager/pom.xml
index 2585e13..d466823 100644
--- a/contrib/views/wfmanager/pom.xml
+++ b/contrib/views/wfmanager/pom.xml
@@ -90,14 +90,6 @@
 					<artifactId>jasper-runtime</artifactId>
 				</exclusion>
 				<exclusion>
-					<groupId>org.apache.httpcomponents</groupId>
-					<artifactId>httpclient</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.apache.httpcomponents</groupId>
-					<artifactId>httpcore</artifactId>
-				</exclusion>
-				<exclusion>
 					<groupId>com.fasterxml.jackson.core</groupId>
 					<artifactId>jackson-core</artifactId>
 				</exclusion>
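
With httpclient and httpcore now managed at 4.4 in contrib/views/pom.xml, every view module resolves the same HttpComponents artifacts instead of whatever version a transitive dependency dragged in. A minimal smoke-test sketch against the 4.x API, not part of this commit; the class name and URL are placeholders:
----------------------------------------------------------------------
import java.io.IOException;

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class HttpComponents44Sketch {
  public static void main(String[] args) throws IOException {
    // Both client and core come from the managed 4.4 version declared in the
    // parent pom's dependencyManagement; try-with-resources closes both.
    try (CloseableHttpClient client = HttpClients.createDefault();
         CloseableHttpResponse response = client.execute(new HttpGet("http://localhost:8080/"))) {
      System.out.println("Status: " + response.getStatusLine().getStatusCode());
    }
  }
}
----------------------------------------------------------------------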


[03/50] [abbrv] ambari git commit: AMBARI-20006 Log Search should not hide external Solr button even if it's mandatory (mgergely)

Posted by nc...@apache.org.
AMBARI-20006 Log Search should not hide external Solr button even if it's mandatory (mgergely)

Change-Id: If2502004ac32ed048904f73204cc5fd8de6459e3


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2edfefcc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2edfefcc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2edfefcc

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 2edfefccaa6188c87b98219e98c9626061399f7c
Parents: a8c1010
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Wed Feb 15 12:39:15 2017 +0100
Committer: Miklos Gergely <mg...@hortonworks.com>
Committed: Wed Feb 15 12:39:15 2017 +0100

----------------------------------------------------------------------
 .../org/apache/ambari/logsearch/LogSearch.java  | 51 ++-------------
 .../ambari/logsearch/conf/ApiDocConfig.java     |  1 -
 .../ambari/logsearch/conf/SecurityConfig.java   |  1 -
 .../logsearch/conf/SolrAuditLogPropsConfig.java |  1 -
 .../conf/SolrServiceLogPropsConfig.java         |  3 -
 .../apache/ambari/logsearch/util/WebUtil.java   | 65 ++++++++++++++++++++
 .../LogsearchKRBAuthenticationFilter.java       |  7 +--
 ...LogsearchSecurityContextFormationFilter.java |  1 -
 ...rchUsernamePasswordAuthenticationFilter.java |  2 -
 .../stacks/HDP/2.2/services/stack_advisor.py    |  1 -
 10 files changed, 72 insertions(+), 61 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2edfefcc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
index 70053d2..b75da0e 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
@@ -18,18 +18,16 @@
  */
 package org.apache.ambari.logsearch;
 
-import java.io.IOException;
 import java.net.MalformedURLException;
-import java.net.ServerSocket;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.net.URL;
 import java.util.EnumSet;
 
 import org.apache.ambari.logsearch.common.ManageStartEndTime;
 import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.conf.ApplicationConfig;
 import org.apache.ambari.logsearch.util.SSLUtil;
+import org.apache.ambari.logsearch.util.WebUtil;
 import org.apache.ambari.logsearch.web.listener.LogSearchSessionListener;
 import org.apache.commons.lang.StringUtils;
 import org.eclipse.jetty.server.Connector;
@@ -68,7 +66,6 @@ public class LogSearch {
   private static final String HTTPS_PORT = "61889";
   private static final String HTTP_PORT = "61888";
 
-  private static final String WEB_RESOURCE_FOLDER = "webapps/app";
   private static final String ROOT_CONTEXT = "/";
   private static final Integer SESSION_TIMEOUT = 60 * 30;
 
@@ -112,7 +109,7 @@ public class LogSearch {
     if (HTTPS_PROTOCOL.equals(protcolProperty) && SSLUtil.isKeyStoreSpecified()) {
       LOG.info("Building https server...........");
       port = portSpecified ? argv[0] : HTTPS_PORT;
-      checkPort(Integer.parseInt(port));
+      WebUtil.checkPort(Integer.parseInt(port));
       httpConfiguration.addCustomizer(new SecureRequestCustomizer());
       SslContextFactory sslContextFactory = SSLUtil.getSslContextFactory();
       ServerConnector sslConnector = new ServerConnector(server,
@@ -123,7 +120,7 @@ public class LogSearch {
     } else {
       LOG.info("Building http server...........");
       port = portSpecified ? argv[0] : HTTP_PORT;
-      checkPort(Integer.parseInt(port));
+      WebUtil.checkPort(Integer.parseInt(port));
       ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory(httpConfiguration));
       connector.setPort(Integer.parseInt(port));
       server.setConnectors(new Connector[] { connector });
@@ -134,7 +131,7 @@ public class LogSearch {
   }
 
   private WebAppContext createBaseWebappContext() throws MalformedURLException {
-    URI webResourceBase = findWebResourceBase();
+    URI webResourceBase = WebUtil.findWebResourceBase();
     WebAppContext context = new WebAppContext();
     context.setBaseResource(Resource.newResource(webResourceBase));
     context.setContextPath(ROOT_CONTEXT);
@@ -176,44 +173,4 @@ public class LogSearch {
     context.setHandler(resourceHandler);
     return context;
   }
-
-  private URI findWebResourceBase() {
-    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
-        .getResource(WEB_RESOURCE_FOLDER);
-    String errorMessage = "Web Resource Folder " + WEB_RESOURCE_FOLDER + " not found in classpath";
-    if (fileCompleteUrl != null) {
-      try {
-        return fileCompleteUrl.toURI().normalize();
-      } catch (URISyntaxException e) {
-        LOG.error(errorMessage, e);
-        System.exit(1);
-      }
-    } else {
-      LOG.error(errorMessage);
-      System.exit(1);
-    }
-    throw new IllegalStateException(errorMessage);
-  }
-
-  private void checkPort(int port) {
-    ServerSocket serverSocket = null;
-    boolean portBusy = false;
-    try {
-      serverSocket = new ServerSocket(port);
-    } catch (IOException ex) {
-      portBusy = true;
-      LOG.error(ex.getLocalizedMessage() + " PORT :" + port);
-    } finally {
-      if (serverSocket != null) {
-        try {
-          serverSocket.close();
-        } catch (Exception exception) {
-          // ignore
-        }
-      }
-      if (portBusy) {
-        System.exit(1);
-      }
-    }
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/2edfefcc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java
index 86c1edd..4a8fdea 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java
@@ -25,7 +25,6 @@ import io.swagger.jaxrs.listing.SwaggerSerializers;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
-import java.net.InetAddress;
 import java.net.UnknownHostException;
 
 @Configuration

http://git-wip-us.apache.org/repos/asf/ambari/blob/2edfefcc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java
index b15ae43..2f9cba4 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java
@@ -47,7 +47,6 @@ import org.springframework.security.web.util.matcher.RequestMatcher;
 
 import javax.inject.Inject;
 import javax.inject.Named;
-import javax.servlet.Filter;
 import java.util.List;
 
 import static org.apache.ambari.logsearch.common.LogSearchConstants.LOGSEARCH_SESSION_ID;

http://git-wip-us.apache.org/repos/asf/ambari/blob/2edfefcc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogPropsConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogPropsConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogPropsConfig.java
index ace278f..5981bcc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogPropsConfig.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogPropsConfig.java
@@ -23,7 +23,6 @@ import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Configuration;
 
 import java.util.List;
-import java.util.Map;
 
 @Configuration
 public class SolrAuditLogPropsConfig implements SolrPropsConfig {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2edfefcc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogPropsConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogPropsConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogPropsConfig.java
index 81935d4..482438a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogPropsConfig.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogPropsConfig.java
@@ -21,9 +21,6 @@ package org.apache.ambari.logsearch.conf;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Configuration;
 
-import java.util.List;
-import java.util.Map;
-
 @Configuration
 public class SolrServiceLogPropsConfig extends SolrConnectionPropsConfig {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/2edfefcc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/WebUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/WebUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/WebUtil.java
new file mode 100644
index 0000000..36865ad
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/WebUtil.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.util;
+
+import java.io.IOException;
+import java.net.ServerSocket;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class WebUtil {
+  private static final Logger LOG = LoggerFactory.getLogger(WebUtil.class);
+
+  private static final String WEB_RESOURCE_FOLDER = "webapps/app";
+
+  private WebUtil() {
+    throw new UnsupportedOperationException();
+  }
+
+  public static URI findWebResourceBase() {
+    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader().getResource(WEB_RESOURCE_FOLDER);
+    String errorMessage = "Web Resource Folder " + WEB_RESOURCE_FOLDER + " not found in classpath";
+    if (fileCompleteUrl != null) {
+      try {
+        return fileCompleteUrl.toURI().normalize();
+      } catch (URISyntaxException e) {
+        LOG.error(errorMessage, e);
+        System.exit(1);
+      }
+    } else {
+      LOG.error(errorMessage);
+      System.exit(1);
+    }
+    throw new IllegalStateException(errorMessage);
+  }
+
+  public static void checkPort(int port) {
+    try (ServerSocket serverSocket = new ServerSocket(port)) {
+    } catch (IOException ex) {
+      LOG.error(ex.getLocalizedMessage() + " PORT :" + port);
+      System.exit(1);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/2edfefcc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
index fdda542..1b77753 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
@@ -246,8 +246,7 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
   private Authentication getGrantedAuthority(Authentication authentication) {
     UsernamePasswordAuthenticationToken result = null;
     if (authentication != null && authentication.isAuthenticated()) {
-      final List<GrantedAuthority> grantedAuths = getAuthorities(authentication
-          .getName().toString());
+      final List<GrantedAuthority> grantedAuths = getAuthorities();
       final UserDetails userDetails = new User(authentication.getName()
           .toString(), authentication.getCredentials().toString(), grantedAuths);
       result = new UsernamePasswordAuthenticationToken(userDetails,
@@ -258,7 +257,7 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
     return authentication;
   }
 
-  private List<GrantedAuthority> getAuthorities(String username) {
+  private List<GrantedAuthority> getAuthorities() {
     final List<GrantedAuthority> grantedAuths = new ArrayList<>();
     grantedAuths.add(new SimpleGrantedAuthority(DEFAULT_USER_ROLE));
     return grantedAuths;
@@ -275,7 +274,7 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
     org.apache.ambari.logsearch.web.model.User user = new org.apache.ambari.logsearch.web.model.User();
     user.setUsername(username);
     authentication = new UsernamePasswordAuthenticationToken(username,
-        password, getAuthorities(username));
+        password, getAuthorities());
     return authentication;
   }
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/2edfefcc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
index b427749..fed86e8 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
@@ -38,7 +38,6 @@ import org.apache.log4j.Logger;
 import org.springframework.security.authentication.AnonymousAuthenticationToken;
 import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
-import org.springframework.stereotype.Component;
 import org.springframework.web.filter.GenericFilterBean;
 
 public class LogsearchSecurityContextFormationFilter extends GenericFilterBean {

http://git-wip-us.apache.org/repos/asf/ambari/blob/2edfefcc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java
index 24ec2d4..e20c0fa 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java
@@ -20,13 +20,11 @@ package org.apache.ambari.logsearch.web.filters;
 
 import java.io.IOException;
 
-import javax.inject.Inject;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.log4j.Logger;
-import org.springframework.security.authentication.AuthenticationManager;
 import org.springframework.security.core.AuthenticationException;
 import org.springframework.security.web.authentication.RememberMeServices;
 import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;

http://git-wip-us.apache.org/repos/asf/ambari/blob/2edfefcc/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index 421d3d4..0d7b8b9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -1084,7 +1084,6 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
       recommendedMaxShards = 100
       
       putLogsearchCommonEnvProperty('logsearch_use_external_solr', 'true')
-      putLogsearchCommonEnvAttribute('logsearch_use_external_solr', 'visible', 'false')
 
     # recommend number of shard
     putLogsearchAttribute('logsearch.collection.service.logs.numshards', 'minimum', recommendedMinShards)
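
The extracted WebUtil.checkPort opens a ServerSocket in a try-with-resources block and exits the JVM if the bind fails, which is what LogSearch.java now calls before wiring up its Jetty connectors. A minimal caller sketch, not part of this commit; the class name is hypothetical, and 61888 simply mirrors the HTTP_PORT constant shown above:
----------------------------------------------------------------------
import org.apache.ambari.logsearch.util.WebUtil;

public class PortProbeSketch {
  public static void main(String[] args) {
    // 61888 mirrors HTTP_PORT in LogSearch.java; pass another value on the
    // command line to probe a different port.
    int port = args.length > 0 ? Integer.parseInt(args[0]) : 61888;

    // checkPort() opens and immediately closes a ServerSocket; if the bind
    // fails it logs the error and calls System.exit(1).
    WebUtil.checkPort(port);
    System.out.println("Port " + port + " is free; safe to start the server.");
  }
}
----------------------------------------------------------------------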


[14/50] [abbrv] ambari git commit: AMBARI-19829: Several HDFS/YARN widgets on Heatmaps show N/A. (Qin Liu via avijayan)

Posted by nc...@apache.org.
AMBARI-19829: Several HDFS/YARN widgets on Heatmaps show N/A. (Qin Liu via avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/dd6fb57d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/dd6fb57d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/dd6fb57d

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: dd6fb57d1ab26dad7f13a371f187e7330c2f6450
Parents: b36f00e
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Wed Feb 15 08:43:28 2017 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Wed Feb 15 11:06:44 2017 -0800

----------------------------------------------------------------------
 .../common-services/HDFS/2.1.0.2.0/widgets.json | 12 ++---
 .../common-services/HDFS/3.0.0.3.0/widgets.json | 48 ++++++++++----------
 .../YARN/2.1.0.2.0/YARN_widgets.json            | 18 ++++----
 .../YARN/3.0.0.3.0/YARN_widgets.json            | 18 ++++----
 .../stacks/HDP/2.3/services/HDFS/widgets.json   | 48 ++++++++++----------
 .../HDP/2.3/services/YARN/YARN_widgets.json     | 18 ++++----
 6 files changed, 81 insertions(+), 81 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/dd6fb57d/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json
index bcfb2cc..39c6c0e 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json
@@ -337,8 +337,8 @@
           "is_visible": true,
           "metrics": [
             {
-              "name": "dfs.datanode.BytesRead._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_read._rate",
+              "name": "dfs.datanode.BytesRead",
+              "metric_path": "metrics/dfs/datanode/bytes_read",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             }
@@ -346,7 +346,7 @@
           "values": [
             {
               "name": "HDFS Bytes Read",
-              "value": "${dfs.datanode.BytesRead._rate}"
+              "value": "${dfs.datanode.BytesRead}"
             }
           ],
           "properties": {
@@ -361,8 +361,8 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "dfs.datanode.BytesWritten._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_written._rate",
+              "name": "dfs.datanode.BytesWritten",
+              "metric_path": "metrics/dfs/datanode/bytes_written",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             }
@@ -370,7 +370,7 @@
           "values": [
             {
               "name": "HDFS Bytes Written",
-              "value": "${dfs.datanode.BytesWritten._rate}"
+              "value": "${dfs.datanode.BytesWritten}"
             }
           ],
           "properties": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/dd6fb57d/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/widgets.json b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/widgets.json
index 4a645b0..83ec3dc 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/widgets.json
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/widgets.json
@@ -416,8 +416,8 @@
           "is_visible": true,
           "metrics": [
             {
-              "name": "dfs.datanode.BytesRead._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_read._rate",
+              "name": "dfs.datanode.BytesRead",
+              "metric_path": "metrics/dfs/datanode/bytes_read",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             }
@@ -425,7 +425,7 @@
           "values": [
             {
               "name": "HDFS Bytes Read",
-              "value": "${dfs.datanode.BytesRead._rate}"
+              "value": "${dfs.datanode.BytesRead}"
             }
           ],
           "properties": {
@@ -440,8 +440,8 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "dfs.datanode.BytesWritten._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_written._rate",
+              "name": "dfs.datanode.BytesWritten",
+              "metric_path": "metrics/dfs/datanode/bytes_written",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             }
@@ -449,7 +449,7 @@
           "values": [
             {
               "name": "HDFS Bytes Written",
-              "value": "${dfs.datanode.BytesWritten._rate}"
+              "value": "${dfs.datanode.BytesWritten}"
             }
           ],
           "properties": {
@@ -537,26 +537,26 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "dfs.datanode.BytesRead._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_read._rate",
+              "name": "dfs.datanode.BytesRead",
+              "metric_path": "metrics/dfs/datanode/bytes_read",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.BytesWritten._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_written._rate",
+              "name": "dfs.datanode.BytesWritten",
+              "metric_path": "metrics/dfs/datanode/bytes_written",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.TotalReadTime._rate",
-              "metric_path": "metrics/dfs/datanode/TotalReadTime._rate",
+              "name": "dfs.datanode.TotalReadTime",
+              "metric_path": "metrics/dfs/datanode/TotalReadTime",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.TotalWriteTime._rate",
-              "metric_path": "metrics/dfs/datanode/TotalWriteTime._rate",
+              "name": "dfs.datanode.TotalWriteTime",
+              "metric_path": "metrics/dfs/datanode/TotalWriteTime",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             }
@@ -564,7 +564,7 @@
           "values": [
             {
               "name": "DataNode Process Disk I/O Utilization",
-              "value": "${((dfs.datanode.BytesRead._rate/dfs.datanode.TotalReadTime._rate)+(dfs.datanode.BytesWritten._rate/dfs.datanode.TotalWriteTime._rate))*50}"
+              "value": "${((dfs.datanode.BytesRead/dfs.datanode.TotalReadTime)+(dfs.datanode.BytesWritten/dfs.datanode.TotalWriteTime))*50}"
             }
           ],
           "properties": {
@@ -579,26 +579,26 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "dfs.datanode.RemoteBytesRead._rate",
-              "metric_path": "metrics/dfs/datanode/RemoteBytesRead._rate",
+              "name": "dfs.datanode.RemoteBytesRead",
+              "metric_path": "metrics/dfs/datanode/RemoteBytesRead",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.ReadsFromRemoteClient._rate",
-              "metric_path": "metrics/dfs/datanode/reads_from_remote_client._rate",
+              "name": "dfs.datanode.ReadsFromRemoteClient",
+              "metric_path": "metrics/dfs/datanode/reads_from_remote_client",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.RemoteBytesWritten._rate",
-              "metric_path": "metrics/dfs/datanode/RemoteBytesWritten._rate",
+              "name": "dfs.datanode.RemoteBytesWritten",
+              "metric_path": "metrics/dfs/datanode/RemoteBytesWritten",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.WritesFromRemoteClient._rate",
-              "metric_path": "metrics/dfs/datanode/writes_from_remote_client._rate",
+              "name": "dfs.datanode.WritesFromRemoteClient",
+              "metric_path": "metrics/dfs/datanode/writes_from_remote_client",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             }
@@ -606,7 +606,7 @@
           "values": [
             {
               "name": "DataNode Process Network I/O Utilization",
-              "value": "${((dfs.datanode.RemoteBytesRead._rate/dfs.datanode.ReadsFromRemoteClient._rate)+(dfs.datanode.RemoteBytesWritten._rate/dfs.datanode.WritesFromRemoteClient._rate))*50}"
+              "value": "${((dfs.datanode.RemoteBytesRead/dfs.datanode.ReadsFromRemoteClient)+(dfs.datanode.RemoteBytesWritten/dfs.datanode.WritesFromRemoteClient))*50}"
             }
           ],
           "properties": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/dd6fb57d/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/YARN_widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/YARN_widgets.json b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/YARN_widgets.json
index 4b76a17..df91f9a 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/YARN_widgets.json
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/YARN_widgets.json
@@ -438,20 +438,20 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "yarn.NodeManagerMetrics.ContainersFailed._rate",
-              "metric_path": "metrics/yarn/ContainersFailed._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersFailed",
+              "metric_path": "metrics/yarn/ContainersFailed",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
             {
-              "name": "yarn.NodeManagerMetrics.ContainersCompleted._rate",
-              "metric_path": "metrics/yarn/ContainersCompleted._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersCompleted",
+              "metric_path": "metrics/yarn/ContainersCompleted",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
             {
-              "name": "yarn.NodeManagerMetrics.ContainersLaunched._rate",
-              "metric_path": "metrics/yarn/ContainersLaunched._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersLaunched",
+              "metric_path": "metrics/yarn/ContainersLaunched",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
@@ -462,8 +462,8 @@
               "component_name": "NODEMANAGER"
             },
             {
-              "name": "yarn.NodeManagerMetrics.ContainersKilled._rate",
-              "metric_path": "metrics/yarn/ContainersKilled._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersKilled",
+              "metric_path": "metrics/yarn/ContainersKilled",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
@@ -477,7 +477,7 @@
           "values": [
             {
               "name": "Container Failures",
-              "value": "${(yarn.NodeManagerMetrics.ContainersFailed._rate/(yarn.NodeManagerMetrics.ContainersFailed._rate + yarn.NodeManagerMetrics.ContainersCompleted._rate + yarn.NodeManagerMetrics.ContainersLaunched._rate + yarn.NodeManagerMetrics.ContainersIniting + yarn.NodeManagerMetrics.ContainersKilled._rate + yarn.NodeManagerMetrics.ContainersRunning)) * 100}"
+              "value": "${(yarn.NodeManagerMetrics.ContainersFailed/(yarn.NodeManagerMetrics.ContainersFailed + yarn.NodeManagerMetrics.ContainersCompleted + yarn.NodeManagerMetrics.ContainersLaunched + yarn.NodeManagerMetrics.ContainersIniting + yarn.NodeManagerMetrics.ContainersKilled + yarn.NodeManagerMetrics.ContainersRunning)) * 100}"
             }
           ],
           "properties": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/dd6fb57d/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/YARN_widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/YARN_widgets.json b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/YARN_widgets.json
index 782f21d..2bc2f39 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/YARN_widgets.json
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/YARN_widgets.json
@@ -497,20 +497,20 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "yarn.NodeManagerMetrics.ContainersFailed._rate",
-              "metric_path": "metrics/yarn/ContainersFailed._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersFailed",
+              "metric_path": "metrics/yarn/ContainersFailed",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
             {
-              "name": "yarn.NodeManagerMetrics.ContainersCompleted._rate",
-              "metric_path": "metrics/yarn/ContainersCompleted._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersCompleted",
+              "metric_path": "metrics/yarn/ContainersCompleted",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
             {
-              "name": "yarn.NodeManagerMetrics.ContainersLaunched._rate",
-              "metric_path": "metrics/yarn/ContainersLaunched._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersLaunched",
+              "metric_path": "metrics/yarn/ContainersLaunched",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
@@ -521,8 +521,8 @@
               "component_name": "NODEMANAGER"
             },
             {
-              "name": "yarn.NodeManagerMetrics.ContainersKilled._rate",
-              "metric_path": "metrics/yarn/ContainersKilled._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersKilled",
+              "metric_path": "metrics/yarn/ContainersKilled",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
@@ -536,7 +536,7 @@
           "values": [
             {
               "name": "Container Failures",
-              "value": "${(yarn.NodeManagerMetrics.ContainersFailed._rate/(yarn.NodeManagerMetrics.ContainersFailed._rate + yarn.NodeManagerMetrics.ContainersCompleted._rate + yarn.NodeManagerMetrics.ContainersLaunched._rate + yarn.NodeManagerMetrics.ContainersIniting + yarn.NodeManagerMetrics.ContainersKilled._rate + yarn.NodeManagerMetrics.ContainersRunning)) * 100}"
+              "value": "${(yarn.NodeManagerMetrics.ContainersFailed/(yarn.NodeManagerMetrics.ContainersFailed + yarn.NodeManagerMetrics.ContainersCompleted + yarn.NodeManagerMetrics.ContainersLaunched + yarn.NodeManagerMetrics.ContainersIniting + yarn.NodeManagerMetrics.ContainersKilled + yarn.NodeManagerMetrics.ContainersRunning)) * 100}"
             }
           ],
           "properties": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/dd6fb57d/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
index 4a645b0..83ec3dc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/widgets.json
@@ -416,8 +416,8 @@
           "is_visible": true,
           "metrics": [
             {
-              "name": "dfs.datanode.BytesRead._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_read._rate",
+              "name": "dfs.datanode.BytesRead",
+              "metric_path": "metrics/dfs/datanode/bytes_read",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             }
@@ -425,7 +425,7 @@
           "values": [
             {
               "name": "HDFS Bytes Read",
-              "value": "${dfs.datanode.BytesRead._rate}"
+              "value": "${dfs.datanode.BytesRead}"
             }
           ],
           "properties": {
@@ -440,8 +440,8 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "dfs.datanode.BytesWritten._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_written._rate",
+              "name": "dfs.datanode.BytesWritten",
+              "metric_path": "metrics/dfs/datanode/bytes_written",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             }
@@ -449,7 +449,7 @@
           "values": [
             {
               "name": "HDFS Bytes Written",
-              "value": "${dfs.datanode.BytesWritten._rate}"
+              "value": "${dfs.datanode.BytesWritten}"
             }
           ],
           "properties": {
@@ -537,26 +537,26 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "dfs.datanode.BytesRead._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_read._rate",
+              "name": "dfs.datanode.BytesRead",
+              "metric_path": "metrics/dfs/datanode/bytes_read",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.BytesWritten._rate",
-              "metric_path": "metrics/dfs/datanode/bytes_written._rate",
+              "name": "dfs.datanode.BytesWritten",
+              "metric_path": "metrics/dfs/datanode/bytes_written",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.TotalReadTime._rate",
-              "metric_path": "metrics/dfs/datanode/TotalReadTime._rate",
+              "name": "dfs.datanode.TotalReadTime",
+              "metric_path": "metrics/dfs/datanode/TotalReadTime",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.TotalWriteTime._rate",
-              "metric_path": "metrics/dfs/datanode/TotalWriteTime._rate",
+              "name": "dfs.datanode.TotalWriteTime",
+              "metric_path": "metrics/dfs/datanode/TotalWriteTime",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             }
@@ -564,7 +564,7 @@
           "values": [
             {
               "name": "DataNode Process Disk I/O Utilization",
-              "value": "${((dfs.datanode.BytesRead._rate/dfs.datanode.TotalReadTime._rate)+(dfs.datanode.BytesWritten._rate/dfs.datanode.TotalWriteTime._rate))*50}"
+              "value": "${((dfs.datanode.BytesRead/dfs.datanode.TotalReadTime)+(dfs.datanode.BytesWritten/dfs.datanode.TotalWriteTime))*50}"
             }
           ],
           "properties": {
@@ -579,26 +579,26 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "dfs.datanode.RemoteBytesRead._rate",
-              "metric_path": "metrics/dfs/datanode/RemoteBytesRead._rate",
+              "name": "dfs.datanode.RemoteBytesRead",
+              "metric_path": "metrics/dfs/datanode/RemoteBytesRead",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.ReadsFromRemoteClient._rate",
-              "metric_path": "metrics/dfs/datanode/reads_from_remote_client._rate",
+              "name": "dfs.datanode.ReadsFromRemoteClient",
+              "metric_path": "metrics/dfs/datanode/reads_from_remote_client",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.RemoteBytesWritten._rate",
-              "metric_path": "metrics/dfs/datanode/RemoteBytesWritten._rate",
+              "name": "dfs.datanode.RemoteBytesWritten",
+              "metric_path": "metrics/dfs/datanode/RemoteBytesWritten",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             },
             {
-              "name": "dfs.datanode.WritesFromRemoteClient._rate",
-              "metric_path": "metrics/dfs/datanode/writes_from_remote_client._rate",
+              "name": "dfs.datanode.WritesFromRemoteClient",
+              "metric_path": "metrics/dfs/datanode/writes_from_remote_client",
               "service_name": "HDFS",
               "component_name": "DATANODE"
             }
@@ -606,7 +606,7 @@
           "values": [
             {
               "name": "DataNode Process Network I/O Utilization",
-              "value": "${((dfs.datanode.RemoteBytesRead._rate/dfs.datanode.ReadsFromRemoteClient._rate)+(dfs.datanode.RemoteBytesWritten._rate/dfs.datanode.WritesFromRemoteClient._rate))*50}"
+              "value": "${((dfs.datanode.RemoteBytesRead/dfs.datanode.ReadsFromRemoteClient)+(dfs.datanode.RemoteBytesWritten/dfs.datanode.WritesFromRemoteClient))*50}"
             }
           ],
           "properties": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/dd6fb57d/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/YARN_widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/YARN_widgets.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/YARN_widgets.json
index 782f21d..2bc2f39 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/YARN_widgets.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/YARN_widgets.json
@@ -497,20 +497,20 @@
           "is_visible": false,
           "metrics": [
             {
-              "name": "yarn.NodeManagerMetrics.ContainersFailed._rate",
-              "metric_path": "metrics/yarn/ContainersFailed._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersFailed",
+              "metric_path": "metrics/yarn/ContainersFailed",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
             {
-              "name": "yarn.NodeManagerMetrics.ContainersCompleted._rate",
-              "metric_path": "metrics/yarn/ContainersCompleted._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersCompleted",
+              "metric_path": "metrics/yarn/ContainersCompleted",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
             {
-              "name": "yarn.NodeManagerMetrics.ContainersLaunched._rate",
-              "metric_path": "metrics/yarn/ContainersLaunched._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersLaunched",
+              "metric_path": "metrics/yarn/ContainersLaunched",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
@@ -521,8 +521,8 @@
               "component_name": "NODEMANAGER"
             },
             {
-              "name": "yarn.NodeManagerMetrics.ContainersKilled._rate",
-              "metric_path": "metrics/yarn/ContainersKilled._rate",
+              "name": "yarn.NodeManagerMetrics.ContainersKilled",
+              "metric_path": "metrics/yarn/ContainersKilled",
               "service_name": "YARN",
               "component_name": "NODEMANAGER"
             },
@@ -536,7 +536,7 @@
           "values": [
             {
               "name": "Container Failures",
-              "value": "${(yarn.NodeManagerMetrics.ContainersFailed._rate/(yarn.NodeManagerMetrics.ContainersFailed._rate + yarn.NodeManagerMetrics.ContainersCompleted._rate + yarn.NodeManagerMetrics.ContainersLaunched._rate + yarn.NodeManagerMetrics.ContainersIniting + yarn.NodeManagerMetrics.ContainersKilled._rate + yarn.NodeManagerMetrics.ContainersRunning)) * 100}"
+              "value": "${(yarn.NodeManagerMetrics.ContainersFailed/(yarn.NodeManagerMetrics.ContainersFailed + yarn.NodeManagerMetrics.ContainersCompleted + yarn.NodeManagerMetrics.ContainersLaunched + yarn.NodeManagerMetrics.ContainersIniting + yarn.NodeManagerMetrics.ContainersKilled + yarn.NodeManagerMetrics.ContainersRunning)) * 100}"
             }
           ],
           "properties": {


[47/50] [abbrv] ambari git commit: AMBARI-20058 - 'Final' is not shown correctly in the config diff tool and in older versions (rzang)

Posted by nc...@apache.org.
AMBARI-20058 - 'Final' is not shown correctly in the config diff tool and in older versions (rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8dabd55a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8dabd55a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8dabd55a

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 8dabd55a448120336309039a6eb9fae11295c6a0
Parents: db51ba4
Author: Richard Zang <rz...@apache.org>
Authored: Fri Feb 17 11:54:23 2017 -0800
Committer: Richard Zang <rz...@apache.org>
Committed: Fri Feb 17 11:54:23 2017 -0800

----------------------------------------------------------------------
 .../app/templates/common/configs/service_config_category.hbs     | 4 +---
 ambari-web/app/views/common/configs/controls_view.js             | 2 +-
 2 files changed, 2 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8dabd55a/ambari-web/app/templates/common/configs/service_config_category.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/common/configs/service_config_category.hbs b/ambari-web/app/templates/common/configs/service_config_category.hbs
index fbcb828..c9bedc5 100644
--- a/ambari-web/app/templates/common/configs/service_config_category.hbs
+++ b/ambari-web/app/templates/common/configs/service_config_category.hbs
@@ -73,9 +73,7 @@
                         &nbsp;{{t services.service.config.configHistory.configGroup}}</span>
                     {{/if}}
                   {{/if}}
-                  {{#if controller.canEdit}}
-                    {{view App.ControlsView serviceConfigPropertyBinding="this"}}
-                  {{/if}}
+                  {{view App.ControlsView serviceConfigPropertyBinding="this"}}
                   {{#if errorMessage}}
                     <span class="help-block validation-block">{{errorMessage}}</span>
                   {{/if}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8dabd55a/ambari-web/app/views/common/configs/controls_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/controls_view.js b/ambari-web/app/views/common/configs/controls_view.js
index 8894ea6..e9464a3 100644
--- a/ambari-web/app/views/common/configs/controls_view.js
+++ b/ambari-web/app/views/common/configs/controls_view.js
@@ -35,7 +35,7 @@ App.ControlsView = Ember.View.extend({
 
 	showRemove: Em.computed.and('showActions', 'serviceConfigProperty.isEditable', 'serviceConfigProperty.isRemovable'),
 
-	showOverride: Em.computed.and('showActions', 'serviceConfigProperty.isPropertyOverridable'),
+	showOverride: Em.computed.and('showActions', 'serviceConfigProperty.isPropertyOverridable', 'controller.canEdit'),
 
 	showUndo: Em.computed.and('showActions', 'serviceConfigProperty.isEditable', '!serviceConfigProperty.cantBeUndone', 'serviceConfigProperty.isNotDefaultValue'),
 


[39/50] [abbrv] ambari git commit: AMBARI-19845 Secure Ranger passwords in Ambari Stacks (mugdha)

Posted by nc...@apache.org.
AMBARI-19845 Secure Ranger passwords in Ambari Stacks (mugdha)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c395f694
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c395f694
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c395f694

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: c395f6948a1aa8bb62f65b3b7a1fe4c72f662762
Parents: 05c76ed
Author: Mugdha Varadkar <mu...@apache.org>
Authored: Fri Feb 17 15:53:43 2017 +0530
Committer: Mugdha Varadkar <mu...@apache.org>
Committed: Fri Feb 17 16:19:13 2017 +0530

----------------------------------------------------------------------
 .../libraries/functions/constants.py            |  1 +
 .../functions/setup_ranger_plugin_xml.py        | 23 ++++++-
 .../RANGER/0.4.0/package/scripts/params.py      | 18 ++++++
 .../0.4.0/package/scripts/setup_ranger_xml.py   | 67 ++++++++++++++++++--
 .../0.5.0/configuration/ranger-admin-site.xml   | 12 ++++
 .../0.7.0/configuration/ranger-admin-site.xml   | 31 +++++++++
 .../RANGER_KMS/0.5.0.2.3/package/scripts/kms.py | 29 ++++++++-
 .../0.5.0.2.3/package/scripts/params.py         |  4 ++
 .../HDP/2.0.6/properties/stack_features.json    |  5 ++
 .../stacks/2.5/RANGER/test_ranger_admin.py      | 16 ++++-
 .../stacks/2.5/RANGER/test_ranger_usersync.py   |  8 ++-
 .../stacks/2.5/RANGER_KMS/test_kms_server.py    | 50 +++++++++++++--
 .../stacks/2.6/RANGER/test_ranger_admin.py      | 40 +++++++++++-
 .../stacks/2.6/RANGER/test_ranger_tagsync.py    | 19 ++++--
 .../2.6/configs/ranger-admin-default.json       |  6 +-
 15 files changed, 302 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
index 8fd5c8d..c31b883 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
@@ -116,3 +116,4 @@ class StackFeature:
   ATLAS_INSTALL_HOOK_PACKAGE_SUPPORT="atlas_install_hook_package_support"
   ATLAS_HDFS_SITE_ON_NAMENODE_HA='atlas_hdfs_site_on_namenode_ha'
   HIVE_INTERACTIVE_GA_SUPPORT='hive_interactive_ga'
+  SECURE_RANGER_SSL_PASSWORD = "secure_ranger_ssl_password"

http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py b/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
index a12116d..56c46dd 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/setup_ranger_plugin_xml.py
@@ -131,9 +131,17 @@ def setup_ranger_plugin(component_select_name, service_name, previous_jdbc_jar,
         mode = 0644
       )
 
+    # remove plain-text password from xml configs
+    plugin_audit_password_property = 'xasecure.audit.destination.db.password'
+    plugin_audit_properties_copy = {}
+    plugin_audit_properties_copy.update(plugin_audit_properties)
+
+    if plugin_audit_password_property in plugin_audit_properties_copy:
+      plugin_audit_properties_copy[plugin_audit_password_property] = "crypted"
+
     XmlConfig(format('ranger-{service_name}-audit.xml'),
       conf_dir=component_conf_dir,
-      configurations=plugin_audit_properties,
+      configurations=plugin_audit_properties_copy,
       configuration_attributes=plugin_audit_attributes,
       owner = component_user,
       group = component_group,
@@ -147,10 +155,19 @@ def setup_ranger_plugin(component_select_name, service_name, previous_jdbc_jar,
       group = component_group,
       mode=0744)
 
+    # remove plain-text password from xml configs
+    plugin_password_properties = ['xasecure.policymgr.clientssl.keystore.password', 'xasecure.policymgr.clientssl.truststore.password']
+    plugin_policymgr_ssl_properties_copy = {}
+    plugin_policymgr_ssl_properties_copy.update(plugin_policymgr_ssl_properties)
+
+    for prop in plugin_password_properties:
+      if prop in plugin_policymgr_ssl_properties_copy:
+        plugin_policymgr_ssl_properties_copy[prop] = "crypted"
+
     if str(service_name).lower() == 'yarn' :
       XmlConfig("ranger-policymgr-ssl-yarn.xml",
         conf_dir=component_conf_dir,
-        configurations=plugin_policymgr_ssl_properties,
+        configurations=plugin_policymgr_ssl_properties_copy,
         configuration_attributes=plugin_policymgr_ssl_attributes,
         owner = component_user,
         group = component_group,
@@ -158,7 +175,7 @@ def setup_ranger_plugin(component_select_name, service_name, previous_jdbc_jar,
     else:
       XmlConfig("ranger-policymgr-ssl.xml",
         conf_dir=component_conf_dir,
-        configurations=plugin_policymgr_ssl_properties,
+        configurations=plugin_policymgr_ssl_properties_copy,
         configuration_attributes=plugin_policymgr_ssl_attributes,
         owner = component_user,
         group = component_group,

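The pattern added here, and repeated further down in kms.py and the Ranger setup scripts, is deliberately simple: copy the configuration dictionary and overwrite the sensitive keys with a placeholder before the dictionary reaches XmlConfig, so the plain-text password never lands in the rendered *.xml file. A minimal sketch of that step as a reusable helper (the helper itself is illustrative; the patch inlines the copy at each call site):

def mask_passwords(configurations, password_properties, placeholder="crypted"):
    # Shallow-copy the configuration dict and replace any plain-text password
    # values with a placeholder before the dict is rendered to XML.
    masked = {}
    masked.update(configurations)
    for prop in password_properties:
        if prop in masked:
            masked[prop] = placeholder
    return masked

# e.g. for the ranger-policymgr-ssl.xml written above:
# masked = mask_passwords(plugin_policymgr_ssl_properties,
#                         ['xasecure.policymgr.clientssl.keystore.password',
#                          'xasecure.policymgr.clientssl.truststore.password'])
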
http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index 49cd98b..0fae23e 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -73,6 +73,7 @@ stack_supports_ranger_admin_password_change = check_stack_feature(StackFeature.R
 stack_supports_ranger_setup_db_on_start = check_stack_feature(StackFeature.RANGER_SETUP_DB_ON_START, version_for_stack_feature_checks)
 stack_supports_ranger_tagsync_ssl_xml_support = check_stack_feature(StackFeature.RANGER_TAGSYNC_SSL_XML_SUPPORT, version_for_stack_feature_checks)
 stack_supports_ranger_solr_configs = check_stack_feature(StackFeature.RANGER_SOLR_CONFIG_SUPPORT, version_for_stack_feature_checks)
+stack_supports_secure_ssl_password = check_stack_feature(StackFeature.SECURE_RANGER_SSL_PASSWORD, version_for_stack_feature_checks)
 
 downgrade_from_version = default("/commandParams/downgrade_from_version", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
@@ -425,3 +426,20 @@ if is_hbase_ha_enabled:
 if is_namenode_ha_enabled:
   if not is_empty(config['configurations']['ranger-hdfs-plugin-properties']['ranger-hdfs-plugin-enabled']):
     ranger_hdfs_plugin_enabled = config['configurations']['ranger-hdfs-plugin-properties']['ranger-hdfs-plugin-enabled'].lower() == 'yes'
+
+ranger_admin_password_properties = ['ranger.jpa.jdbc.password', 'ranger.jpa.audit.jdbc.password', 'ranger.ldap.bind.password', 'ranger.ldap.ad.bind.password']
+ranger_usersync_password_properties = ['ranger.usersync.ldap.ldapbindpassword']
+ranger_tagsync_password_properties = ['xasecure.policymgr.clientssl.keystore.password', 'xasecure.policymgr.clientssl.truststore.password']
+if stack_supports_secure_ssl_password:
+  ranger_admin_password_properties.extend(['ranger.service.https.attrib.keystore.pass', 'ranger.truststore.password'])
+  ranger_usersync_password_properties.extend(['ranger.usersync.keystore.password', 'ranger.usersync.truststore.password'])
+
+ranger_auth_method = config['configurations']['ranger-admin-site']['ranger.authentication.method']
+ranger_ldap_password_alias = default('/configurations/ranger-admin-site/ranger.ldap.binddn.credential.alias', 'ranger.ldap.bind.password')
+ranger_ad_password_alias = default('/configurations/ranger-admin-site/ranger.ldap.ad.binddn.credential.alias', 'ranger.ldap.ad.bind.password')
+ranger_https_keystore_alias = default('/configurations/ranger-admin-site/ranger.service.https.attrib.keystore.credential.alias', 'keyStoreCredentialAlias')
+ranger_truststore_alias = default('/configurations/ranger-admin-site/ranger.truststore.alias', 'trustStoreAlias')
+https_enabled = config['configurations']['ranger-admin-site']['ranger.service.https.attrib.ssl.enabled']
+http_enabled = config['configurations']['ranger-admin-site']['ranger.service.http.enabled']
+https_keystore_password = config['configurations']['ranger-admin-site']['ranger.service.https.attrib.keystore.pass']
+truststore_password = config['configurations']['ranger-admin-site']['ranger.truststore.password']
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
index acb5385..b3eb919 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
@@ -191,9 +191,17 @@ def setup_ranger_admin(upgrade_type=None):
     only_if=format("ls {ranger_home}/ews/ranger-admin-services.sh"),
     sudo=True)
 
+  # remove plain-text password from xml configs
+
+  ranger_admin_site_copy = {}
+  ranger_admin_site_copy.update(params.config['configurations']['ranger-admin-site'])
+  for prop in params.ranger_admin_password_properties:
+    if prop in ranger_admin_site_copy:
+      ranger_admin_site_copy[prop] = "_"
+
   XmlConfig("ranger-admin-site.xml",
     conf_dir=ranger_conf,
-    configurations=params.config['configurations']['ranger-admin-site'],
+    configurations=ranger_admin_site_copy,
     configuration_attributes=params.config['configuration_attributes']['ranger-admin-site'],
     owner=params.unix_user,
     group=params.unix_group,
@@ -321,6 +329,36 @@ def do_keystore_setup(upgrade_type=None):
       mode = 0640
     )
 
+  if params.ranger_auth_method.upper() == "LDAP":
+    ranger_credential_helper(params.cred_lib_path, params.ranger_ldap_password_alias, params.ranger_usersync_ldap_ldapbindpassword, params.ranger_credential_provider_path)
+
+    File(params.ranger_credential_provider_path,
+      owner = params.unix_user,
+      group = params.unix_group,
+      mode = 0640
+    )
+
+  if params.ranger_auth_method.upper() == "ACTIVE_DIRECTORY":
+    ranger_credential_helper(params.cred_lib_path, params.ranger_ad_password_alias, params.ranger_usersync_ldap_ldapbindpassword, params.ranger_credential_provider_path)
+
+    File(params.ranger_credential_provider_path,
+      owner = params.unix_user,
+      group = params.unix_group,
+      mode = 0640
+    )
+
+  if params.stack_supports_secure_ssl_password:
+    ranger_credential_helper(params.cred_lib_path, params.ranger_truststore_alias, params.truststore_password, params.ranger_credential_provider_path)
+
+    if params.https_enabled and not params.http_enabled:
+      ranger_credential_helper(params.cred_lib_path, params.ranger_https_keystore_alias, params.https_keystore_password, params.ranger_credential_provider_path)
+
+    File(params.ranger_credential_provider_path,
+      owner = params.unix_user,
+      group = params.unix_group,
+      mode = 0640
+    )
+
 def password_validation(password):
   import params
   if password.strip() == "":
@@ -453,9 +491,16 @@ def setup_usersync(upgrade_type=None):
     dst_file = format('{usersync_home}/conf/log4j.xml')
     Execute(('cp', '-f', src_file, dst_file), sudo=True)
 
+  # remove plain-text password from xml configs
+  ranger_ugsync_site_copy = {}
+  ranger_ugsync_site_copy.update(params.config['configurations']['ranger-ugsync-site'])
+  for prop in params.ranger_usersync_password_properties:
+    if prop in ranger_ugsync_site_copy:
+      ranger_ugsync_site_copy[prop] = "_"
+
   XmlConfig("ranger-ugsync-site.xml",
     conf_dir=ranger_ugsync_conf,
-    configurations=params.config['configurations']['ranger-ugsync-site'],
+    configurations=ranger_ugsync_site_copy,
     configuration_attributes=params.config['configuration_attributes']['ranger-ugsync-site'],
     owner=params.unix_user,
     group=params.unix_group,
@@ -750,9 +795,16 @@ def setup_tagsync_ssl_configs():
             mode=0775,
             create_parents=True)
 
+  # remove plain-text password from xml configs
+  ranger_tagsync_policymgr_ssl_copy = {}
+  ranger_tagsync_policymgr_ssl_copy.update(params.config['configurations']['ranger-tagsync-policymgr-ssl'])
+  for prop in params.ranger_tagsync_password_properties:
+    if prop in ranger_tagsync_policymgr_ssl_copy:
+      ranger_tagsync_policymgr_ssl_copy[prop] = "_"
+
   XmlConfig("ranger-policymgr-ssl.xml",
             conf_dir=params.ranger_tagsync_conf,
-            configurations=params.config['configurations']['ranger-tagsync-policymgr-ssl'],
+            configurations=ranger_tagsync_policymgr_ssl_copy,
             configuration_attributes=params.config['configuration_attributes']['ranger-tagsync-policymgr-ssl'],
             owner=params.unix_user,
             group=params.unix_group,
@@ -767,9 +819,16 @@ def setup_tagsync_ssl_configs():
        mode = 0640
        )
 
+  # remove plain-text password from xml configs
+  atlas_tagsync_ssl_copy = {}
+  atlas_tagsync_ssl_copy.update(params.config['configurations']['atlas-tagsync-ssl'])
+  for prop in params.ranger_tagsync_password_properties:
+    if prop in atlas_tagsync_ssl_copy:
+      atlas_tagsync_ssl_copy[prop] = "_"
+
   XmlConfig("atlas-tagsync-ssl.xml",
             conf_dir=params.ranger_tagsync_conf,
-            configurations=params.config['configurations']['atlas-tagsync-ssl'],
+            configurations=atlas_tagsync_ssl_copy,
             configuration_attributes=params.config['configuration_attributes']['atlas-tagsync-ssl'],
             owner=params.unix_user,
             group=params.unix_group,

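The do_keystore_setup additions above push each password into a JCEKS credential store under its configured alias via ranger_credential_helper; the Execute assertions added to test_ranger_admin.py below show the resulting buildks command line. A rough sketch of that invocation in plain Python, with the wrapper function and the example paths taken from the values the tests expect rather than from this patch:

import os
import subprocess

def store_credential(java_home, cred_lib_path, alias, password, provider_path):
    # Store `password` under `alias` in the JCEKS provider at provider_path,
    # mirroring the org.apache.ranger.credentialapi.buildks invocation that
    # the unit tests below assert on.
    cmd = [
        os.path.join(java_home, "bin", "java"),
        "-cp", cred_lib_path,
        "org.apache.ranger.credentialapi.buildks", "create", alias,
        "-value", password,
        "-provider", "jceks://file" + provider_path,
    ]
    subprocess.check_call(cmd, env=dict(os.environ, JAVA_HOME=java_home))

# store_credential("/usr/jdk64/jdk1.7.0_45",
#                  "/usr/hdp/current/ranger-admin/cred/lib/*",
#                  "trustStoreAlias", "changeit",
#                  "/etc/ranger/admin/rangeradmin.jceks")
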
http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/main/resources/common-services/RANGER/0.5.0/configuration/ranger-admin-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.5.0/configuration/ranger-admin-site.xml b/ambari-server/src/main/resources/common-services/RANGER/0.5.0/configuration/ranger-admin-site.xml
index c52924c..f2e23ce 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.5.0/configuration/ranger-admin-site.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.5.0/configuration/ranger-admin-site.xml
@@ -548,4 +548,16 @@
     </value-attributes>
     <on-ambari-upgrade add="true"/>
   </property>
+  <property>
+    <name>ranger.ldap.binddn.credential.alias</name>
+    <value>ranger.ldap.bind.password</value>
+    <description></description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>ranger.ldap.ad.binddn.credential.alias</name>
+    <value>ranger.ldap.ad.bind.password</value>
+    <description></description>
+    <on-ambari-upgrade add="true"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/main/resources/common-services/RANGER/0.7.0/configuration/ranger-admin-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.7.0/configuration/ranger-admin-site.xml b/ambari-server/src/main/resources/common-services/RANGER/0.7.0/configuration/ranger-admin-site.xml
new file mode 100644
index 0000000..ebf8517
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.7.0/configuration/ranger-admin-site.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>ranger.truststore.alias</name>
+    <value>trustStoreAlias</value>
+    <description></description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>ranger.service.https.attrib.keystore.credential.alias</name>
+    <value>keyStoreCredentialAlias</value>
+    <description></description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms.py
index 742cb93..536ba76 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms.py
@@ -271,9 +271,17 @@ def kms(upgrade_type=None):
     if params.stack_support_kms_hsm and params.enable_kms_hsm:
       do_keystore_setup(params.credential_provider_path, params.hms_partition_alias, unicode(params.hms_partition_passwd))
 
+    # remove plain-text password from xml configs
+    dbks_site_copy = {}
+    dbks_site_copy.update(params.config['configurations']['dbks-site'])
+
+    for prop in params.dbks_site_password_properties:
+      if prop in dbks_site_copy:
+        dbks_site_copy[prop] = "_"
+
     XmlConfig("dbks-site.xml",
       conf_dir=params.kms_conf_dir,
-      configurations=params.config['configurations']['dbks-site'],
+      configurations=dbks_site_copy,
       configuration_attributes=params.config['configuration_attributes']['dbks-site'],
       owner=params.kms_user,
       group=params.kms_group,
@@ -421,9 +429,16 @@ def enable_kms_plugin():
       mode = 0644        
     )
 
+    # remove plain-text password from xml configs
+    plugin_audit_properties_copy = {}
+    plugin_audit_properties_copy.update(params.config['configurations']['ranger-kms-audit'])
+
+    if params.plugin_audit_password_property in plugin_audit_properties_copy:
+      plugin_audit_properties_copy[params.plugin_audit_password_property] = "crypted"
+
     XmlConfig("ranger-kms-audit.xml",
       conf_dir=params.kms_conf_dir,
-      configurations=params.config['configurations']['ranger-kms-audit'],
+      configurations=plugin_audit_properties_copy,
       configuration_attributes=params.config['configuration_attributes']['ranger-kms-audit'],
       owner=params.kms_user,
       group=params.kms_group,
@@ -437,9 +452,17 @@ def enable_kms_plugin():
       group=params.kms_group,
       mode=0744)
 
+    # remove plain-text password from xml configs
+    ranger_kms_policymgr_ssl_copy = {}
+    ranger_kms_policymgr_ssl_copy.update(params.config['configurations']['ranger-kms-policymgr-ssl'])
+
+    for prop in params.kms_plugin_password_properties:
+      if prop in ranger_kms_policymgr_ssl_copy:
+        ranger_kms_policymgr_ssl_copy[prop] = "crypted"
+
     XmlConfig("ranger-policymgr-ssl.xml",
       conf_dir=params.kms_conf_dir,
-      configurations=params.config['configurations']['ranger-kms-policymgr-ssl'],
+      configurations=ranger_kms_policymgr_ssl_copy,
       configuration_attributes=params.config['configuration_attributes']['ranger-kms-policymgr-ssl'],
       owner=params.kms_user,
       group=params.kms_group,

http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
index 05e8881..8473160 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
@@ -276,3 +276,7 @@ if security_enabled:
   spengo_keytab = config['configurations']['kms-site']['hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab']
   spnego_principal = config['configurations']['kms-site']['hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal']
   spnego_principal = spnego_principal.replace('_HOST', current_host.lower())
+
+plugin_audit_password_property = 'xasecure.audit.destination.db.password'
+kms_plugin_password_properties = ['xasecure.policymgr.clientssl.keystore.password', 'xasecure.policymgr.clientssl.truststore.password']
+dbks_site_password_properties = ['ranger.db.encrypt.key.password', 'ranger.ks.jpa.jdbc.password', 'ranger.ks.hsm.partition.password']
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
index 0fd1766..5e173b7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
@@ -382,6 +382,11 @@
       "name": "hive_interactive_ga",
       "description": "Hive Interactive GA support",
       "min_version": "2.6.0.0"
+    },
+    {
+      "name": "secure_ranger_ssl_password",
+      "description": "Securing Ranger Admin and Usersync SSL and Trustore related passwords in jceks",
+      "min_version": "2.6.0.0"
     }
   ]
 }

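The new entry only takes effect where a script asks for it: params.py above derives stack_supports_secure_ssl_password from check_stack_feature(StackFeature.SECURE_RANGER_SSL_PASSWORD, version_for_stack_feature_checks), which resolves this entry's min_version against the stack version being managed. A simplified sketch of just that min_version comparison (the real check_stack_feature helper in resource_management does more than this):

def version_tuple(version):
    # "2.6.0.0" -> (2, 6, 0, 0)
    return tuple(int(part) for part in version.split("."))

def feature_supported(feature_name, stack_version, stack_features):
    # stack_features: entries shaped like the stack_features.json block above.
    for feature in stack_features:
        if feature["name"] == feature_name:
            return version_tuple(stack_version) >= version_tuple(feature["min_version"])
    return False

features = [{"name": "secure_ranger_ssl_password", "min_version": "2.6.0.0"}]
print(feature_supported("secure_ranger_ssl_password", "2.6.0.0", features))  # True
print(feature_supported("secure_ranger_ssl_password", "2.5.3.0", features))  # False
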
http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py b/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py
index 1b5d7ae..0d38876 100644
--- a/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py
+++ b/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py
@@ -293,11 +293,17 @@ class TestRangerAdmin(RMFTestCase):
       sudo = True
     )
 
+    ranger_admin_site_copy = {}
+    ranger_admin_site_copy.update(self.getConfig()['configurations']['ranger-admin-site'])
+    for prop in ['ranger.jpa.jdbc.password', 'ranger.jpa.audit.jdbc.password', 'ranger.ldap.bind.password', 'ranger.ldap.ad.bind.password']:
+      if prop in ranger_admin_site_copy:
+        ranger_admin_site_copy[prop] = "_"
+
     self.assertResourceCalled('XmlConfig', 'ranger-admin-site.xml',
       owner = 'ranger',
       group = 'ranger',
       conf_dir = '/usr/hdp/current/ranger-admin/conf',
-      configurations = self.getConfig()['configurations']['ranger-admin-site'],
+      configurations = ranger_admin_site_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-admin-site'],
       mode = 0644
     )
@@ -443,11 +449,17 @@ class TestRangerAdmin(RMFTestCase):
       sudo = True
     )
 
+    ranger_admin_site_copy = {}
+    ranger_admin_site_copy.update(self.getConfig()['configurations']['ranger-admin-site'])
+    for prop in ['ranger.jpa.jdbc.password', 'ranger.jpa.audit.jdbc.password', 'ranger.ldap.bind.password', 'ranger.ldap.ad.bind.password']:
+      if prop in ranger_admin_site_copy:
+        ranger_admin_site_copy[prop] = "_"
+
     self.assertResourceCalled('XmlConfig', 'ranger-admin-site.xml',
       owner = 'ranger',
       group = 'ranger',
       conf_dir = '/usr/hdp/current/ranger-admin/conf',
-      configurations = self.getConfig()['configurations']['ranger-admin-site'],
+      configurations = ranger_admin_site_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-admin-site'],
       mode = 0644
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_usersync.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_usersync.py b/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_usersync.py
index 22e84fc..3f0d21b 100644
--- a/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_usersync.py
+++ b/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_usersync.py
@@ -132,11 +132,17 @@ class TestRangerUsersync(RMFTestCase):
       mode = 0644
     )
 
+    ranger_ugsync_site_copy = {}
+    ranger_ugsync_site_copy.update(self.getConfig()['configurations']['ranger-ugsync-site'])
+    for prop in ['ranger.usersync.ldap.ldapbindpassword']:
+      if prop in ranger_ugsync_site_copy:
+        ranger_ugsync_site_copy[prop] = "_"
+
     self.assertResourceCalled('XmlConfig', 'ranger-ugsync-site.xml',
       owner = 'ranger',
       group = 'ranger',
       conf_dir = '/usr/hdp/current/ranger-usersync/conf',
-      configurations = self.getConfig()['configurations']['ranger-ugsync-site'],
+      configurations = ranger_ugsync_site_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-ugsync-site'],
       mode = 0644
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py b/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
index 57f9f34..c2fc270 100644
--- a/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
+++ b/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
@@ -93,12 +93,18 @@ class TestRangerKMS(RMFTestCase):
       mode = 0644
     )
 
+    plugin_audit_properties_copy = {}
+    plugin_audit_properties_copy.update(self.getConfig()['configurations']['ranger-kms-audit'])
+
+    if 'xasecure.audit.destination.db.password' in plugin_audit_properties_copy:
+      plugin_audit_properties_copy['xasecure.audit.destination.db.password'] = "crypted"
+
     self.assertResourceCalled('XmlConfig', 'ranger-kms-audit.xml',
       mode = 0744,
       owner = 'kms',
       group = 'kms',
       conf_dir = '/usr/hdp/current/ranger-kms/conf',
-      configurations = self.getConfig()['configurations']['ranger-kms-audit'],
+      configurations = plugin_audit_properties_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-kms-audit']
     )
 
@@ -111,12 +117,19 @@ class TestRangerKMS(RMFTestCase):
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-kms-security']
     )
 
+    ranger_kms_policymgr_ssl_copy = {}
+    ranger_kms_policymgr_ssl_copy.update(self.getConfig()['configurations']['ranger-kms-policymgr-ssl'])
+
+    for prop in ['xasecure.policymgr.clientssl.keystore.password', 'xasecure.policymgr.clientssl.truststore.password']:
+      if prop in ranger_kms_policymgr_ssl_copy:
+        ranger_kms_policymgr_ssl_copy[prop] = "crypted"
+
     self.assertResourceCalled('XmlConfig', 'ranger-policymgr-ssl.xml',
       mode = 0744,
       owner = 'kms',
       group = 'kms',
       conf_dir = '/usr/hdp/current/ranger-kms/conf',
-      configurations = self.getConfig()['configurations']['ranger-kms-policymgr-ssl'],
+      configurations = ranger_kms_policymgr_ssl_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-kms-policymgr-ssl']
     )
 
@@ -349,12 +362,18 @@ class TestRangerKMS(RMFTestCase):
       mode = 0640
     )
 
+    dbks_site_copy = {}
+    dbks_site_copy.update(self.getConfig()['configurations']['dbks-site'])
+    for prop in ['ranger.db.encrypt.key.password', 'ranger.ks.jpa.jdbc.password', 'ranger.ks.hsm.partition.password']:
+      if prop in dbks_site_copy:
+        dbks_site_copy[prop] = "_"
+
     self.assertResourceCalled('XmlConfig', 'dbks-site.xml',
       mode=0644,
       owner = 'kms',
       group = 'kms',
       conf_dir = '/usr/hdp/current/ranger-kms/conf',
-      configurations = self.getConfig()['configurations']['dbks-site'],
+      configurations = dbks_site_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['dbks-site']
     )
 
@@ -442,12 +461,18 @@ class TestRangerKMS(RMFTestCase):
       mode = 0644
     )
 
+    plugin_audit_properties_copy = {}
+    plugin_audit_properties_copy.update(self.getConfig()['configurations']['ranger-kms-audit'])
+
+    if 'xasecure.audit.destination.db.password' in plugin_audit_properties_copy:
+      plugin_audit_properties_copy['xasecure.audit.destination.db.password'] = "crypted"
+
     self.assertResourceCalled('XmlConfig', 'ranger-kms-audit.xml',
       mode = 0744,
       owner = 'kms',
       group = 'kms',
       conf_dir = '/usr/hdp/current/ranger-kms/conf',
-      configurations = self.getConfig()['configurations']['ranger-kms-audit'],
+      configurations = plugin_audit_properties_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-kms-audit']
     )
 
@@ -460,12 +485,19 @@ class TestRangerKMS(RMFTestCase):
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-kms-security']
     )
 
+    ranger_kms_policymgr_ssl_copy = {}
+    ranger_kms_policymgr_ssl_copy.update(self.getConfig()['configurations']['ranger-kms-policymgr-ssl'])
+
+    for prop in ['xasecure.policymgr.clientssl.keystore.password', 'xasecure.policymgr.clientssl.truststore.password']:
+      if prop in ranger_kms_policymgr_ssl_copy:
+        ranger_kms_policymgr_ssl_copy[prop] = "crypted"
+
     self.assertResourceCalled('XmlConfig', 'ranger-policymgr-ssl.xml',
       mode = 0744,
       owner = 'kms',
       group = 'kms',
       conf_dir = '/usr/hdp/current/ranger-kms/conf',
-      configurations = self.getConfig()['configurations']['ranger-kms-policymgr-ssl'],
+      configurations = ranger_kms_policymgr_ssl_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-kms-policymgr-ssl']
     )
 
@@ -681,12 +713,18 @@ class TestRangerKMS(RMFTestCase):
       mode = 0640
     )
 
+    dbks_site_copy = {}
+    dbks_site_copy.update(self.getConfig()['configurations']['dbks-site'])
+    for prop in ['ranger.db.encrypt.key.password', 'ranger.ks.jpa.jdbc.password', 'ranger.ks.hsm.partition.password']:
+      if prop in dbks_site_copy:
+        dbks_site_copy[prop] = "_"
+
     self.assertResourceCalled('XmlConfig', 'dbks-site.xml',
       mode=0644,
       owner = 'kms',
       group = 'kms',
       conf_dir = '/usr/hdp/current/ranger-kms/conf',
-      configurations = self.getConfig()['configurations']['dbks-site'],
+      configurations = dbks_site_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['dbks-site']
     )
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_admin.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_admin.py b/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_admin.py
index fb1dd0e..ea3829e 100644
--- a/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_admin.py
+++ b/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_admin.py
@@ -336,11 +336,17 @@ class TestRangerAdmin(RMFTestCase):
       sudo = True
     )
 
+    ranger_admin_site_copy = {}
+    ranger_admin_site_copy.update(self.getConfig()['configurations']['ranger-admin-site'])
+    for prop in ['ranger.jpa.jdbc.password', 'ranger.jpa.audit.jdbc.password', 'ranger.ldap.bind.password', 'ranger.ldap.ad.bind.password', 'ranger.service.https.attrib.keystore.pass', 'ranger.truststore.password']:
+      if prop in ranger_admin_site_copy:
+        ranger_admin_site_copy[prop] = "_"
+
     self.assertResourceCalled('XmlConfig', 'ranger-admin-site.xml',
       owner = 'ranger',
       group = 'ranger',
       conf_dir = '/usr/hdp/current/ranger-admin/conf',
-      configurations = self.getConfig()['configurations']['ranger-admin-site'],
+      configurations = ranger_admin_site_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-admin-site'],
       mode = 0644
     )
@@ -370,6 +376,18 @@ class TestRangerAdmin(RMFTestCase):
       mode = 0640
     )
 
+    self.assertResourceCalled('Execute', ('/usr/jdk64/jdk1.7.0_45/bin/java', '-cp', '/usr/hdp/current/ranger-admin/cred/lib/*', 'org.apache.ranger.credentialapi.buildks', 'create', 'trustStoreAlias', '-value', 'changeit', '-provider', 'jceks://file/etc/ranger/admin/rangeradmin.jceks'),
+      environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+      logoutput=True,
+      sudo = True
+    )
+
+    self.assertResourceCalled('File', '/etc/ranger/admin/rangeradmin.jceks',
+      owner = 'ranger',
+      group = 'ranger',
+      mode = 0640
+    )
+
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'ranger',
       group = 'ranger',
@@ -496,11 +514,17 @@ class TestRangerAdmin(RMFTestCase):
       sudo = True
     )
 
+    ranger_admin_site_copy = {}
+    ranger_admin_site_copy.update(self.getConfig()['configurations']['ranger-admin-site'])
+    for prop in ['ranger.jpa.jdbc.password', 'ranger.jpa.audit.jdbc.password', 'ranger.ldap.bind.password', 'ranger.ldap.ad.bind.password', 'ranger.service.https.attrib.keystore.pass', 'ranger.truststore.password']:
+      if prop in ranger_admin_site_copy:
+        ranger_admin_site_copy[prop] = "_"
+
     self.assertResourceCalled('XmlConfig', 'ranger-admin-site.xml',
       owner = 'ranger',
       group = 'ranger',
       conf_dir = '/usr/hdp/current/ranger-admin/conf',
-      configurations = self.getConfig()['configurations']['ranger-admin-site'],
+      configurations = ranger_admin_site_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-admin-site'],
       mode = 0644
     )
@@ -530,6 +554,18 @@ class TestRangerAdmin(RMFTestCase):
       mode = 0640
     )
 
+    self.assertResourceCalled('Execute', ('/usr/jdk64/jdk1.7.0_45/bin/java', '-cp', '/usr/hdp/current/ranger-admin/cred/lib/*', 'org.apache.ranger.credentialapi.buildks', 'create', 'trustStoreAlias', '-value', 'changeit', '-provider', 'jceks://file/etc/ranger/admin/rangeradmin.jceks'),
+      environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+      logoutput=True,
+      sudo = True
+    )
+
+    self.assertResourceCalled('File', '/etc/ranger/admin/rangeradmin.jceks',
+      owner = 'ranger',
+      group = 'ranger',
+      mode = 0640
+    )
+
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'ranger',
       group = 'ranger',

http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_tagsync.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_tagsync.py b/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_tagsync.py
index bf5128e..0642428 100644
--- a/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_tagsync.py
+++ b/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_tagsync.py
@@ -143,11 +143,18 @@ class TestRangerTagsync(RMFTestCase):
       cd_access = 'a',
     )
 
+    ranger_tagsync_policymgr_ssl_copy = {}
+    ranger_tagsync_policymgr_ssl_copy.update(self.getConfig()['configurations']['ranger-tagsync-policymgr-ssl'])
+    ranger_tagsync_password_properties = ['xasecure.policymgr.clientssl.keystore.password', 'xasecure.policymgr.clientssl.truststore.password']
+    for prop in ranger_tagsync_password_properties:
+      if prop in ranger_tagsync_policymgr_ssl_copy:
+        ranger_tagsync_policymgr_ssl_copy[prop] = "_"
+
     self.assertResourceCalled('XmlConfig', 'ranger-policymgr-ssl.xml',
       owner = 'ranger',
       group = 'ranger',
       conf_dir = '/usr/hdp/current/ranger-tagsync/conf',
-      configurations = self.getConfig()['configurations']['ranger-tagsync-policymgr-ssl'],
+      configurations = ranger_tagsync_policymgr_ssl_copy,
       configuration_attributes = self.getConfig()['configuration_attributes']['ranger-tagsync-policymgr-ssl'],
       mode = 0644,
     )
@@ -188,17 +195,21 @@ class TestRangerTagsync(RMFTestCase):
       mode = 0640,
     )
 
+    atlas_tagsync_ssl_copy = {}
+    atlas_tagsync_ssl_copy.update(self.getConfig()['configurations']['atlas-tagsync-ssl'])
+    for prop in ranger_tagsync_password_properties:
+      if prop in atlas_tagsync_ssl_copy:
+        atlas_tagsync_ssl_copy[prop] = "_"
+
     self.assertResourceCalled('XmlConfig', 'atlas-tagsync-ssl.xml',
       group = 'ranger',
       conf_dir = '/usr/hdp/current/ranger-tagsync/conf',
       mode = 0644,
       configuration_attributes = UnknownConfigurationMock(),
       owner = 'ranger',
-      configurations = self.getConfig()['configurations']['atlas-tagsync-ssl']
+      configurations = atlas_tagsync_ssl_copy
     )
 
-
-
     self.assertResourceCalled('Execute', (u'/usr/jdk64/jdk1.7.0_45/bin/java',
       '-cp',
       u'/usr/hdp/current/ranger-tagsync/lib/*',

http://git-wip-us.apache.org/repos/asf/ambari/blob/c395f694/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-default.json b/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-default.json
index 2c4815b..abe84ab 100644
--- a/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-default.json
+++ b/ambari-server/src/test/python/stacks/2.6/configs/ranger-admin-default.json
@@ -326,7 +326,8 @@
             "ranger.service.http.port": "6080", 
             "ranger.ldap.user.searchfilter": "(uid={0})", 
             "ranger.plugins.atlas.serviceuser": "atlas", 
-            "ranger.truststore.password": "changeit", 
+            "ranger.truststore.password": "changeit",
+            "ranger.truststore.alias": "trustStoreAlias",
             "ranger.ldap.bind.password": "{{ranger_usersync_ldap_ldapbindpassword}}", 
             "ranger.audit.solr.password": "NONE", 
             "ranger.audit.solr.zookeepers": "c6401.ambari.apache.org:2181/infra-solr",
@@ -364,7 +365,8 @@
             "ranger.admin.kerberos.keytab": "", 
             "ranger.admin.kerberos.token.valid.seconds": "30", 
             "ranger.jpa.jdbc.driver": "com.mysql.jdbc.Driver", 
-            "ranger.unixauth.service.port": "5151"
+            "ranger.unixauth.service.port": "5151",
+            "ranger.service.https.attrib.keystore.credential.alias": "keyStoreCredentialAlias"
         }, 
         "ranger-hdfs-policymgr-ssl": {
             "xasecure.policymgr.clientssl.keystore": "/usr/hdp/current/hadoop-client/conf/ranger-plugin-keystore.jks", 


[16/50] [abbrv] ambari git commit: AMBARI-20033. Typecasting to 'long' from the earlier 'float' before setting the following configs in the recommendation: (1) 'llap_concurrency', (2) 'llap_concurrency' max value and (3) 'hive.llap.daemon.num.executors' max value.

Posted by nc...@apache.org.
AMBARI-20033. Typecasting to 'long' from the earlier 'float' before setting the following configs in the recommendation: (1) 'llap_concurrency', (2) 'llap_concurrency' max value and (3) 'hive.llap.daemon.num.executors' max value.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d8c8b4ec
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d8c8b4ec
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d8c8b4ec

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d8c8b4ec4e1a3467279c2b523516175662c03afe
Parents: 0952b8f
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Wed Feb 15 13:32:29 2017 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Wed Feb 15 13:32:29 2017 -0800

----------------------------------------------------------------------
 .../resources/stacks/HDP/2.5/services/stack_advisor.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d8c8b4ec/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 4de9a41..52ada52 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -1031,12 +1031,15 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
                                  mem_per_thread_for_llap, normalized_tez_am_container_size))
       if llap_concurrency == 0:
         llap_concurrency = 1
+        Logger.info("DBG: Readjusted 'llap_concurrency' to : 1. Earlier calculated value : 0")
 
       if llap_concurrency * normalized_tez_am_container_size > hive_tez_am_cap_available:
-        llap_concurrency = math.floor(hive_tez_am_cap_available / normalized_tez_am_container_size)
+        llap_concurrency = long(math.floor(hive_tez_am_cap_available / normalized_tez_am_container_size))
+        Logger.info("DBG: Readjusted 'llap_concurrency' to : {0}, as llap_concurrency({1}) * normalized_tez_am_container_size({2}) > hive_tez_am_cap_available({3}))"
+                    .format(llap_concurrency, llap_concurrency, normalized_tez_am_container_size, hive_tez_am_cap_available))
 
         if llap_concurrency <= 0:
-          Logger.warning("Calculated 'LLAP Concurrent Queries' = {0}. Expected value >= 1.".format(llap_concurrency))
+          Logger.warning("DBG: Calculated 'LLAP Concurrent Queries' = {0}. Expected value >= 1.".format(llap_concurrency))
           self.recommendDefaultLlapConfiguration(configurations, services, hosts)
           return
         Logger.info("DBG: Adjusted 'llap_concurrency' : {0}, using following: hive_tez_am_cap_available : {1}, normalized_tez_am_container_size: "
@@ -1062,8 +1065,8 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
                   ": {2}, MIN_EXECUTOR_TO_AM_RATIO : {3}, MAX_CONCURRENT_QUERIES : {4}".format(max_llap_concurreny_limit, max_executors_per_node,
                                                                                                num_llap_nodes_requested, MIN_EXECUTOR_TO_AM_RATIO,
                                                                                                MAX_CONCURRENT_QUERIES))
-    max_llap_concurreny = min(max_llap_concurreny_limit, math.floor(llap_mem_for_tezAm_and_daemons / (MIN_EXECUTOR_TO_AM_RATIO *
-                                                                                                      mem_per_thread_for_llap + normalized_tez_am_container_size)))
+    max_llap_concurreny = long(min(max_llap_concurreny_limit, math.floor(llap_mem_for_tezAm_and_daemons / (MIN_EXECUTOR_TO_AM_RATIO *
+                                                                                                      mem_per_thread_for_llap + normalized_tez_am_container_size))))
     Logger.info("DBG: Calculated 'max_llap_concurreny' : {0}, using following : max_llap_concurreny_limit : {1}, llap_mem_for_tezAm_and_daemons : "
                   "{2}, MIN_EXECUTOR_TO_AM_RATIO : {3}, mem_per_thread_for_llap : {4}, normalized_tez_am_container_size : "
                   "{5}".format(max_llap_concurreny, max_llap_concurreny_limit, llap_mem_for_tezAm_and_daemons, MIN_EXECUTOR_TO_AM_RATIO,
@@ -1209,7 +1212,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     Logger.info("DBG: Putting num_executors_per_node as {0}".format(num_executors_per_node))
     putHiveInteractiveSiteProperty('hive.llap.daemon.num.executors', num_executors_per_node)
     putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "minimum", 1)
-    putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "maximum", float(num_executors_per_node_max))
+    putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "maximum", long(num_executors_per_node_max))
 
     # 'hive.llap.io.threadpool.size' config value is to be set same as value calculated for
     # 'hive.llap.daemon.num.executors' at all times.


[20/50] [abbrv] ambari git commit: AMBARI-20030 - Zeppelin and Atlas Service Checks Are Missing During Express Upgrades (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-20030 - Zeppelin and Atlas Service Checks Are Missing During Express Upgrades (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d4c1ace8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d4c1ace8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d4c1ace8

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d4c1ace80b46deb7982b1da28025927f00a4123e
Parents: 11618bb
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Feb 15 14:01:18 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Feb 15 23:47:37 2017 -0500

----------------------------------------------------------------------
 .../resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml   | 2 ++
 .../resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml   | 2 ++
 .../resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml   | 2 ++
 3 files changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d4c1ace8/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
index 607f444..f5152a1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
@@ -626,6 +626,8 @@
         <service>HIVE</service>
         <service>SPARK</service>
         <service>SLIDER</service>
+        <service>ATLAS</service>
+        <service>ZEPPELIN</service>
         <service>OOZIE</service>
       </priority>
     </group>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d4c1ace8/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index e92b115..62991e6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -810,6 +810,8 @@
         <service>HIVE</service>
         <service>SPARK</service>
         <service>SLIDER</service>
+        <service>ATLAS</service>
+        <service>ZEPPELIN</service>
         <service>OOZIE</service>
       </priority>
     </group>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d4c1ace8/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
index 5786695..5347f01 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml
@@ -593,6 +593,8 @@
         <service>HIVE</service>
         <service>SPARK</service>
         <service>SLIDER</service>
+        <service>ATLAS</service>
+        <service>ZEPPELIN</service>
         <service>OOZIE</service>
       </priority>
     </group>


[15/50] [abbrv] ambari git commit: AMBARI-20028 Operations do not show up in the operations list without a refresh (dbuzhor)

Posted by nc...@apache.org.
AMBARI-20028 Operations do not show up in the operations list without a refresh (dbuzhor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0952b8ff
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0952b8ff
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0952b8ff

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 0952b8ff89d871962da19918c00357e8565625c1
Parents: 2db72cd
Author: Denys Buzhor <bd...@hortonworks.com>
Authored: Wed Feb 15 19:36:59 2017 +0200
Committer: Denys Buzhor <bd...@hortonworks.com>
Committed: Wed Feb 15 23:16:29 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/controllers/main/service/info/configs.js | 2 +-
 ambari-web/app/mixins/common/track_request_mixin.js     | 3 ++-
 ambari-web/app/routes/main.js                           | 4 ++++
 3 files changed, 7 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0952b8ff/ambari-web/app/controllers/main/service/info/configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/configs.js b/ambari-web/app/controllers/main/service/info/configs.js
index c49bfae..d95a2d2 100644
--- a/ambari-web/app/controllers/main/service/info/configs.js
+++ b/ambari-web/app/controllers/main/service/info/configs.js
@@ -235,7 +235,6 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.AddSecurityConfi
    */
   clearStep: function () {
     this.abortRequests();
-    App.router.get('mainController').stopPolling();
     App.set('componentToBeAdded', {});
     App.set('componentToBeDeleted', {});
     this.clearLoadInfo();
@@ -284,6 +283,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.AddSecurityConfi
    */
   loadStep: function () {
     var serviceName = this.get('content.serviceName'), self = this;
+    App.router.get('mainController').stopPolling();
     this.clearStep();
     this.set('dependentServiceNames', (App.StackService.find(serviceName).get('dependentServiceNames') || []).reduce(function(acc, i) {
       acc.push(i);

http://git-wip-us.apache.org/repos/asf/ambari/blob/0952b8ff/ambari-web/app/mixins/common/track_request_mixin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/track_request_mixin.js b/ambari-web/app/mixins/common/track_request_mixin.js
index 07eaf6e..4efcecb 100644
--- a/ambari-web/app/mixins/common/track_request_mixin.js
+++ b/ambari-web/app/mixins/common/track_request_mixin.js
@@ -37,7 +37,8 @@ App.TrackRequestMixin = Em.Mixin.create({
       completed: ['resolved', 'rejected'].contains(request.state())
     });
     request.always(function() {
-      Em.setProperties(self.get('requestsInProgress').findProperty('id', requestId), {
+      var requestInProgress = self.get('requestsInProgress').findProperty('id', requestId) || {};
+      Em.setProperties(requestInProgress, {
         completed: true,
         status: request.state()
       });

http://git-wip-us.apache.org/repos/asf/ambari/blob/0952b8ff/ambari-web/app/routes/main.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/main.js b/ambari-web/app/routes/main.js
index 9ed2dd1..54f36b2 100644
--- a/ambari-web/app/routes/main.js
+++ b/ambari-web/app/routes/main.js
@@ -248,6 +248,10 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
           router.get('mainController').isLoading.call(router.get('clusterController'), 'isConfigsPropertiesLoaded').done(function () {
             router.get('mainHostDetailsController').connectOutlet('mainHostConfigs');
           });
+        },
+        exitRoute: function (router, context, callback) {
+          router.get('mainController').startPolling();
+          callback();
         }
       }),
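
For context, the track_request_mixin change above guards against a race: a tracked request can settle after its entry has already been cleared from requestsInProgress (e.g. when abortRequests runs during a route change), so findProperty returns undefined and the completion handler would throw. A minimal plain-JavaScript sketch of that guard, not taken from Ambari and runnable on its own:

// Registry of in-flight requests, cleared eagerly on navigation.
const requestsInProgress = [];

function trackRequest(request, requestId) {
  requestsInProgress.push({ id: requestId, completed: false, status: 'pending' });
  request.then(() => {
    // The entry may already be gone, so fall back to a throwaway object
    // instead of setting properties on undefined.
    const entry = requestsInProgress.find(e => e.id === requestId) || {};
    entry.completed = true;
    entry.status = 'resolved';
  });
}

// Usage: the registry is emptied before the request settles; the "|| {}"
// fallback keeps the completion handler from throwing.
const pending = new Promise(resolve => setTimeout(resolve, 10));
trackRequest(pending, 1);
requestsInProgress.length = 0; // simulate clearStep()/abortRequests()
pending.then(() => console.log('settled without throwing'));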
 


[38/50] [abbrv] ambari git commit: AMBARI-20043. Don't rerender all widgets when one of them is changed (onechiporenko)

Posted by nc...@apache.org.
AMBARI-20043. Don't rerender all widgets when one of them is changed (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/05c76ed6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/05c76ed6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/05c76ed6

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 05c76ed609dfcf75e0ff211aa8032834ef8e9f73
Parents: 984b35e
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Thu Feb 16 15:25:48 2017 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Fri Feb 17 10:26:47 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/assets/test/tests.js             |   1 +
 ambari-web/app/data/dashboard_widgets.js        | 196 +++++++++
 ambari-web/app/messages.js                      |   2 +-
 .../app/mixins/common/track_request_mixin.js    |   6 +-
 .../mixins/main/dashboard/widgets/editable.js   |  91 +---
 .../dashboard/widgets/editable_with_limit.js    | 106 +----
 .../widgets/single_numeric_threshold.js         | 127 +-----
 .../main/dashboard/edit_widget_popup.hbs        |  20 +-
 .../edit_widget_popup_single_threshold.hbs      |  12 +-
 ambari-web/app/views.js                         |   1 +
 .../modal_popups/edit_dashboard_widget_popup.js | 436 +++++++++++++++++++
 ambari-web/app/views/main/dashboard/widget.js   | 173 ++------
 ambari-web/app/views/main/dashboard/widgets.js  | 266 ++---------
 .../views/main/dashboard/widgets/text_widget.js |  23 +-
 .../edit_dashboard_widget_popup_test.js         | 214 +++++++++
 .../test/views/main/dashboard/widget_test.js    | 112 +----
 .../test/views/main/dashboard/widgets_test.js   |  10 +-
 17 files changed, 968 insertions(+), 828 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index d47d558..05c1657 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -253,6 +253,7 @@ var files = [
   'test/views/common/widget/template_widget_view_test',
   'test/views/common/widget/heatmap_widget_view_test',
   'test/views/common/modal_popups/cluster_check_popup_test',
+  'test/views/common/modal_popups/edit_dashboard_widget_popup_test',
   'test/views/common/modal_popups/hosts_table_list_popup_test',
   'test/views/common/modal_popups/dependent_configs_list_popup_test',
   'test/views/main/admin_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/data/dashboard_widgets.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/dashboard_widgets.js b/ambari-web/app/data/dashboard_widgets.js
new file mode 100644
index 0000000..d58b0e2
--- /dev/null
+++ b/ambari-web/app/data/dashboard_widgets.js
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+module.exports = [
+  {
+    id: 1,
+    viewName: 'NameNodeHeapPieChartView',
+    sourceName: 'HDFS',
+    title: Em.I18n.t('dashboard.widgets.NameNodeHeap'),
+    threshold: [80, 90]
+  },
+  {
+    id: 2,
+    viewName: 'NameNodeCapacityPieChartView',
+    sourceName: 'HDFS',
+    title: Em.I18n.t('dashboard.widgets.HDFSDiskUsage'),
+    threshold: [85, 95]
+  },
+  {
+    id: 3,
+    viewName: 'NameNodeCpuPieChartView',
+    sourceName: 'HDFS',
+    title: Em.I18n.t('dashboard.widgets.NameNodeCpu'),
+    threshold: [90, 95]
+  },
+  {
+    id: 4,
+    viewName: 'DataNodeUpView',
+    sourceName: 'HDFS',
+    title: Em.I18n.t('dashboard.widgets.DataNodeUp'),
+    threshold: [80, 90]
+  },
+  {
+    id: 5,
+    viewName: 'NameNodeRpcView',
+    sourceName: 'HDFS',
+    title: Em.I18n.t('dashboard.widgets.NameNodeRpc'),
+    threshold: [1000, 3000]
+  },
+  {
+    id: 6,
+    viewName: 'ChartClusterMetricsMemoryWidgetView',
+    sourceName: 'HOST_METRICS',
+    title: Em.I18n.t('dashboard.clusterMetrics.memory'),
+    threshold: []
+  },
+  {
+    id: 7,
+    viewName: 'ChartClusterMetricsNetworkWidgetView',
+    sourceName: 'HOST_METRICS',
+    title: Em.I18n.t('dashboard.clusterMetrics.network'),
+    threshold: []
+  },
+  {
+    id: 8,
+    viewName: 'ChartClusterMetricsCPUWidgetView',
+    sourceName: 'HOST_METRICS',
+    title: Em.I18n.t('dashboard.clusterMetrics.cpu'),
+    threshold: []
+  },
+  {
+    id: 9,
+    viewName: 'ChartClusterMetricsLoadWidgetView',
+    sourceName: 'HOST_METRICS',
+    title: Em.I18n.t('dashboard.clusterMetrics.load'),
+    threshold: []
+  },
+  {
+    id: 10,
+    viewName: 'NameNodeUptimeView',
+    sourceName: 'HDFS',
+    title: Em.I18n.t('dashboard.widgets.NameNodeUptime'),
+    threshold: []
+  },
+  {
+    id: 11,
+    viewName: 'HDFSLinksView',
+    sourceName: 'HDFS',
+    title: Em.I18n.t('dashboard.widgets.HDFSLinks'),
+    threshold: []
+  },
+  {
+    id: 12,
+    viewName: 'HBaseLinksView',
+    sourceName: 'HBASE',
+    title: Em.I18n.t('dashboard.widgets.HBaseLinks'),
+    threshold: []
+  },
+  {
+    id: 13,
+    viewName: 'HBaseMasterHeapPieChartView',
+    sourceName: 'HBASE',
+    title: Em.I18n.t('dashboard.widgets.HBaseMasterHeap'),
+    threshold: [70, 90]
+  },
+  {
+    id: 14,
+    viewName: 'HBaseAverageLoadView',
+    sourceName: 'HBASE',
+    title: Em.I18n.t('dashboard.widgets.HBaseAverageLoad'),
+    threshold: [150, 250]
+  },
+  {
+    id: 15,
+    viewName: 'HBaseRegionsInTransitionView',
+    sourceName: 'HBASE',
+    title: Em.I18n.t('dashboard.widgets.HBaseRegionsInTransition'),
+    threshold: [3, 10],
+    isHiddenByDefault: true
+  },
+  {
+    id: 16,
+    viewName: 'HBaseMasterUptimeView',
+    sourceName: 'HBASE',
+    title: Em.I18n.t('dashboard.widgets.HBaseMasterUptime'),
+    threshold: []
+  },
+  {
+    id: 17,
+    viewName: 'ResourceManagerHeapPieChartView',
+    sourceName: 'YARN',
+    title: Em.I18n.t('dashboard.widgets.ResourceManagerHeap'),
+    threshold: [70, 90]
+  },
+  {
+    id: 18,
+    viewName: 'ResourceManagerUptimeView',
+    sourceName: 'YARN',
+    title: Em.I18n.t('dashboard.widgets.ResourceManagerUptime'),
+    threshold: []
+  },
+  {
+    id: 19,
+    viewName: 'NodeManagersLiveView',
+    sourceName: 'YARN',
+    title: Em.I18n.t('dashboard.widgets.NodeManagersLive'),
+    threshold: [50, 75]
+  },
+  {
+    id: 20,
+    viewName: 'YARNMemoryPieChartView',
+    sourceName: 'YARN',
+    title: Em.I18n.t('dashboard.widgets.YARNMemory'),
+    threshold: [50, 75]
+  },
+  {
+    id: 21,
+    viewName: 'SuperVisorUpView',
+    sourceName: 'STORM',
+    title: Em.I18n.t('dashboard.widgets.SuperVisorUp'),
+    threshold: [85, 95]
+  },
+  {
+    id: 22,
+    viewName: 'FlumeAgentUpView',
+    sourceName: 'FLUME',
+    title: Em.I18n.t('dashboard.widgets.FlumeAgentUp'),
+    threshold: [85, 95]
+  },
+  {
+    id: 23,
+    viewName: 'YARNLinksView',
+    sourceName: 'YARN',
+    title: Em.I18n.t('dashboard.widgets.YARNLinks'),
+    threshold: []
+  },
+  {
+    id: 24,
+    viewName: 'HawqSegmentUpView',
+    sourceName: 'HAWQ',
+    title: Em.I18n.t('dashboard.widgets.HawqSegmentUp'),
+    threshold: [75, 90]
+  },
+  {
+    id: 25,
+    viewName: 'PxfUpView',
+    sourceName: 'PXF',
+    title: Em.I18n.t('dashboard.widgets.PxfUp'),
+    threshold: []
+  }
+];
\ No newline at end of file
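
The new dashboard_widgets.js module above is a flat array of widget descriptors (id, viewName, sourceName, title, threshold, optional isHiddenByDefault). As an illustration only -- the helper below is hypothetical and uses literal titles in place of the Em.I18n lookups -- such an array can be filtered per service like this:

// Sample descriptors in the same shape as the module's entries.
const widgetDefinitions = [
  { id: 4, viewName: 'DataNodeUpView', sourceName: 'HDFS', title: 'DataNodes Live', threshold: [80, 90] },
  { id: 15, viewName: 'HBaseRegionsInTransitionView', sourceName: 'HBASE', title: 'HBase Regions In Transition', threshold: [3, 10], isHiddenByDefault: true },
  { id: 21, viewName: 'SuperVisorUpView', sourceName: 'STORM', title: 'Supervisors Live', threshold: [85, 95] }
];

// Widgets shown by default for a given service.
function defaultWidgetsFor(sourceName) {
  return widgetDefinitions.filter(w => w.sourceName === sourceName && !w.isHiddenByDefault);
}

console.log(defaultWidgetsFor('HDFS').map(w => w.viewName)); // ['DataNodeUpView']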

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 81833f3..5d69b53 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -2860,7 +2860,7 @@ Em.I18n.translations = {
   'dashboard.widgets.NodeManagersLive': 'NodeManagers Live',
   'dashboard.widgets.YARNMemory': 'YARN Memory',
   'dashboard.widgets.YARNLinks': 'YARN Links',
-  'dashboard.widgets.error.invalid': 'Invalid! Enter a number between 0 - {0}',
+  'dashboard.widgets.error.invalid': 'Invalid! Enter a number between {0} - {1}',
   'dashboard.widgets.error.smaller': 'Threshold 1 should be smaller than threshold 2!',
   'dashboard.widgets.HawqSegmentUp': 'HAWQ Segments Live',
   'dashboard.widgets.PxfUp': 'PXF Agents Live',

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/mixins/common/track_request_mixin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/track_request_mixin.js b/ambari-web/app/mixins/common/track_request_mixin.js
index c665253..dd97b97 100644
--- a/ambari-web/app/mixins/common/track_request_mixin.js
+++ b/ambari-web/app/mixins/common/track_request_mixin.js
@@ -38,14 +38,14 @@ App.TrackRequestMixin = Em.Mixin.create({
     this.get('requestsInProgress').pushObject({
       request: request,
       id: requestId,
-      status: request.state(),
-      completed: ['resolved', 'rejected'].contains(request.state())
+      status: Em.tryInvoke(request, 'state'),
+      completed: ['resolved', 'rejected'].contains(Em.tryInvoke(request, 'state'))
     });
     request.always(function() {
       var requestInProgress = self.get('requestsInProgress').findProperty('id', requestId) || {};
       Em.setProperties(requestInProgress, {
         completed: true,
-        status: request.state()
+        status: Em.tryInvoke(request, 'state')
       });
     });
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/mixins/main/dashboard/widgets/editable.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/main/dashboard/widgets/editable.js b/ambari-web/app/mixins/main/dashboard/widgets/editable.js
index fcb6bfb..482ff6a 100644
--- a/ambari-web/app/mixins/main/dashboard/widgets/editable.js
+++ b/ambari-web/app/mixins/main/dashboard/widgets/editable.js
@@ -20,93 +20,22 @@ var App = require('app');
 
 App.EditableWidgetMixin = Em.Mixin.create({
 
-  hintInfo: '',
-
   editWidget: function () {
-    var self = this;
-    var configObj = Ember.Object.create({
-      thresholdMin: self.get('thresholdMin') + '',
-      thresholdMax: self.get('thresholdMax') + '',
-      hintInfo: self.get('hintInfo'),
-      isThresh1Error: false,
-      isThresh2Error: false,
-      errorMessage1: "",
-      errorMessage2: "",
-      maxValue: 'infinity',
-      observeNewThresholdValue: function () {
-        var thresholdMin = this.get('thresholdMin');
-        var thresholdMax = this.get('thresholdMax');
-        if (thresholdMin.trim() !== "") {
-          if (isNaN(thresholdMin) || thresholdMin < 0) {
-            this.set('isThresh1Error', true);
-            this.set('errorMessage1', 'Invalid! Enter a number larger than 0');
-          } else if ( this.get('isThresh2Error') === false && parseFloat(thresholdMax)<= parseFloat(thresholdMin)){
-            this.set('isThresh1Error', true);
-            this.set('errorMessage1', 'Threshold 1 should be smaller than threshold 2 !');
-          } else {
-            this.set('isThresh1Error', false);
-            this.set('errorMessage1', '');
-          }
-        } else {
-          this.set('isThresh1Error', true);
-          this.set('errorMessage1', 'This is required');
-        }
-
-        if (thresholdMax.trim() !== "") {
-          if (isNaN(thresholdMax) || thresholdMax < 0) {
-            this.set('isThresh2Error', true);
-            this.set('errorMessage2', 'Invalid! Enter a number larger than 0');
-          } else {
-            this.set('isThresh2Error', false);
-            this.set('errorMessage2', '');
-          }
-        } else {
-          this.set('isThresh2Error', true);
-          this.set('errorMessage2', 'This is required');
-        }
+    return App.EditDashboardWidgetPopup.show({
 
-      }.observes('thresholdMin', 'thresholdMax')
+      widgetView: this,
 
-    });
-
-    App.ModalPopup.show({
-      header: Em.I18n.t('dashboard.widgets.popupHeader'),
-      classNames: [ 'modal-edit-widget'],
-      modalDialogClasses: ['modal-lg'],
-      bodyClass: Ember.View.extend({
-        templateName: require('templates/main/dashboard/edit_widget_popup'),
-        configPropertyObj: configObj
+      sliderHandlersManager: App.EditDashboardWidgetPopup.DoubleHandlers.create({
+        maxValue: 'infinity',
+        thresholdMin: this.get('thresholdMin'),
+        thresholdMax: this.get('thresholdMax')
       }),
-      primary: Em.I18n.t('common.apply'),
-      onPrimary: function () {
-        configObj.observeNewThresholdValue();
-        if (!configObj.isThresh1Error && !configObj.isThresh2Error) {
-
-          var parent = self.get('parentView');
-          var userPreferences = parent.get('userPreferences');
-          userPreferences.threshold[Number(self.get('id'))] = [configObj.get('thresholdMin'), configObj.get('thresholdMax')];
-          parent.saveWidgetsSettings(userPreferences);
-          parent.renderWidgets();
-          this.hide();
-        }
-      },
 
-      didInsertElement: function () {
-        this._super();
-        var colors = [App.healthStatusGreen, App.healthStatusOrange, App.healthStatusRed]; //color green, orange ,red
-        var handlers = [33, 66]; //fixed value
+      sliderDisabled: true,
+      sliderHandlers: [33, 66],
+      sliderMaxValue: 100,
+      sliderColors: [App.healthStatusGreen, App.healthStatusOrange, App.healthStatusRed]
 
-        $("#slider-range").slider({
-          range: true,
-          disabled: true, //handlers cannot move
-          min: 0,
-          max: 100,
-          values: handlers,
-          create: function (event, ui) {
-            self.updateColors(handlers, colors);
-          }
-        });
-      }
     });
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/mixins/main/dashboard/widgets/editable_with_limit.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/main/dashboard/widgets/editable_with_limit.js b/ambari-web/app/mixins/main/dashboard/widgets/editable_with_limit.js
index ddf2a26..1cb96df 100644
--- a/ambari-web/app/mixins/main/dashboard/widgets/editable_with_limit.js
+++ b/ambari-web/app/mixins/main/dashboard/widgets/editable_with_limit.js
@@ -23,110 +23,18 @@ var App = require('app');
  */
 App.EditableWithLimitWidgetMixin = Em.Mixin.create({
 
-  hintInfo: '',
-
   editWidget: function () {
-    var parent = this;
-    var maxTmp = parseFloat(parent.get('maxValue'));
-    var configObj = Ember.Object.create({
-      thresholdMin: parent.get('thresholdMin') + '',
-      thresholdMax: parent.get('thresholdMax') + '',
-      hintInfo: parent.get('hintInfo'),
-      thresholdMinError: false,
-      thresholdMaxError: false,
-      thresholdMinErrorMessage: '',
-      thresholdMaxErrorMessage: '',
-      maxValue: maxTmp,
-      observeNewThresholdValue: function () {
-        var thresholdMin = this.get('thresholdMin');
-        var thresholdMax = this.get('thresholdMax');
-        if (thresholdMin.trim() !== '') {
-          if (isNaN(thresholdMin) || thresholdMin > maxTmp || thresholdMin < 0){
-            this.set('thresholdMinError', true);
-            this.set('thresholdMinErrorMessage', 'Invalid! Enter a number between 0 - ' + maxTmp);
-          } else if ( this.get('thresholdMaxError') === false && parseFloat(thresholdMax)<= parseFloat(thresholdMin)) {
-            this.set('thresholdMinError', true);
-            this.set('thresholdMinErrorMessage', 'Threshold 1 should be smaller than threshold 2 !');
-          } else {
-            this.set('thresholdMinError', false);
-            this.set('thresholdMinErrorMessage', '');
-          }
-        } else {
-          this.set('thresholdMinError', true);
-          this.set('thresholdMinErrorMessage', 'This is required');
-        }
-
-        if (thresholdMax.trim() !== '') {
-          if (isNaN(thresholdMax) || thresholdMax > maxTmp || thresholdMax < 0) {
-            this.set('thresholdMaxError', true);
-            this.set('thresholdMaxErrorMessage', 'Invalid! Enter a number between 0 - ' + maxTmp);
-          } else {
-            this.set('thresholdMaxError', false);
-            this.set('thresholdMaxErrorMessage', '');
-          }
-        } else {
-          this.set('thresholdMaxError', true);
-          this.set('thresholdMaxErrorMessage', 'This is required');
-        }
-
-        // update the slider handles and color
-        if (!this.get('thresholdMinError') && !this.get('thresholdMaxError')) {
-          $("#slider-range").slider('values', 0 , parseFloat(thresholdMin));
-          $("#slider-range").slider('values', 1 , parseFloat(thresholdMax));
-        }
-      }.observes('thresholdMin', 'thresholdMax')
-
-    });
 
-    App.ModalPopup.show({
-      header: Em.I18n.t('dashboard.widgets.popupHeader'),
-      classNames: ['modal-edit-widget'],
-      modalDialogClasses: ['modal-lg'],
-      bodyClass: Ember.View.extend({
-        templateName: require('templates/main/dashboard/edit_widget_popup'),
-        configPropertyObj: configObj
-      }),
-      primary: Em.I18n.t('common.apply'),
-      onPrimary: function () {
-        configObj.observeNewThresholdValue();
-        if (!configObj.thresholdMinError && !configObj.thresholdMaxError) {
-          parent.set('thresholdMin', parseFloat(configObj.get('thresholdMin')) );
-          parent.set('thresholdMax', parseFloat(configObj.get('thresholdMax')) );
-          if (!App.get('testMode')) {
-            var bigParent = parent.get('parentView');
-            bigParent.getUserPref(bigParent.get('persistKey'));
-            var oldValue = bigParent.get('currentPrefObject');
-            oldValue.threshold[parseInt(parent.id, 10)] = [configObj.get('thresholdMin'), configObj.get('thresholdMax')];
-            bigParent.postUserPref(bigParent.get('persistKey'),oldValue);
-          }
-          this.hide();
-        }
-      },
+    return App.EditDashboardWidgetPopup.show({
 
-      didInsertElement: function () {
-        this._super();
-        var handlers = [configObj.get('thresholdMin'), configObj.get('thresholdMax')];
-        var colors = [App.healthStatusRed, App.healthStatusOrange, App.healthStatusGreen]; //color red, orange, green
+      widgetView: this,
 
-        $("#slider-range").slider({
-          range: true,
-          min: 0,
-          max: maxTmp,
-          values: handlers,
-          create: function () {
-            parent.updateColors(handlers, colors);
-          },
-          slide: function (event, ui) {
-            parent.updateColors(ui.values, colors);
-            configObj.set('thresholdMin', ui.values[0] + '');
-            configObj.set('thresholdMax', ui.values[1] + '');
-          },
-          change: function (event, ui) {
-            parent.updateColors(ui.values, colors);
-          }
-        });
+      sliderHandlersManager: App.EditDashboardWidgetPopup.DoubleHandlers.create({
+        maxValue: parseFloat(this.get('maxValue')),
+        thresholdMin: this.get('thresholdMin'),
+        thresholdMax: this.get('thresholdMax')
+      })
 
-      }
     });
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/mixins/main/dashboard/widgets/single_numeric_threshold.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/main/dashboard/widgets/single_numeric_threshold.js b/ambari-web/app/mixins/main/dashboard/widgets/single_numeric_threshold.js
index 06bcffe..36172b7 100644
--- a/ambari-web/app/mixins/main/dashboard/widgets/single_numeric_threshold.js
+++ b/ambari-web/app/mixins/main/dashboard/widgets/single_numeric_threshold.js
@@ -23,126 +23,25 @@ var App = require('app');
  */
 App.SingleNumericThresholdMixin = Em.Mixin.create({
 
-  /**
-   * @type {Em.Object}
-   * @class
-   */
-  widgetConfig: Ember.Object.extend({
-    thresholdMin: '',
-    hintInfo: '',
-    isThresh1Error: false,
-    errorMessage1: "",
-
-    maxValue: 0,
-    observeThresh1Value: function () {
-      var thresholdMin = this.get('thresholdMin');
-      var maxValue = this.get('maxValue');
-
-      if (thresholdMin.trim() !== "") {
-        if (isNaN(thresholdMin) || thresholdMin > maxValue || thresholdMin < 0) {
-          this.set('isThresh1Error', true);
-          this.set('errorMessage1', Em.I18n.t('dashboard.widgets.error.invalid').format(maxValue));
-        } else {
-          this.set('isThresh1Error', false);
-          this.set('errorMessage1', '');
-        }
-      } else {
-        this.set('isThresh1Error', true);
-        this.set('errorMessage1', Em.I18n.t('admin.users.editError.requiredField'));
-      }
-        this.updateSlider();
-    }.observes('thresholdMin', 'maxValue'),
-
-    updateSlider: function () {
-      var thresholdMin = this.get('thresholdMin');
-      // update the slider handles and color
-      if (this.get('isThresh1Error') === false) {
-        $("#slider-range")
-          .slider('values', 0, parseFloat(thresholdMin))
-      }
-    }
-  }),
-
-  /**
-   * edit widget
-   * @param {object} event
-   */
   editWidget: function () {
-    var parent = this;
-    var maxTmp = parseFloat(this.get('maxValue'));
-    var configObj = this.get('widgetConfig').create({
-      thresholdMin: this.get('thresholdMin') + '',
-      hintInfo: this.get('hintInfo') + '',
-      maxValue: parseFloat(this.get('maxValue'))
-    });
+    return App.EditDashboardWidgetPopup.show({
+
+      widgetView: this,
 
-    App.ModalPopup.show({
-        header: Em.I18n.t('dashboard.widgets.popupHeader'),
-        classNames: ['modal-edit-widget'],
-        modalDialogClasses: ['modal-lg'],
-        bodyClass: Ember.View.extend({
-          templateName: require('templates/main/dashboard/edit_widget_popup_single_threshold'),
-          configPropertyObj: configObj
-        }),
-        primary: Em.I18n.t('common.apply'),
-        onPrimary: function () {
-          configObj.observeThresh1Value();
-          if (!configObj.isThresh1Error) {
-            var bigParent = parent.get('parentView');
-            parent.set('thresholdMin', parseFloat(configObj.get('thresholdMin')));
-            if (!App.get('testMode')) {
-              // save to persist
-              var userPreferences = bigParent.get('userPreferences');
-              userPreferences.threshold[parseInt(parent.get('id'), 10)] = [configObj.get('thresholdMin')];
-              bigParent.saveWidgetsSettings(userPreferences);
-              bigParent.renderWidgets();
-            }
-            this.hide();
-          }
-        },
-        didInsertElement: function () {
-          this._super();
-          var handlers = [configObj.get('thresholdMin')];
-          var _this = this;
+      sliderHandlersManager: App.EditDashboardWidgetPopup.SingleHandler.create({
+        thresholdMin: this.get('thresholdMin'),
+        maxValue: parseFloat(this.get('maxValue'))
+      }),
 
-          $("#slider-range").slider({
-            range: false,
-            min: 0,
-            max: maxTmp,
-            values: handlers,
-            create: function () {
-              _this.updateColors(handlers);
-            },
-            slide: function (event, ui) {
-              _this.updateColors(ui.values);
-              configObj.set('thresholdMin', ui.values[0] + '');
-            },
-            change: function (event, ui) {
-              _this.updateColors(ui.values);
-            }
-          });
-        },
+      bodyClass: App.EditDashboardWidgetPopup.EditDashboardWidgetPopupBody.extend({
+        templateName: require('templates/main/dashboard/edit_widget_popup_single_threshold')
+      }),
 
-      updateColors: function (handlers) {
-        var colors = [App.healthStatusGreen, App.healthStatusRed]; //color green,red
-        var colorstops = colors[0] + ", "; // start with the first color
-        for (var i = 0; i < handlers.length; i++) {
-          colorstops += colors[i] + " " + handlers[i] * 100 / maxTmp + "%,";
-          colorstops += colors[i + 1] + " " + handlers[i] * 100 / maxTmp + "%,";
-        }
-        colorstops += colors[colors.length - 1];
-        var sliderElement = $('#slider-range');
-        var css1 = '-webkit-linear-gradient(left,' + colorstops + ')'; // chrome & safari
-        sliderElement.css('background-image', css1);
-        var css2 = '-ms-linear-gradient(left,' + colorstops + ')'; // IE 10+
-        sliderElement.css('background-image', css2);
-        var css3 = '-moz-linear-gradient(left,' + colorstops + ')'; // Firefox
-        sliderElement.css('background-image', css3);
+      sliderIsRange: false,
 
-        sliderElement.find('.ui-widget-header').css({'background-color': '#FF8E00', 'background-image': 'none'}); // change the  original ranger color
-      }
+      sliderColors: [App.healthStatusGreen, App.healthStatusRed]
 
-      });
+    });
 
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/templates/main/dashboard/edit_widget_popup.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/edit_widget_popup.hbs b/ambari-web/app/templates/main/dashboard/edit_widget_popup.hbs
index 7caf085..60c9741 100644
--- a/ambari-web/app/templates/main/dashboard/edit_widget_popup.hbs
+++ b/ambari-web/app/templates/main/dashboard/edit_widget_popup.hbs
@@ -18,7 +18,7 @@
 <form class="form-horizontal" autocomplete="off">
     <div class="each-row">
         <div class="alert alert-info">
-          {{{view.configPropertyObj.hintInfo}}}
+          {{{view.parentView.widgetView.hintInfo}}}
         </div>
     </div>
 
@@ -32,20 +32,20 @@
         <div id="slider-value1" class="value-on-slider col-md-2">
           <input type="text" value="0" disabled="disabled" class="form-control" />
         </div>
-        <div id="slider-value2" {{bindAttr class="view.configPropertyObj.thresholdMinError:slider-error :value-on-slider :col-md-4 view.configPropertyObj.thresholdMinError:has-error"}}>
-          {{view Ember.TextField class="form-control" valueBinding="view.configPropertyObj.thresholdMin"}}
-          {{#if view.configPropertyObj.thresholdMinErrorMessage}}
-            <span class="help-block validation-block">{{view.configPropertyObj.thresholdMinErrorMessage}}</span>
+        <div id="slider-value2" {{bindAttr class="view.parentView.sliderHandlersManager.thresholdMinError:slider-error :value-on-slider :col-md-4 view.parentView.sliderHandlersManager.thresholdMinError:has-error"}}>
+          {{view Ember.TextField class="form-control" valueBinding="view.parentView.sliderHandlersManager.thresholdMin"}}
+          {{#if view.parentView.sliderHandlersManager.thresholdMinError}}
+            <span class="help-block validation-block">{{view.parentView.sliderHandlersManager.thresholdMinErrorMessage}}</span>
           {{/if}}
         </div>
-        <div id="slider-value3" {{bindAttr class="view.configPropertyObj.thresholdMaxError:slider-error :value-on-slider :col-md-4 view.configPropertyObj.thresholdMaxError:has-error"}}>
-          {{view Ember.TextField class="form-control" valueBinding="view.configPropertyObj.thresholdMax"}}
-          {{#if view.configPropertyObj.thresholdMaxErrorMessage}}
-            <span class="help-block validation-block">{{view.configPropertyObj.thresholdMaxErrorMessage}}</span>
+        <div id="slider-value3" {{bindAttr class="view.parentView.sliderHandlersManager.thresholdMaxError:slider-error :value-on-slider :col-md-4 view.parentView.sliderHandlersManager.thresholdMaxError:has-error"}}>
+          {{view Ember.TextField class="form-control" valueBinding="view.parentView.sliderHandlersManager.thresholdMax"}}
+          {{#if view.parentView.sliderHandlersManager.thresholdMaxError}}
+            <span class="help-block validation-block">{{view.parentView.sliderHandlersManager.thresholdMaxErrorMessage}}</span>
           {{/if}}
         </div>
         <div id="slider-value4" class="value-on-slider col-md-2">
-          {{view Em.TextField valueBinding="view.configPropertyObj.maxValue" classNames="form-control" disabled="disabled"}}
+          {{view Em.TextField valueBinding="view.parentView.sliderHandlersManager.maxValue" classNames="form-control" disabled="disabled"}}
         </div>
     </div>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/templates/main/dashboard/edit_widget_popup_single_threshold.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/dashboard/edit_widget_popup_single_threshold.hbs b/ambari-web/app/templates/main/dashboard/edit_widget_popup_single_threshold.hbs
index 86ffb47..416852e 100644
--- a/ambari-web/app/templates/main/dashboard/edit_widget_popup_single_threshold.hbs
+++ b/ambari-web/app/templates/main/dashboard/edit_widget_popup_single_threshold.hbs
@@ -19,7 +19,7 @@
 <form class="form-horizontal" autocomplete="off">
   <div class="each-row">
     <div class="alert alert-info">
-      {{{view.configPropertyObj.hintInfo}}}
+      {{{view.parentView.widgetView.hintInfo}}}
     </div>
   </div>
 
@@ -33,14 +33,14 @@
     <div id="slider-value1" class="value-on-slider col-md-2">
       <input type="text" value="0" disabled="disabled" class="form-control" />
     </div>
-      <div id="slider-value2" {{bindAttr class="view.configPropertyObj.isThresh1Error:slider-error :value-on-slider :col-md-4 :col-md-offset-2 :col-sm-offset-2 view.configPropertyObj.isThresh1Error:has-error"}}>
-        {{view Ember.TextField valueBinding="view.configPropertyObj.thresholdMin" class="form-control"}}
-        {{#if view.configPropertyObj.errorMessage1}}
-          <span class="help-block validation-block">{{view.configPropertyObj.errorMessage1}}</span>
+      <div id="slider-value2" {{bindAttr class="view.parentView.sliderHandlersManager.thresholdMinError:slider-error :value-on-slider :col-md-4 :col-md-offset-2 :col-sm-offset-2 view.parentView.sliderHandlersManager.thresholdMinError:has-error"}}>
+        {{view Ember.TextField valueBinding="view.parentView.sliderHandlersManager.thresholdMin" class="form-control"}}
+        {{#if view.parentView.sliderHandlersManager.thresholdMinErrorMessage}}
+          <span class="help-block validation-block">{{view.parentView.sliderHandlersManager.thresholdMinErrorMessage}}</span>
         {{/if}}
       </div>
     <div id="slider-value3" class="value-on-slider col-md-2 col-md-offset-2 col-sm-offset-2">
-      {{view Em.TextField valueBinding="view.configPropertyObj.maxValue" classNames="form-control" disabled="disabled"}}
+      {{view Em.TextField valueBinding="view.parentView.sliderHandlersManager.maxValue" classNames="form-control" disabled="disabled"}}
     </div>
   </div>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/views.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views.js b/ambari-web/app/views.js
index 77b5d5a..6972d2a 100644
--- a/ambari-web/app/views.js
+++ b/ambari-web/app/views.js
@@ -33,6 +33,7 @@ require('views/common/chart/linear');
 require('views/common/chart/linear_time');
 require('views/common/modal_popup');
 require('views/common/modal_popups/alert_popup');
+require('views/common/modal_popups/edit_dashboard_widget_popup');
 require('views/common/modal_popups/manage_kdc_credentials_popup');
 require('views/common/modal_popups/confirmation_feedback_popup');
 require('views/common/modal_popups/confirmation_popup');

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/views/common/modal_popups/edit_dashboard_widget_popup.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/modal_popups/edit_dashboard_widget_popup.js b/ambari-web/app/views/common/modal_popups/edit_dashboard_widget_popup.js
new file mode 100644
index 0000000..7ea1b32
--- /dev/null
+++ b/ambari-web/app/views/common/modal_popups/edit_dashboard_widget_popup.js
@@ -0,0 +1,436 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+const {isValidFloat} = require('utils/validator');
+
+/**
+ * Thresholds manager for sliders with single value
+ * Usage:
+ * <pre>
+ * SingleHandler.create({
+ *    thresholdMin: 12,
+ *    minValue: 10,
+ *    maxValue: 100
+ * });
+ * </pre>
+ *
+ * @class SingleHandler
+ */
+const SingleHandler = Em.Object.extend({
+
+  /**
+   * @type {number}
+   */
+  thresholdMin: null,
+
+  /**
+   * @type {number}
+   * @default 0
+   */
+  minValue: 0,
+
+  /**
+   * @type {number}
+   */
+  maxValue: null,
+
+  /**
+   * Is <code>thresholdMin</code> invalid
+   * true - invalid
+   * false - valid
+   *
+   * @type {boolean}
+   */
+  thresholdMinError: Em.computed.bool('thresholdMinErrorMessage'),
+
+  /**
+   * Alias for <code>thresholdMinError</code>
+   *
+   * @type {boolean}
+   */
+  hasErrors: Em.computed.alias('thresholdMinError'),
+
+  /**
+   * Error message for <code>thresholdMin</code>
+   * <ul>
+   *  <li>Value is not a number</li>
+   *  <li>Value is out of range <code>(minValue - maxValue)</code></li>
+   * </ul>
+   * Empty message means that <code>thresholdMin</code> has valid value
+   *
+   * @type {string}
+   */
+  thresholdMinErrorMessage: function () {
+    var thresholdMin = this.get('thresholdMin');
+    var maxValue = this.get('maxValue');
+    var minValue = this.get('minValue');
+    if (!isValidFloat(thresholdMin) || thresholdMin > maxValue || thresholdMin < minValue) {
+      return Em.I18n.t('dashboard.widgets.error.invalid').format(minValue, maxValue);
+    }
+    return '';
+  }.property('thresholdMin', 'maxValue'),
+
+  /**
+   * Formatted threshold value
+   *
+   * @type {number[]}
+   */
+  preparedThresholds: function () {
+    return [parseFloat(this.get('thresholdMin'))];
+  }.property('thresholdMin'),
+
+  /**
+   * Force set new values for threshold
+   *
+   * @param {number[]} newValues
+   */
+  updateThresholds(newValues) {
+    this.set('thresholdMin', newValues[0]);
+  }
+
+});
+
+/**
+ * Thresholds manager for sliders with double values
+ * Usage:
+ * <pre>
+ * SingleHandler.create({
+ *    thresholdMin: 12,
+ *    thresholdMax: 40,
+ *    minValue: 10,
+ *    maxValue: 100
+ * });
+ * </pre>
+ *
+ * @class DoubleHandlers
+ */
+const DoubleHandlers = SingleHandler.extend({
+
+  /**
+   * @type {number}
+   */
+  thresholdMax: null,
+
+  /**
+   * Is <code>thresholdMax</code> invalid
+   * true - invalid
+   * false - valid
+   *
+   * @type {boolean}
+   */
+  thresholdMaxError: Em.computed.bool('thresholdMaxErrorMessage'),
+
+  /**
+   * Is some threshold invalid
+   * true - some one is invalid
+   * false - thresholds are valid
+   *
+   * @type {boolean}
+   */
+  hasErrors: Em.computed.or('thresholdMinError', 'thresholdMaxError'),
+
+  /**
+   * Error message for <code>thresholdMin</code>
+   * <ul>
+   *  <li>Value is not a number</li>
+   *  <li>Value is out of range <code>(minValue - maxValue)</code></li>
+   *  <li><code>thresholdMin</code>-value greater than <code>thresholdMax</code>-value</li>
+   * </ul>
+   * Empty message means that <code>thresholdMin</code> has valid value
+   *
+   * @type {string}
+   */
+  thresholdMinErrorMessage: function () {
+    var thresholdMin = this.get('thresholdMin');
+    var thresholdMax = this.get('thresholdMax');
+    var maxValue = this.get('maxValue');
+    var minValue = this.get('minValue');
+    if (!isValidFloat(thresholdMin) || thresholdMin > maxValue || thresholdMin < minValue) {
+      return Em.I18n.t('dashboard.widgets.error.invalid').format(minValue, maxValue);
+    }
+    if (this.get('thresholdMaxError') === false && thresholdMax <= thresholdMin) {
+      return Em.I18n.t('dashboard.widgets.error.smaller');
+    }
+    return '';
+  }.property('thresholdMin', 'thresholdMax'),
+
+  /**
+   * Error message for <code>thresholdMax</code>
+   * <ul>
+   *  <li>Value is not a number</li>
+   *  <li>Value is out of range <code>(minValue - maxValue)</code></li>
+   * </ul>
+   * Empty message means that <code>thresholdMax</code> has valid value
+   *
+   * @type {string}
+   */
+  thresholdMaxErrorMessage: function () {
+    var thresholdMax = this.get('thresholdMax');
+    var maxValue = this.get('maxValue');
+    var minValue = this.get('minValue');
+    if (!isValidFloat(thresholdMax) || thresholdMax > maxValue || thresholdMax < minValue) {
+      return Em.I18n.t('dashboard.widgets.error.invalid').format(minValue, maxValue);
+    }
+    return '';
+  }.property('thresholdMax'),
+
+  /**
+   * Threshold values ready to save
+   *
+   * @type {number[]}
+   */
+  preparedThresholds: function () {
+    return [parseFloat(this.get('thresholdMin')), parseFloat(this.get('thresholdMax'))];
+  }.property('thresholdMin', 'thresholdMax'),
+
+  /**
+   * Force set new values for threshold
+   *
+   * @param {number[]} newValues
+   */
+  updateThresholds(newValues) {
+    this.set('thresholdMin', newValues[0]);
+    this.set('thresholdMax', newValues[1]);
+  }
+
+});
+
+/**
+ * Common body-view for popup with sliders
+ *
+ * @class EditDashboardWidgetPopupBody
+ */
+const EditDashboardWidgetPopupBody = Em.View.extend({
+  templateName: require('templates/main/dashboard/edit_widget_popup')
+});
+
+/**
+ * Popup with slider to edit dashboard widget
+ * Usage:
+ * <pre>
+ *   App.EditDashboardWidgetPopup.show({
+ *    widgetView: this,
+ *    sliderHandlersManager: App.EditDashboardWidgetPopup.DoubleHandlers.create({
+ *      maxValue: 100,
+ *      thresholdMin: this.get('thresholdMin'),
+ *      thresholdMax: this.get('thresholdMax')
+ *    })
+ *  });
+ * </pre>
+ *
+ * <code>widgetView</code> should be set to view with widget.
+ * Usually you will use <code>App.EditDashboardWidgetPopup</code> inside of some <code>App.DashboardWidgetView</code> instance,
+ * so <code>widgetView</code> may be set to <code>this</code>
+ * <code>sliderHandlersManager</code> should be set to some of the <code>App.EditDashboardWidgetPopup.SingleHandler</code>
+ * or <code>App.EditDashboardWidgetPopup.DoubleHandler</code>
+ *
+ * You can't use <code>App.EditDashboardWidgetPopup</code> without setting this two properties!
+ *
+ * @class App.EditDashboardWidgetPopup
+ */
+App.EditDashboardWidgetPopup = App.ModalPopup.extend({
+
+  header: Em.I18n.t('dashboard.widgets.popupHeader'),
+  classNames: ['modal-edit-widget'],
+  modalDialogClasses: ['modal-lg'],
+  primary: Em.I18n.t('common.apply'),
+  disablePrimary: Em.computed.alias('sliderHandlersManager.hasErrors'),
+
+  /**
+   * Can't be null or undefined
+   *
+   * @type {SingleHandler|DoubleHandlers}
+   */
+  sliderHandlersManager: null,
+
+  /**
+   * Widget view
+   * Can't be not a view. Used to save Thresholds. Normally it's an instance of <code>App.DashboardWidgetView</code>
+   *
+   * @type {Em.View}
+   */
+  widgetView: null,
+
+  /**
+   * Determines if slider is enabled for slide
+   * true - don't enabled
+   * false - enabled
+   *
+   * Determines if slider handlers should be updated with Threshold values
+   * true - don't update
+   * false - update
+   *
+   * Used as option <code>disabled</code> for $.ui.slider
+   *
+   * @type {boolean}
+   */
+  sliderDisabled: false,
+
+  /**
+   * Slider "ticks"
+   * Used as option <code>values</code> for $.ui.slider
+   *
+   * @type {number[]}
+   */
+  sliderHandlers: function () {
+    return this.get('sliderHandlersManager.preparedThresholds');
+  }.property('sliderHandlersManager.preparedThresholds.[]'),
+
+  /**
+   * Colors used for slider ranges
+   * @type {string[]}
+   */
+  sliderColors: [App.healthStatusRed, App.healthStatusOrange, App.healthStatusGreen],
+
+  /**
+   * Maximum value for slider
+   * Used as option <code>max</code> for $.ui.slider
+   *
+   * @type {number}
+   */
+  sliderMaxValue: Em.computed.alias('sliderHandlersManager.maxValue'),
+
+  /**
+   * Minimum value for slider
+   * Used as option <code>min</code> for $.ui.slider
+   *
+   * @type {number}
+   */
+  sliderMinValue: 0,
+
+  /**
+   * Check how many handlers has slider
+   * true - 2 handlers
+   * false - 1 handler
+   *
+   * Used as option <code>range</code> for $.ui.slider
+   *
+   * @type {boolean}
+   */
+  sliderIsRange: true,
+
+  bodyClass: EditDashboardWidgetPopupBody,
+
+  init() {
+    Em.assert('`widgetView` should be valid view', this.get('widgetView.isView'));
+    Em.assert('`sliderHandlersManager` should be set', !!this.get('sliderHandlersManager'));
+    return this._super(...arguments);
+  },
+
+  /**
+   * Save new threshold value on popup-close (means Primary click)
+   * Use <code>widgetView</code> to get <code>widgetsView</code> and save new values
+   * Current widget is updated too (without redrawing)
+   */
+  saveThreshold () {
+    let preparedThresholds = this.get('sliderHandlersManager.preparedThresholds');
+    this.get('widgetView').saveWidgetThresholds(preparedThresholds);
+  },
+
+  /**
+   * Update slider values when new threshold values are provided
+   * Don't do anything if some value is invalid or slider is disabled
+   *
+   * @private
+   */
+  _updateSliderValues: function() {
+    var sliderHandlersManager = this.get('sliderHandlersManager');
+    if (!sliderHandlersManager.get('hasErrors') && !this.get('sliderDisabled')) {
+      $('#slider-range').slider('values', sliderHandlersManager.get('preparedThresholds'));
+    }
+  }.observes('sliderDisabled', 'sliderHandlersManager.preparedThresholds.[]', 'sliderHandlersManager.hasErrors'),
+
+  onPrimary () {
+    let sliderHandlersManager = this.get('sliderHandlersManager');
+    if (!sliderHandlersManager.get('hasErrors')) {
+      this.saveThreshold();
+      this.hide();
+    }
+  },
+
+  /**
+   * Create slider in the popup when it's opened
+   */
+  createSlider() {
+    var self = this;
+    let sliderHandlersManager = this.get('sliderHandlersManager');
+    var handlers = this.get('sliderHandlers');
+
+    $('#slider-range').slider({
+      range: this.get('sliderIsRange'),
+      min: this.get('sliderMinValue'),
+      max: this.get('sliderMaxValue'),
+      disabled: this.get('sliderDisabled'),
+      values: handlers,
+      create: function () {
+        self.updateSliderColors(handlers);
+      },
+      slide: function (event, ui) {
+        self.updateSliderColors(ui.values);
+        sliderHandlersManager.updateThresholds(ui.values);
+      },
+      change: function (event, ui) {
+        self.updateSliderColors(ui.values);
+      }
+    });
+  },
+
+  didInsertElement: function () {
+    this._super();
+    this.createSlider();
+  },
+
+  /**
+   * Update colors on slider using <code>sliderColors</code> theme when user interacts with it
+   *
+   * @param {number[]} handlers
+   */
+  updateSliderColors(handlers) {
+    let gradient = this._getGradientStr(handlers);
+    $('#slider-range')
+      .css('background-image', '-webkit-' + gradient)
+      .css('background-image', '-ms-' + gradient)
+      .css('background-image', '-moz-' + gradient)
+      .find('.ui-widget-header').css({
+        'background-color': App.healthStatusOrange,
+        'background-image': 'none'
+      });
+  },
+
+  /**
+   * @param {number[]} handlers
+   * @returns {string}
+   * @private
+   */
+  _getGradientStr(handlers) {
+    let maxValue = this.get('sliderMaxValue');
+    let colors = this.get('sliderColors');
+    let gradient = colors[0] + ', ' + handlers.map((handler, i) => {
+        return `${colors[i]}  ${handlers[i] * 100 / maxValue}%, ${colors[i + 1]} ${handlers[i] * 100 / maxValue}%,`;
+      }).join('') + colors[colors.length - 1];
+    return `linear-gradient(left,${gradient})`;
+  }
+
+});
+
+App.EditDashboardWidgetPopup.reopenClass({
+  SingleHandler,
+  DoubleHandlers,
+  EditDashboardWidgetPopupBody
+});
\ No newline at end of file
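
For reference, a plain-JavaScript, Ember-free sketch of the validation rules the SingleHandler/DoubleHandlers managers above encode: each threshold must be a number inside [minValue, maxValue], and the lower threshold must stay strictly below the upper one, matching the updated two-placeholder 'dashboard.widgets.error.invalid' message. The function name and return shape are illustrative, not part of the commit.

function validateThresholds({ thresholdMin, thresholdMax, minValue = 0, maxValue }) {
  const inRange = v => Number.isFinite(v) && v >= minValue && v <= maxValue;
  const errors = {};
  if (!inRange(thresholdMax)) {
    errors.thresholdMax = `Invalid! Enter a number between ${minValue} - ${maxValue}`;
  }
  if (!inRange(thresholdMin)) {
    errors.thresholdMin = `Invalid! Enter a number between ${minValue} - ${maxValue}`;
  } else if (!errors.thresholdMax && thresholdMax <= thresholdMin) {
    // Mirrors DoubleHandlers: only flag ordering once the upper bound itself is valid.
    errors.thresholdMin = 'Threshold 1 should be smaller than threshold 2!';
  }
  return errors;
}

// Example: upper threshold above the slider maximum, lower threshold fine.
console.log(validateThresholds({ thresholdMin: 70, thresholdMax: 120, maxValue: 100 }));
// => { thresholdMax: 'Invalid! Enter a number between 0 - 100' }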

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/views/main/dashboard/widget.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widget.js b/ambari-web/app/views/main/dashboard/widget.js
index e7a626c..9fba906 100644
--- a/ambari-web/app/views/main/dashboard/widget.js
+++ b/ambari-web/app/views/main/dashboard/widget.js
@@ -30,9 +30,19 @@ App.DashboardWidgetView = Em.View.extend({
 
   sourceName: Em.computed.alias('widget.sourceName'),
 
+  /**
+   * Bound from template
+   *
+   * @type {object}
+   */
   widget: null,
 
   /**
+   * @type {Em.View}
+   */
+  widgetsView: Em.computed.alias('parentView'),
+
+  /**
    * @type {object} - record from model that serve as data source
    */
   model: function () {
@@ -79,6 +89,11 @@ App.DashboardWidgetView = Em.View.extend({
   hiddenInfoClass: "hidden-info-two-line",
 
   /**
+   * @type {string}
+   */
+  hintInfo: Em.computed.i18nFormat('dashboard.widgets.hintInfo.common', 'maxValue'),
+
+  /**
    * @type {number}
    * @default 0
    */
@@ -100,61 +115,6 @@ App.DashboardWidgetView = Em.View.extend({
    */
   isDataLoadedBinding: 'App.router.clusterController.isServiceContentFullyLoaded',
 
-  /**
-   * @type {Em.Object}
-   * @class
-   */
-  widgetConfig: Ember.Object.extend({
-    thresholdMin: '',
-    thresholdMax: '',
-    hintInfo: Em.computed.i18nFormat('dashboard.widgets.hintInfo.common', 'maxValue'),
-    thresholdMinError: false,
-    thresholdMaxError: false,
-    thresholdMinErrorMessage: "",
-    thresholdMaxErrorMessage: "",
-    maxValue: 0,
-    validateThreshold: function(thresholdName) {
-      var thresholdMin = this.get('thresholdMin'),
-       thresholdMax = this.get('thresholdMax'),
-       maxValue = this.get('maxValue'),
-       currentThreshold = this.get(thresholdName),
-       isError = false,
-       errorMessage = '';
-
-      if (currentThreshold.trim() !== "") {
-        if (isNaN(currentThreshold) || currentThreshold > maxValue || currentThreshold < 0) {
-          isError = true;
-          errorMessage = Em.I18n.t('dashboard.widgets.error.invalid').format(maxValue);
-        } else if (parseFloat(thresholdMax) <= parseFloat(thresholdMin)) {
-          isError = true;
-          errorMessage = Em.I18n.t('dashboard.widgets.error.smaller');
-        } else {
-          isError = false;
-          errorMessage = '';
-        }
-      } else {
-        isError = true;
-        errorMessage = Em.I18n.t('admin.users.editError.requiredField');
-      }
-      this.set(thresholdName + 'ErrorMessage', errorMessage);
-      this.set(thresholdName + 'Error', isError);
-      this.updateSlider();
-    },
-    observeThreshMinValue: function () {
-      this.validateThreshold('thresholdMin');
-    }.observes('thresholdMin', 'maxValue'),
-    observeThreshMaxValue: function () {
-      this.validateThreshold('thresholdMax');
-    }.observes('thresholdMax', 'maxValue'),
-    updateSlider: function () {
-      if (this.get('thresholdMinError') === false && this.get('thresholdMaxError') === false) {
-        $("#slider-range")
-          .slider('values', 0, parseFloat(this.get('thresholdMin')))
-          .slider('values', 1, parseFloat(this.get('thresholdMax')));
-      }
-    }
-  }),
-
   didInsertElement: function () {
     App.tooltip(this.$("[rel='ZoomInTooltip']"), {
       placement: 'left',
@@ -190,97 +150,36 @@ App.DashboardWidgetView = Em.View.extend({
   },
 
   /**
-   * edit widget
+   * Update thresholds for the widget and save them to the persisted user preferences
+   *
+   * @param {number[]} preparedThresholds
    */
-  editWidget: function () {
-    var configObj = this.get('widgetConfig').create({
-      thresholdMin: this.get('thresholdMin') + '',
-      thresholdMax: this.get('thresholdMax') + '',
-      maxValue: parseFloat(this.get('maxValue'))
-    });
-    this.showEditDialog(configObj);
+  saveWidgetThresholds(preparedThresholds) {
+    const widgetsView = this.get('widgetsView');
+    const userPreferences = widgetsView.get('userPreferences');
+    const widgetId = Number(this.get('id'));
+    userPreferences.threshold[widgetId] = preparedThresholds;
+    this.set('widget.threshold', userPreferences.threshold[widgetId]);
+    widgetsView.saveWidgetsSettings(userPreferences);
   },
 
   /**
-   *  show edit dialog
-   * @param {Em.Object} configObj
-   * @returns {App.ModalPopup}
+   * edit widget
    */
-  showEditDialog: function (configObj) {
-    var self = this;
-    var maxValue = this.get('maxValue');
-
-    return App.ModalPopup.show({
-      header: Em.I18n.t('dashboard.widgets.popupHeader'),
-      classNames: ['modal-edit-widget'],
-      modalDialogClasses: ['modal-lg'],
-      bodyClass: Ember.View.extend({
-        templateName: require('templates/main/dashboard/edit_widget_popup'),
-        configPropertyObj: configObj
-      }),
-      configObj: configObj,
-      disablePrimary: Em.computed.or('configObj.thresholdMinError', 'configObj.thresholdMaxError'),
-      primary: Em.I18n.t('common.apply'),
-      onPrimary: function () {
-        configObj.observeThreshMinValue();
-        configObj.observeThreshMaxValue();
-        if (!configObj.thresholdMinError && !configObj.thresholdMaxError) {
-          self.set('thresholdMin', parseFloat(configObj.get('thresholdMin')));
-          self.set('thresholdMax', parseFloat(configObj.get('thresholdMax')));
-
-          var parent = self.get('parentView');
-          var userPreferences = parent.get('userPreferences');
-          userPreferences.threshold[Number(self.get('id'))] = [configObj.get('thresholdMin'), configObj.get('thresholdMax')];
-          parent.saveWidgetsSettings(userPreferences);
-          parent.renderWidgets();
-
-          this.hide();
-        }
-      },
+  editWidget: function () {
+    return App.EditDashboardWidgetPopup.show({
 
-      didInsertElement: function () {
-        this._super();
-        var _this = this;
-        var handlers = [configObj.get('thresholdMin'), configObj.get('thresholdMax')];
+      widgetView: this,
 
-        $("#slider-range").slider({
-          range: true,
-          min: 0,
-          max: maxValue,
-          values: handlers,
-          create: function () {
-            _this.updateColors(handlers);
-          },
-          slide: function (event, ui) {
-            _this.updateColors(ui.values);
-            configObj.set('thresholdMin', ui.values[0] + '');
-            configObj.set('thresholdMax', ui.values[1] + '');
-          },
-          change: function (event, ui) {
-            _this.updateColors(ui.values);
-          }
-        });
-      },
-      updateColors: function (handlers) {
-        var colors = [App.healthStatusGreen, App.healthStatusOrange, App.healthStatusRed];
-        var colorStops = colors[0] + ", ";
+      sliderHandlersManager: App.EditDashboardWidgetPopup.DoubleHandlers.create({
+        thresholdMin: this.get('thresholdMin'),
+        thresholdMax: this.get('thresholdMax'),
+        maxValue: parseFloat(this.get('maxValue'))
+      }),
 
-        for (var i = 0; i < handlers.length; i++) {
-          colorStops += colors[i] + " " + handlers[i] * 100 / maxValue + "%,";
-          colorStops += colors[i + 1] + " " + handlers[i] * 100 / maxValue + "%,";
-        }
-        colorStops += colors[colors.length - 1];
-        var sliderElement = $('#slider-range');
-        var gradient = 'linear-gradient(left,' + colorStops + ')';
+      sliderColors: [App.healthStatusGreen, App.healthStatusOrange, App.healthStatusRed]
 
-        sliderElement.css('background-image', '-webkit-' + gradient);
-        sliderElement.css('background-image', '-ms-' + gradient);
-        sliderElement.css('background-image', '-moz-' + gradient);
-        sliderElement.find('.ui-widget-header').css({
-          'background-color': '#FF8E00',
-          'background-image': 'none'
-        });
-      }
     });
   }
+
 });
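
For a widget that tracks a single threshold, the same popup could presumably be wired up with the SingleHandler manager instead of DoubleHandlers; a hypothetical sketch (no widget in this patch does this, and the two-color scale is only illustrative):

  editWidget: function () {
    return App.EditDashboardWidgetPopup.show({
      widgetView: this,
      sliderHandlersManager: App.EditDashboardWidgetPopup.SingleHandler.create({
        thresholdMin: this.get('thresholdMin'),
        maxValue: parseFloat(this.get('maxValue'))
      }),
      sliderColors: [App.healthStatusGreen, App.healthStatusRed]
    });
  }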

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/views/main/dashboard/widgets.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets.js b/ambari-web/app/views/main/dashboard/widgets.js
index 572625d..16840a5 100644
--- a/ambari-web/app/views/main/dashboard/widgets.js
+++ b/ambari-web/app/views/main/dashboard/widgets.js
@@ -32,184 +32,7 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
   name: 'mainDashboardWidgetsView',
   templateName: require('templates/main/dashboard/widgets'),
 
-  widgetsDefinition: [
-    {
-      id: 1,
-      viewName: 'NameNodeHeapPieChartView',
-      sourceName: 'HDFS',
-      title: Em.I18n.t('dashboard.widgets.NameNodeHeap'),
-      threshold: [80, 90]
-    },
-    {
-      id: 2,
-      viewName: 'NameNodeCapacityPieChartView',
-      sourceName: 'HDFS',
-      title: Em.I18n.t('dashboard.widgets.HDFSDiskUsage'),
-      threshold: [85, 95]
-    },
-    {
-      id: 3,
-      viewName: 'NameNodeCpuPieChartView',
-      sourceName: 'HDFS',
-      title: Em.I18n.t('dashboard.widgets.NameNodeCpu'),
-      threshold: [90, 95]
-    },
-    {
-      id: 4,
-      viewName: 'DataNodeUpView',
-      sourceName: 'HDFS',
-      title: Em.I18n.t('dashboard.widgets.DataNodeUp'),
-      threshold: [80, 90]
-    },
-    {
-      id: 5,
-      viewName: 'NameNodeRpcView',
-      sourceName: 'HDFS',
-      title: Em.I18n.t('dashboard.widgets.NameNodeRpc'),
-      threshold: [1000, 3000]
-    },
-    {
-      id: 6,
-      viewName: 'ChartClusterMetricsMemoryWidgetView',
-      sourceName: 'HOST_METRICS',
-      title: Em.I18n.t('dashboard.clusterMetrics.memory'),
-      threshold: []
-    },
-    {
-      id: 7,
-      viewName: 'ChartClusterMetricsNetworkWidgetView',
-      sourceName: 'HOST_METRICS',
-      title: Em.I18n.t('dashboard.clusterMetrics.network'),
-      threshold: []
-    },
-    {
-      id: 8,
-      viewName: 'ChartClusterMetricsCPUWidgetView',
-      sourceName: 'HOST_METRICS',
-      title: Em.I18n.t('dashboard.clusterMetrics.cpu'),
-      threshold: []
-    },
-    {
-      id: 9,
-      viewName: 'ChartClusterMetricsLoadWidgetView',
-      sourceName: 'HOST_METRICS',
-      title: Em.I18n.t('dashboard.clusterMetrics.load'),
-      threshold: []
-    },
-    {
-      id: 10,
-      viewName: 'NameNodeUptimeView',
-      sourceName: 'HDFS',
-      title: Em.I18n.t('dashboard.widgets.NameNodeUptime'),
-      threshold: []
-    },
-    {
-      id: 11,
-      viewName: 'HDFSLinksView',
-      sourceName: 'HDFS',
-      title: Em.I18n.t('dashboard.widgets.HDFSLinks'),
-      threshold: []
-    },
-    {
-      id: 12,
-      viewName: 'HBaseLinksView',
-      sourceName: 'HBASE',
-      title: Em.I18n.t('dashboard.widgets.HBaseLinks'),
-      threshold: []
-    },
-    {
-      id: 13,
-      viewName: 'HBaseMasterHeapPieChartView',
-      sourceName: 'HBASE',
-      title: Em.I18n.t('dashboard.widgets.HBaseMasterHeap'),
-      threshold: [70, 90]
-    },
-    {
-      id: 14,
-      viewName: 'HBaseAverageLoadView',
-      sourceName: 'HBASE',
-      title: Em.I18n.t('dashboard.widgets.HBaseAverageLoad'),
-      threshold: [150, 250]
-    },
-    {
-      id: 15,
-      viewName: 'HBaseRegionsInTransitionView',
-      sourceName: 'HBASE',
-      title: Em.I18n.t('dashboard.widgets.HBaseRegionsInTransition'),
-      threshold: [3, 10],
-      isHiddenByDefault: true
-    },
-    {
-      id: 16,
-      viewName: 'HBaseMasterUptimeView',
-      sourceName: 'HBASE',
-      title: Em.I18n.t('dashboard.widgets.HBaseMasterUptime'),
-      threshold: []
-    },
-    {
-      id: 17,
-      viewName: 'ResourceManagerHeapPieChartView',
-      sourceName: 'YARN',
-      title: Em.I18n.t('dashboard.widgets.ResourceManagerHeap'),
-      threshold: [70, 90]
-    },
-    {
-      id: 18,
-      viewName: 'ResourceManagerUptimeView',
-      sourceName: 'YARN',
-      title: Em.I18n.t('dashboard.widgets.ResourceManagerUptime'),
-      threshold: []
-    },
-    {
-      id: 19,
-      viewName: 'NodeManagersLiveView',
-      sourceName: 'YARN',
-      title: Em.I18n.t('dashboard.widgets.NodeManagersLive'),
-      threshold: [50, 75]
-    },
-    {
-      id: 20,
-      viewName: 'YARNMemoryPieChartView',
-      sourceName: 'YARN',
-      title: Em.I18n.t('dashboard.widgets.YARNMemory'),
-      threshold: [50, 75]
-    },
-    {
-      id: 21,
-      viewName: 'SuperVisorUpView',
-      sourceName: 'STORM',
-      title: Em.I18n.t('dashboard.widgets.SuperVisorUp'),
-      threshold: [85, 95]
-    },
-    {
-      id: 22,
-      viewName: 'FlumeAgentUpView',
-      sourceName: 'FLUME',
-      title: Em.I18n.t('dashboard.widgets.FlumeAgentUp'),
-      threshold: [85, 95]
-    },
-    {
-      id: 23,
-      viewName: 'YARNLinksView',
-      sourceName: 'YARN',
-      title: Em.I18n.t('dashboard.widgets.YARNLinks'),
-      threshold: []
-    },
-    {
-      id: 24,
-      viewName: 'HawqSegmentUpView',
-      sourceName: 'HAWQ',
-      title: Em.I18n.t('dashboard.widgets.HawqSegmentUp'),
-      threshold: [75, 90]
-    },
-    {
-      id: 25,
-      viewName: 'PxfUpView',
-      sourceName: 'PXF',
-      title: Em.I18n.t('dashboard.widgets.PxfUp'),
-      threshold: []
-    }
-  ],
+  widgetsDefinition: require('data/dashboard_widgets'),
 
   widgetsDefinitionMap: function () {
     return this.get('widgetsDefinition').toMapByProperty('id');
@@ -275,15 +98,13 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
   userPreferences: null,
 
   didInsertElement: function () {
-    var self = this;
-
     this._super();
-    this.loadWidgetsSettings().complete(function() {
-      self.checkServicesChange();
-      self.renderWidgets();
-      self.set('isDataLoaded', true);
+    this.loadWidgetsSettings().complete(() => {
+      this.checkServicesChange();
+      this.renderWidgets();
+      this.set('isDataLoaded', true);
       App.loadTimer.finish('Dashboard Metrics Page');
-      Em.run.next(self, 'makeSortable');
+      Em.run.next(this, 'makeSortable');
     });
   },
 
@@ -337,11 +158,10 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
 
   resolveConfigDependencies: function(widgetsDefinition) {
     var clusterEnv = App.router.get('clusterController.clusterEnv').properties;
-    var yarnMemoryWidget = widgetsDefinition.findProperty('id', 20);
-
     if (clusterEnv.hide_yarn_memory_widget === 'true') {
-      yarnMemoryWidget.isHiddenByDefault = true;
+      widgetsDefinition.findProperty('id', 20).isHiddenByDefault = true;
     }
+    return widgetsDefinition;
   },
 
   generateDefaultUserPreferences: function() {
@@ -353,14 +173,10 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
     };
 
     this.resolveConfigDependencies(widgetsDefinition);
-
     widgetsDefinition.forEach(function(widget) {
       if (App.Service.find(widget.sourceName).get('isLoaded') || widget.sourceName === 'HOST_METRICS') {
-        if (widget.isHiddenByDefault) {
-          preferences.hidden.push(widget.id);
-        } else {
-          preferences.visible.push(widget.id);
-        }
+        let state = widget.isHiddenByDefault ? 'hidden' : 'visible';
+        preferences[state].push(widget.id);
       }
       preferences.threshold[widget.id] = widget.threshold;
     });
@@ -421,18 +237,13 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
     };
     var isChanged = false;
 
-    defaultPreferences.visible.forEach(function(id) {
-      if (!userPreferences.visible.contains(id) && !userPreferences.hidden.contains(id)) {
-        isChanged = true;
-        newValue.visible.push(id);
-      }
-    });
-
-    defaultPreferences.hidden.forEach(function(id) {
-      if (!userPreferences.visible.contains(id) && !userPreferences.hidden.contains(id)) {
-        isChanged = true;
-        newValue.hidden.push(id);
-      }
+    ['visible', 'hidden'].forEach(state => {
+      defaultPreferences[state].forEach(id => {
+        if (!userPreferences.visible.contains(id) && !userPreferences.hidden.contains(id)) {
+          isChanged = true;
+          newValue[state].push(id);
+        }
+      });
     });
     if (isChanged) {
       this.saveWidgetsSettings(newValue);
@@ -443,15 +254,14 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
    * Reset widgets visibility-status
    */
   resetAllWidgets: function () {
-    var self = this;
-    App.showConfirmationPopup(function () {
-      self.saveWidgetsSettings(self.generateDefaultUserPreferences());
-      self.setProperties({
+    App.showConfirmationPopup(() => {
+      this.saveWidgetsSettings(this.generateDefaultUserPreferences());
+      this.setProperties({
         currentTimeRangeIndex: 0,
         customStartTime: null,
         customEndTime: null
       });
-      self.renderWidgets();
+      this.renderWidgets();
     });
   },
 
@@ -469,23 +279,21 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
         var widgetsArray = $('div[viewid]');
 
         var userPreferences = self.get('userPreferences') || self.getDBProperty(self.get('persistKey'));
-        var newValue = Em.Object.create({
+        var newValue = {
           visible: [],
           hidden: userPreferences.hidden,
           threshold: userPreferences.threshold
+        };
+        newValue.visible = userPreferences.visible.map((item, index) => {
+          var viewID = widgetsArray.get(index).getAttribute('viewid');
+          return Number(viewID.split('-')[1]);
         });
-        var size = userPreferences.visible.length;
-        for (var j = 0; j <= size - 1; j++) {
-          var viewID = widgetsArray.get(j).getAttribute('viewid');
-          var id = Number(viewID.split("-").get(1));
-          newValue.visible.push(id);
-        }
         self.saveWidgetsSettings(newValue);
       },
-      activate: function (event, ui) {
+      activate: function () {
         self.set('isMoving', true);
       },
-      deactivate: function (event, ui) {
+      deactivate: function () {
         self.set('isMoving', false);
       }
     }).disableSelection();
@@ -511,21 +319,9 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
     }),
     applyFilter: function () {
       var parent = this.get('parentView'),
-        hiddenWidgets = this.get('hiddenWidgets'),
-        userPreferences = parent.get('userPreferences'),
-        newValue = {
-          visible: userPreferences.visible.slice(0),
-          hidden: userPreferences.hidden.slice(0),
-          threshold: userPreferences.threshold
-        };
-
-      hiddenWidgets.filterProperty('checked').forEach(function (item) {
-        newValue.visible.push(item.id);
-        newValue.hidden = newValue.hidden.without(item.id);
-        hiddenWidgets.removeObject(item);
-      }, this);
-      parent.saveWidgetsSettings(newValue);
-      parent.renderWidgets();
+        hiddenWidgets = this.get('hiddenWidgets');
+      hiddenWidgets.filterProperty('checked').setEach('isVisible', true);
+      parent.saveWidgetsSettings();
     }
   }),
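
Since applyFilter above now just flags widgets visible and delegates persistence to saveWidgetsSettings, it helps to keep in mind the shape of the object being saved; an illustrative example (ids and thresholds are sample values taken from the default widget definitions):

  // userPreferences as persisted by saveWidgetsSettings()
  {
    visible: [1, 2, 4, 5],      // widget ids rendered on the dashboard
    hidden: [15],               // widget ids offered through the "+" filter
    threshold: {
      1: [80, 90],              // NameNode heap
      5: [1000, 3000],          // NameNode RPC
      15: [3, 10]               // HBase regions in transition
    }
  }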
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/app/views/main/dashboard/widgets/text_widget.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets/text_widget.js b/ambari-web/app/views/main/dashboard/widgets/text_widget.js
index daa354c..79f0573 100644
--- a/ambari-web/app/views/main/dashboard/widgets/text_widget.js
+++ b/ambari-web/app/views/main/dashboard/widgets/text_widget.js
@@ -32,26 +32,5 @@ App.TextDashboardWidgetView = App.DashboardWidgetView.extend({
     return this.get('data') === null;
   }.property('data'),
 
-  hiddenInfo: [],
-
-  maxValue: null,
-
-  updateColors: function (handlers, colors) {
-    var colorstops = colors[0] + ", "; // start with the first color
-    for (var i = 0; i < handlers.length; i++) {
-      colorstops += colors[i] + " " + handlers[i] + "%,";
-      colorstops += colors[i + 1] + " " + handlers[i] + "%,";
-    }
-    colorstops += colors[colors.length - 1];
-    var cssForChromeAndSafari = '-webkit-linear-gradient(left,' + colorstops + ')'; // chrome & safari
-    var slider = $('#slider-range');
-    slider.css('background-image', cssForChromeAndSafari);
-    var cssForIE = '-ms-linear-gradient(left,' + colorstops + ')'; // IE 10+
-    slider.css('background-image', cssForIE);
-    //$('#slider-range').css('filter', 'progid:DXImageTransform.Microsoft.gradient( startColorStr= ' + colors[0] + ', endColorStr= ' + colors[2] +',  GradientType=1 )' ); // IE 10-
-    var cssForFireFox = '-moz-linear-gradient(left,' + colorstops + ')'; // Firefox
-    slider.css('background-image', cssForFireFox);
-
-    slider.find('.ui-widget-header').css({'background-color': '#FF8E00', 'background-image': 'none'}); // change the  original ranger color
-  }
+  hiddenInfo: []
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/test/views/common/modal_popups/edit_dashboard_widget_popup_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/modal_popups/edit_dashboard_widget_popup_test.js b/ambari-web/test/views/common/modal_popups/edit_dashboard_widget_popup_test.js
new file mode 100644
index 0000000..3335cf3
--- /dev/null
+++ b/ambari-web/test/views/common/modal_popups/edit_dashboard_widget_popup_test.js
@@ -0,0 +1,214 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+var App = require('app');
+
+function getView() {
+  return App.EditDashboardWidgetPopup.create({
+    widgetView: Em.View.create(),
+    sliderHandlersManager: {}
+  });
+}
+
+describe('App.EditDashboardWidgetPopup', function () {
+
+  App.TestAliases.testAsComputedAlias(getView(), 'disablePrimary', 'sliderHandlersManager.hasErrors');
+
+  App.TestAliases.testAsComputedAlias(getView(), 'sliderMaxValue', 'sliderHandlersManager.maxValue');
+
+  describe('#init', function () {
+
+    it('should throw an Error if no `widgetView` provided', function () {
+      expect(function () {
+        App.EditDashboardWidgetPopup.create({
+          sliderHandlersManager: {}
+        });
+      }).to.throw(/`widgetView` should be valid view/);
+    });
+
+    it('should throw an Error if no `sliderHandlersManager` provided', function () {
+      expect(function () {
+        App.EditDashboardWidgetPopup.create({
+          widgetView: Em.View.create(),
+        });
+      }).to.throw(/`sliderHandlersManager` should be set/);
+    });
+
+  });
+
+});
+
+describe('App.EditDashboardWidgetPopup.SingleHandler', function () {
+
+  var handler;
+
+  function getSingleHandler() {
+    return App.EditDashboardWidgetPopup.SingleHandler.create();
+  }
+
+  beforeEach(function () {
+    handler = getSingleHandler();
+  });
+
+  App.TestAliases.testAsComputedAlias(getSingleHandler(), 'hasErrors', 'thresholdMinError');
+
+  describe('#updateThresholds', function () {
+
+    it('should update `thresholdMin`', function () {
+      handler.set('thresholdMin', -1);
+      handler.updateThresholds([100500]);
+      expect(handler.get('thresholdMin')).to.be.equal(100500);
+    });
+
+  });
+
+  describe('#thresholdMinErrorMessage', function() {
+
+    var minValue = 0;
+    var maxValue = 100;
+    var msg = Em.I18n.t('dashboard.widgets.error.invalid').format(minValue, maxValue);
+
+    beforeEach(function() {
+      handler.setProperties({
+        minValue: minValue,
+        maxValue: maxValue
+      });
+    });
+
+    [
+      {thresholdMin: -1, e: msg},
+      {thresholdMin: 101, e: msg},
+      {thresholdMin: 'abc', e: msg},
+      {thresholdMin: 60, e: ''}
+    ].forEach(function(test) {
+      it('thresholdMin: ' + JSON.stringify(test.thresholdMin), function () {
+        handler.set('thresholdMin', test.thresholdMin);
+        expect(handler.get('thresholdMinErrorMessage')).to.be.equal(test.e);
+      });
+    });
+
+  });
+
+  describe('#preparedThresholds', function () {
+
+    it('mapped to array threshold values', function () {
+      handler.setProperties({
+        thresholdMin: 1
+      });
+      expect(handler.get('preparedThresholds')).to.be.eql([1]);
+    });
+
+  });
+
+});
+
+describe('App.EditDashboardWidgetPopup.DoubleHandlers', function () {
+
+  var handler;
+
+  function getDoubleHandlers() {
+    return App.EditDashboardWidgetPopup.DoubleHandlers.create();
+  }
+
+  beforeEach(function () {
+    handler = getDoubleHandlers();
+  });
+
+  App.TestAliases.testAsComputedOr(getDoubleHandlers(), 'hasErrors', ['thresholdMinError', 'thresholdMaxError']);
+
+  describe('#updateThresholds', function () {
+
+    it('should update `thresholdMin` and `thresholdMax`', function () {
+      handler.set('thresholdMin', -1);
+      handler.set('thresholdMax', 1);
+      handler.updateThresholds([1234, 4321]);
+      expect(handler.get('thresholdMin')).to.be.equal(1234);
+      expect(handler.get('thresholdMax')).to.be.equal(4321);
+    });
+
+  });
+
+  describe('#thresholdMinErrorMessage', function() {
+
+    var minValue = 0;
+    var maxValue = 100;
+    var msg = Em.I18n.t('dashboard.widgets.error.invalid').format(minValue, maxValue);
+    var msg2 = Em.I18n.t('dashboard.widgets.error.smaller');
+
+    beforeEach(function() {
+      handler.setProperties({
+        minValue: minValue,
+        maxValue: maxValue
+      });
+    });
+
+    [
+      {thresholdMin: -1, e: msg},
+      {thresholdMin: 101, e: msg},
+      {thresholdMin: 'abc', e: msg},
+      {thresholdMin: 60, e: ''},
+      {thresholdMin: 99, e: msg2}
+    ].forEach(function(test) {
+      it('thresholdMin: ' + JSON.stringify(test.thresholdMin), function () {
+        handler.set('thresholdMin', test.thresholdMin);
+        handler.set('thresholdMax', 98);
+        expect(handler.get('thresholdMinErrorMessage')).to.be.equal(test.e);
+      });
+    });
+
+  });
+
+  describe('#thresholdMaxErrorMessage', function () {
+    var minValue = 0;
+    var maxValue = 100;
+    var msg = Em.I18n.t('dashboard.widgets.error.invalid').format(minValue, maxValue);
+
+    beforeEach(function() {
+      handler.setProperties({
+        minValue: minValue,
+        maxValue: maxValue
+      });
+    });
+
+    [
+      {thresholdMax: -1, e: msg},
+      {thresholdMax: 101, e: msg},
+      {thresholdMax: 'abc', e: msg},
+      {thresholdMax: 60, e: ''}
+    ].forEach(function(test) {
+      it('thresholdMax: ' + JSON.stringify(test.thresholdMax), function () {
+        handler.set('thresholdMax', test.thresholdMax);
+        expect(handler.get('thresholdMaxErrorMessage')).to.be.equal(test.e);
+      });
+    });
+  });
+
+  describe('#preparedThresholds', function () {
+
+    it('mapped to array threshold values', function () {
+      handler.setProperties({
+        thresholdMin: 1,
+        thresholdMax: 2
+      });
+      expect(handler.get('preparedThresholds')).to.be.eql([1, 2]);
+    });
+
+  });
+
+});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/test/views/main/dashboard/widget_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widget_test.js b/ambari-web/test/views/main/dashboard/widget_test.js
index a504c6d..8e10cef 100644
--- a/ambari-web/test/views/main/dashboard/widget_test.js
+++ b/ambari-web/test/views/main/dashboard/widget_test.js
@@ -89,97 +89,6 @@ describe('App.DashboardWidgetView', function () {
     });
   });
 
-  describe('#widgetConfig', function () {
-    var widgetConfig;
-
-    beforeEach(function() {
-      widgetConfig = view.get('widgetConfig').create();
-    });
-
-    describe('#validateThreshold()', function () {
-
-      beforeEach(function () {
-        sinon.stub(widgetConfig, 'updateSlider');
-      });
-
-      afterEach(function () {
-        widgetConfig.updateSlider.restore();
-      });
-
-      it('updateSlider should be called', function () {
-        widgetConfig.validateThreshold('thresholdMin');
-        expect(widgetConfig.updateSlider).to.be.called;
-      });
-
-      it('thresholdMin is empty', function () {
-        widgetConfig.set('thresholdMin', '');
-        widgetConfig.validateThreshold('thresholdMin');
-        expect(widgetConfig.get('thresholdMinError')).to.be.true;
-        expect(widgetConfig.get('thresholdMinErrorMessage')).to.be.equal(Em.I18n.t('admin.users.editError.requiredField'));
-      });
-
-      it('thresholdMin is NaN', function () {
-        widgetConfig.set('thresholdMin', 'a');
-        widgetConfig.validateThreshold('thresholdMin');
-        expect(widgetConfig.get('thresholdMinError')).to.be.true;
-        expect(widgetConfig.get('thresholdMinErrorMessage')).to.be.equal(Em.I18n.t('dashboard.widgets.error.invalid').format(0));
-      });
-
-      it('thresholdMin bigger than maxValue', function () {
-        widgetConfig.set('thresholdMin', '1');
-        widgetConfig.validateThreshold('thresholdMin');
-        expect(widgetConfig.get('thresholdMinError')).to.be.true;
-        expect(widgetConfig.get('thresholdMinErrorMessage')).to.be.equal(Em.I18n.t('dashboard.widgets.error.invalid').format(0));
-      });
-
-      it('thresholdMin less than 0', function () {
-        widgetConfig.set('thresholdMin', '-1');
-        widgetConfig.validateThreshold('thresholdMin');
-        expect(widgetConfig.get('thresholdMinError')).to.be.true;
-        expect(widgetConfig.get('thresholdMinErrorMessage')).to.be.equal(Em.I18n.t('dashboard.widgets.error.invalid').format(0));
-      });
-
-      it('thresholdMin bigger than thresholdMax', function () {
-        widgetConfig.set('thresholdMin', '2');
-        widgetConfig.set('thresholdMax', '1');
-        widgetConfig.set('maxValue', 100);
-        widgetConfig.validateThreshold('thresholdMin');
-        expect(widgetConfig.get('thresholdMinError')).to.be.true;
-        expect(widgetConfig.get('thresholdMinErrorMessage')).to.be.equal(Em.I18n.t('dashboard.widgets.error.smaller'));
-      });
-    });
-
-    describe('#observeThreshMinValue()', function() {
-
-      beforeEach(function() {
-        sinon.stub(widgetConfig, 'validateThreshold');
-      });
-      afterEach(function() {
-        widgetConfig.validateThreshold.restore();
-      });
-
-      it('validateThreshold should be called', function() {
-        widgetConfig.observeThreshMinValue();
-        expect(widgetConfig.validateThreshold.calledWith('thresholdMin')).to.be.true;
-      });
-    });
-
-    describe('#observeThreshMaxValue()', function() {
-
-      beforeEach(function() {
-        sinon.stub(widgetConfig, 'validateThreshold');
-      });
-      afterEach(function() {
-        widgetConfig.validateThreshold.restore();
-      });
-
-      it('validateThreshold should be called', function() {
-        widgetConfig.observeThreshMaxValue();
-        expect(widgetConfig.validateThreshold.calledWith('thresholdMax')).to.be.true;
-      });
-    });
-  });
-
   describe('#didInsertElement()', function() {
 
     beforeEach(function() {
@@ -250,25 +159,6 @@ describe('App.DashboardWidgetView', function () {
   describe('#editWidget()', function() {
 
     beforeEach(function() {
-      sinon.stub(view, 'showEditDialog');
-    });
-
-    afterEach(function() {
-      view.showEditDialog.restore();
-    });
-
-    it('showEditDialog should be called', function() {
-      view.reopen({
-        widgetConfig: Em.Object.extend()
-      });
-      view.editWidget();
-      expect(view.showEditDialog).to.be.calledOnce;
-    });
-  });
-
-  describe('#showEditDialog()', function() {
-
-    beforeEach(function() {
       sinon.stub(App.ModalPopup, 'show');
     });
 
@@ -277,7 +167,7 @@ describe('App.DashboardWidgetView', function () {
     });
 
     it('App.ModalPopup.show should be called', function() {
-      view.showEditDialog();
+      view.editWidget();
       expect(App.ModalPopup.show).to.be.calledOnce;
     });
   });

http://git-wip-us.apache.org/repos/asf/ambari/blob/05c76ed6/ambari-web/test/views/main/dashboard/widgets_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets_test.js b/ambari-web/test/views/main/dashboard/widgets_test.js
index 5d1ddb2..4700ac8 100644
--- a/ambari-web/test/views/main/dashboard/widgets_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets_test.js
@@ -426,15 +426,7 @@ describe('App.MainDashboardWidgetsView', function () {
       });
 
       it('saveWidgetsSettings should be called', function() {
-        expect(plusButtonFilterView.get('parentView').saveWidgetsSettings.getCall(0).args[0]).to.be.eql({
-          visible: [2, 1],
-          hidden: [3],
-          threshold: {}
-        });
-      });
-
-      it('renderWidgets should be called', function() {
-        expect(plusButtonFilterView.get('parentView').renderWidgets).to.be.calledOnce;
+        expect(plusButtonFilterView.get('parentView').saveWidgetsSettings.calledOnce).to.be.true;
       });
     });
   });


[27/50] [abbrv] ambari git commit: AMBARI-20040. Workflow Manager workflow rendering is broken in designer page. (Belliraj HB via gauravn7)

Posted by nc...@apache.org.
AMBARI-20040. Workflow Manager workflow rendering is broken in designer page. (Belliraj HB via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fb322e27
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fb322e27
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fb322e27

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: fb322e2763382eb7aa6ed54fabb508482dcaebb0
Parents: fa52736
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Thu Feb 16 18:31:00 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Thu Feb 16 18:31:00 2017 +0530

----------------------------------------------------------------------
 .../ui/app/domain/cytoscape-flow-renderer.js    | 43 ++++++++++++++------
 .../resources/ui/app/domain/cytoscape-style.js  |  6 +++
 .../app/validators/duplicate-data-node-name.js  |  2 +-
 3 files changed, 38 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fb322e27/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
index 4194d5e..6f46fdc 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-flow-renderer.js
@@ -75,26 +75,28 @@ var CytoscapeRenderer= Ember.Object.extend({
       case 'end' :
       case 'kill' :
       case 'placeholder' :
-        return 'ellipse';
+      return 'ellipse';
       case 'action' :
-        return 'roundrectangle';
+      return 'roundrectangle';
       case 'fork' :
       case 'join' :
-        return 'roundrectangle';
+      return 'roundrectangle';
       case 'decision' :
-        return 'diamond';
+      return 'diamond';
       default :
-        return 'star';
+      return 'star';
     }
   },
 
   _getCyDataNodes(workflow){
     this.get('dataNodes').clear();
     var self=this;
+    var errorNodeCounter=1;
     workflow.nodeVisitor.process(workflow.startNode, function(node) {
       if (node.type === 'kill') {
         return;
       }
+
       self.get('dataNodes').pushObject({
         data: {
           id: node.id, name: node.name, type: node.type,
@@ -106,20 +108,37 @@ var CytoscapeRenderer= Ember.Object.extend({
       });
       if (node.transitions.length > 0) {
         node.transitions.forEach(function(transition){
-          if (transition.isOnError()|| transition.targetNode.isKillNode()){
+          //if (transition.isOnError()|| transition.targetNode.isKillNode()){
+          if ((transition.isOnError() && transition.getTargetNode().isKillNode())){
             return;
           }
+          var targetNodeId=transition.targetNode.id;
+          if (transition.targetNode.isKillNode()){
+            errorNodeCounter++;
+            var errorNode=transition.targetNode;
+            targetNodeId=errorNode.id+errorNodeCounter;
+            self.get('dataNodes').pushObject({
+              data: {
+                id: targetNodeId, name: errorNode.name, type: errorNode.type,
+                shape: self._getShape(errorNode.type),
+                type : errorNode.type,
+                node: errorNode
+              },
+              dataNodeName: Ember.computed.alias('errorNode.node.name')
+            });
+          }
           self.get('dataNodes').pushObject(
             {
               data: {
-                id: transition.sourceNodeId + '_to_' + transition.targetNode.id,
+                id: transition.sourceNodeId + '_to_' + targetNodeId,
                 source:transition.sourceNodeId,
-                target: transition.targetNode.id,
+                target: targetNodeId,
                 transition: transition,
                 transitionCount: node.getOkTransitionCount()
               }
             }
           );
+
         });
       }
     });
@@ -192,7 +211,7 @@ var CytoscapeRenderer= Ember.Object.extend({
       var node = event.cyTarget;
       var nodeObj = cy.$('#' + node.id());
       this._showNodeEditor(node, nodeObj);
-      if (!(node.data().type === 'start' || node.data().type === 'end' || node.data().type === 'placeholder')) {
+      if (!(node.data().type === 'start' || node.data().type === 'end' || node.data().type === 'placeholder' ||  node.data().type === 'kill')) {
         this.get("context").$(".overlay-node-actions, .overlay-trash-icon").show();
       }
       if (node.data().type === 'action' || node.data().type === 'decision') {
@@ -226,9 +245,9 @@ var CytoscapeRenderer= Ember.Object.extend({
         left: event.originalEvent.offsetX + 15
       });
       if (event.cyTarget.data().transitionCount>1){
-            this.get("context").$(".overlay-trash-transition-icon").show();
+        this.get("context").$(".overlay-trash-transition-icon").show();
       }else{
-          this.get("context").$(".overlay-trash-transition-icon").hide();
+        this.get("context").$(".overlay-trash-transition-icon").hide();
       }
       this.get("context").$(".overlay-transition-content").data("sourceNode",event.cyTarget.source().data("node"));
       this.get("context").$(".overlay-transition-content").data("targetNode",event.cyTarget.target().data("node"));
@@ -276,7 +295,7 @@ var CytoscapeRenderer= Ember.Object.extend({
           if (incomingTran.targetNode.id===currentNodeId){
             incomingTran.sourceNode=incomingNode;
             transitionList=transitionList.concat(incomingTran);
-           }
+          }
         }
       }
       this.get("context").deleteWorkflowNode(this.get("context").$(".overlay-trash-icon").data("node"),transitionList);

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb322e27/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-style.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-style.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-style.js
index 5a074df..2eb01d1 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-style.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/cytoscape-style.js
@@ -83,6 +83,12 @@ export default Ember.Object.create({
       }
     },
     {
+      selector: 'node[type = "kill"]',
+      style: {
+        'color': '#a52a2a'
+      }
+    },
+    {
       selector: 'node[type = "placeholder"]',
       style: {
         width: 1,

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb322e27/contrib/views/wfmanager/src/main/resources/ui/app/validators/duplicate-data-node-name.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/validators/duplicate-data-node-name.js b/contrib/views/wfmanager/src/main/resources/ui/app/validators/duplicate-data-node-name.js
index 5282544..86e5182 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/validators/duplicate-data-node-name.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/validators/duplicate-data-node-name.js
@@ -25,7 +25,7 @@ const DuplicateDataNodeName = BaseValidator.extend({
       model.get('dataNodes').forEach((item)=>{
         if (item.data.node && item.data.node.name) {
           Ember.set(item.data.node, "errors", false);
-          if(nodeNames.get(item.data.node.name)){
+          if(nodeNames.get(item.data.node.name) && item.data.node.type!=='kill'){
             Ember.set(item.data.node, "errors", true);
             model.get("validationErrors").pushObject({node:item.data,message:"Node name should be unique"});
           }else{


[09/50] [abbrv] ambari git commit: AMBARI-20018. Document security issue related to setting security.agent.hostname.validate to false (rlevas)

Posted by nc...@apache.org.
AMBARI-20018. Document security issue related to setting security.agent.hostname.validate to false (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/45842645
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/45842645
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/45842645

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 45842645c546a176f1692d0d7be008e2d51c5086
Parents: a1f23ad
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Feb 15 11:20:03 2017 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Wed Feb 15 11:20:03 2017 -0500

----------------------------------------------------------------------
 ambari-server/docs/configuration/index.md                          | 2 +-
 .../java/org/apache/ambari/server/configuration/Configuration.java | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/45842645/ambari-server/docs/configuration/index.md
----------------------------------------------------------------------
diff --git a/ambari-server/docs/configuration/index.md b/ambari-server/docs/configuration/index.md
index 50864f2..ae2d549 100644
--- a/ambari-server/docs/configuration/index.md
+++ b/ambari-server/docs/configuration/index.md
@@ -172,7 +172,7 @@ The following are the properties which can be used to configure Ambari.
 | repo.validation.suffixes.ubuntu | The suffixes to use when validating Ubuntu repositories. |`/dists/%s/Release` | 
 | resources.dir | The location on the Ambari Server where all resources exist, including common services, stacks, and scripts. |`/var/lib/ambari-server/resources/` | 
 | rolling.upgrade.skip.packages.prefixes | A comma-separated list of packages which will be skipped during a stack upgrade. | | 
-| security.agent.hostname.validate | Determines whether the Ambari Agent host names should be validated against a regular expression to ensure that they are well-formed. |`true` | 
+| security.agent.hostname.validate | Determines whether the Ambari Agent host names should be validated against a regular expression to ensure that they are well-formed.<br><br>WARNING: By setting this value to false, host names will not be validated, allowing a possible security vulnerability as described in CVE-2014-3582. See https://cwiki.apache.org/confluence/display/AMBARI/Ambari+Vulnerabilities for more information.|`true` | 
 | security.master.key.location | The location on the Ambari Server of the master key file. This is the key to the master keystore. | | 
 | security.master.keystore.location | The location on the Ambari Server of the master keystore file. | | 
 | security.server.cert_name | The name of the file located in the `security.server.keys_dir` directory where certificates will be generated when Ambari uses the `openssl ca` command. |`ca.crt` | 

http://git-wip-us.apache.org/repos/asf/ambari/blob/45842645/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 5020790..e1df5bd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -510,7 +510,7 @@ public class Configuration {
    * Determines whether the Ambari Agent host names should be validated against
    * a regular expression to ensure that they are well-formed.
    */
-  @Markdown(description = "Determines whether the Ambari Agent host names should be validated against a regular expression to ensure that they are well-formed.")
+  @Markdown(description = "Determines whether the Ambari Agent host names should be validated against a regular expression to ensure that they are well-formed.<br><br>WARNING: By setting this value to false, host names will not be validated, allowing a possible security vulnerability as described in CVE-2014-3582. See https://cwiki.apache.org/confluence/display/AMBARI/Ambari+Vulnerabilities for more information.")
   public static final ConfigurationProperty<String> SRVR_AGENT_HOSTNAME_VALIDATE = new ConfigurationProperty<>(
       "security.agent.hostname.validate", "true");
 


[11/50] [abbrv] ambari git commit: AMBARI-20026 Ambari server start returns prematurely before extracting views. (dsen)

Posted by nc...@apache.org.
AMBARI-20026 Ambari server start returns prematurely before extracting views. (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e890f01d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e890f01d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e890f01d

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: e890f01d0f562b81489c44fea9596f3455d8d9bc
Parents: d75756e
Author: Dmytro Sen <ds...@apache.org>
Authored: Wed Feb 15 19:05:00 2017 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Wed Feb 15 19:05:00 2017 +0200

----------------------------------------------------------------------
 ambari-server/src/main/python/ambari_server_main.py | 5 -----
 1 file changed, 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e890f01d/ambari-server/src/main/python/ambari_server_main.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server_main.py b/ambari-server/src/main/python/ambari_server_main.py
index 7a21333..4667899 100644
--- a/ambari-server/src/main/python/ambari_server_main.py
+++ b/ambari-server/src/main/python/ambari_server_main.py
@@ -217,15 +217,10 @@ def wait_for_server_start(pidFile, scmStatus):
   # looking_for_pid() might return partrial pid list on slow hardware
   for i in range(1, SERVER_START_RETRIES):
     pids = looking_for_pid(SERVER_SEARCH_PATTERN, SERVER_START_TIMEOUT)
-
-    sys.stdout.write('\n')
-    sys.stdout.flush()
-
     if save_main_pid_ex(pids, pidFile, locate_all_file_paths('sh', '/bin') +
                         locate_all_file_paths('bash', '/bin') +
                         locate_all_file_paths('dash', '/bin'), IS_FOREGROUND):
       server_started = True
-      sys.stdout.write("Server PID determined " + AMBARI_SERVER_STARTED_SUCCESS_MSG + "\n")
       break
     else:
       sys.stdout.write("Unable to determine server PID. Retrying...\n")


[42/50] [abbrv] ambari git commit: AMBARI-20044. NullPointerException after server restart in case not all hosts were joined (magyari_sandor)

Posted by nc...@apache.org.
AMBARI-20044. NullPointerException after server restart in case not all hosts were joined (magyari_sandor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bcf72893
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bcf72893
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bcf72893

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: bcf7289331765c6f67b99fd08a2674c90fa53589
Parents: c8c134e
Author: Sandor Magyari <sm...@hortonworks.com>
Authored: Thu Feb 16 16:22:59 2017 +0100
Committer: Sandor Magyari <sm...@hortonworks.com>
Committed: Fri Feb 17 17:35:42 2017 +0100

----------------------------------------------------------------------
 .../main/java/org/apache/ambari/server/topology/HostRequest.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bcf72893/ambari-server/src/main/java/org/apache/ambari/server/topology/HostRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/HostRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/HostRequest.java
index a18999b..a6f677a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/HostRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/HostRequest.java
@@ -313,7 +313,7 @@ public class HostRequest implements Comparable<HostRequest> {
     for (HostRoleCommand logicalTask : logicalTasks.values()) {
       // set host on command detail if it is set to null
       String commandDetail = logicalTask.getCommandDetail();
-      if (commandDetail != null && commandDetail.contains("null")) {
+      if (commandDetail != null && commandDetail.contains("null") && hostname != null) {
         logicalTask.setCommandDetail(commandDetail.replace("null", hostname));
       }
       Long physicalTaskId = physicalTasks.get(logicalTask.getTaskId());


[34/50] [abbrv] ambari git commit: AMBARI-20013. Add Solr authorization settings during LogSearch/Atlas/Ranger startup (oleewere)

Posted by nc...@apache.org.
AMBARI-20013. Add Solr authorization settings during LogSearch/Atlas/Ranger startup (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/347ba2a9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/347ba2a9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/347ba2a9

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 347ba2a9983d400cddf4d888e7f8c15d72b71d5a
Parents: bfaaba2
Author: oleewere <ol...@gmail.com>
Authored: Mon Feb 13 18:34:50 2017 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Thu Feb 16 20:18:59 2017 +0100

----------------------------------------------------------------------
 .../libraries/functions/solr_cloud_util.py      | 110 ++++++++++++++++++-
 .../configuration/infra-solr-security-json.xml  |  82 +++++++++++---
 .../0.1.0/package/scripts/params.py             |   9 +-
 .../0.1.0/package/scripts/setup_infra_solr.py   |  17 ++-
 .../templates/infra-solr-security.json.j2       |  68 ++++++++++++
 .../properties/infra-solr-security.json.j2      |  68 ------------
 .../ATLAS/0.1.0.2.3/package/scripts/metadata.py |  20 ++++
 .../ATLAS/0.1.0.2.3/package/scripts/params.py   |   3 +
 .../ATLAS/0.7.0.2.5/kerberos.json               |   3 +
 .../LOGSEARCH/0.5.0/kerberos.json               |  39 ++++---
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |   5 +
 .../0.5.0/package/scripts/setup_logsearch.py    |  22 +++-
 .../RANGER/0.4.0/package/scripts/params.py      |   3 +
 .../0.4.0/package/scripts/setup_ranger_xml.py   |  41 +++++++
 .../common-services/RANGER/0.6.0/kerberos.json  |   3 +
 .../stacks/2.3/ATLAS/test_metadata_server.py    |   8 ++
 .../test/python/stacks/2.3/configs/secure.json  |   7 +-
 .../stacks/2.4/AMBARI_INFRA/test_infra_solr.py  |   4 +-
 .../stacks/2.4/LOGSEARCH/test_logsearch.py      |   3 +-
 .../stacks/2.5/RANGER/test_ranger_admin.py      |  11 ++
 .../stacks/2.6/RANGER/test_ranger_admin.py      |   9 ++
 21 files changed, 418 insertions(+), 117 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py b/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
index 4628211..1eeb86b 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
@@ -17,12 +17,17 @@ limitations under the License.
 
 """
 import random
+import json
+from random import randrange
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from ambari_jinja2 import Environment as JinjaEnvironment
+from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.core.resources.system import Directory, Execute, File
 from resource_management.core.source import StaticFile
+from resource_management.core.shell import as_sudo
+from resource_management.core.logger import Logger
 
 __all__ = ["upload_configuration_to_zk", "create_collection", "setup_kerberos", "set_cluster_prop",
            "setup_kerberos_plugin", "create_znode", "check_znode", "secure_solr_znode", "secure_znode"]
@@ -163,13 +168,16 @@ def set_cluster_prop(zookeeper_quorum, solr_znode, prop_name, prop_value, java64
     set_cluster_prop_cmd+=format(' --jaas-file {jaas_file}')
   Execute(set_cluster_prop_cmd)
 
-def secure_znode(zookeeper_quorum, solr_znode, jaas_file, java64_home, sasl_users=[]):
+def secure_znode(config, zookeeper_quorum, solr_znode, jaas_file, java64_home, sasl_users=[], retry = 5 , interval = 10):
   """
-  Secure znode, set a list of sasl users acl to 'cdrwa', and set acl to 'r' only for the world. 
+  Secure znode, set a list of sasl users acl to 'cdrwa', and set acl to 'r' only for the world.
+  Add the infra-solr user by default if it's available.
   """
   solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, True)
-  sasl_users_str = ",".join(str(x) for x in sasl_users)
-  secure_znode_cmd = format('{solr_cli_prefix} --secure-znode --jaas-file {jaas_file} --sasl-users {sasl_users_str}')
+  if "infra-solr-env" in config['configurations']:
+    sasl_users.append(__get_name_from_principal(config['configurations']['infra-solr-env']['infra_solr_kerberos_principal']))
+  sasl_users_str = ",".join(str(__get_name_from_principal(x)) for x in sasl_users)
+  secure_znode_cmd = format('{solr_cli_prefix} --secure-znode --jaas-file {jaas_file} --sasl-users {sasl_users_str} --retry {retry} --interval {interval}')
   Execute(secure_znode_cmd)
 
 
@@ -243,3 +251,97 @@ def setup_solr_client(config, custom_log4j = True, custom_log_location = None, l
          mode=0664,
          content=''
          )
+
+def __get_name_from_principal(principal):
+  if not principal:  # return if empty
+    return principal
+  slash_split = principal.split('/')
+  if len(slash_split) == 2:
+    return slash_split[0]
+  else:
+    at_split = principal.split('@')
+    return at_split[0]
+
+def __remove_host_from_principal(principal, realm):
+  if not realm:
+    raise Exception("Realm parameter is missing.")
+  if not principal:
+    raise Exception("Principal parameter is missing.")
+  username=__get_name_from_principal(principal)
+  at_split = principal.split('@')
+  if len(at_split) == 2:
+    realm = at_split[1]
+  return format('{username}@{realm}')
+
+def __get_random_solr_host(actual_host, solr_hosts = []):
+  """
+  Get a random solr host; prefer the actual host if infra-solr is installed there (helps blueprint installs).
+  If there is only one solr host on the cluster, use that.
+  """
+  if not solr_hosts:
+    raise Exception("Solr hosts parameter is empty.")
+  if len(solr_hosts) == 1:
+    return solr_hosts[0]
+  if actual_host in solr_hosts:
+    return actual_host
+  else:
+    random_index = randrange(0, len(solr_hosts))
+    return solr_hosts[random_index]
+
+def add_solr_roles(config, roles = [], new_service_principals = [], tries = 30, try_sleep = 10):
+  """
+  Set user-role mappings based on roles and principal users for a secured cluster. Use the Solr REST API to check whether authorization is enabled;
+  if it is, update the user-role mappings for Solr (this updates the security.json file under solr_znode).
+  If a custom security.json is used for infra-solr, this step is skipped.
+  """
+  sudo = AMBARI_SUDO_BINARY
+  solr_hosts = default_config(config, "/clusterHostInfo/infra_solr_hosts", [])
+  security_enabled = config['configurations']['cluster-env']['security_enabled']
+  solr_ssl_enabled = default_config(config, 'configurations/infra-solr-env/infra_solr_ssl_enabled', False)
+  solr_port = default_config(config, 'configurations/infra-solr-env/infra_solr_port', '8886')
+  kinit_path_local = get_kinit_path(default_config(config, '/configurations/kerberos-env/executable_search_paths', None))
+  infra_solr_custom_security_json_content = None
+
+  if 'infra-solr-security-json' in config['configurations']:
+    infra_solr_custom_security_json_content = config['configurations']['infra-solr-security-json']['content']
+
+  Logger.info(format("Adding {roles} roles to {new_service_principals} if infra-solr is installed."))
+  if infra_solr_custom_security_json_content and str(infra_solr_custom_security_json_content).strip():
+    Logger.info("Custom security.json is not empty for infra-solr, skip adding roles...")
+  elif security_enabled \
+    and "infra-solr-env" in config['configurations'] \
+    and solr_hosts is not None \
+    and len(solr_hosts) > 0:
+    solr_protocol = "https" if solr_ssl_enabled else "http"
+    hostname = config['hostname'].lower()
+    solr_host = __get_random_solr_host(hostname, solr_hosts)
+    solr_url = format("{solr_protocol}://{solr_host}:{solr_port}/solr/admin/authorization")
+    solr_user_keytab = config['configurations']['infra-solr-env']['infra_solr_kerberos_keytab']
+    solr_user_principal = config['configurations']['infra-solr-env']['infra_solr_kerberos_principal'].replace('_HOST', hostname)
+    solr_user_kinit_cmd = format("{kinit_path_local} -kt {solr_user_keytab} {solr_user_principal};")
+    solr_authorization_enabled_cmd=format("{sudo} {solr_user_kinit_cmd} {sudo} curl -k -s --negotiate -u : {solr_protocol}://{solr_host}:{solr_port}/solr/admin/authorization | grep authorization.enabled")
+
+    if len(new_service_principals) > 0:
+      new_service_users = []
+
+      kerberos_realm = config['configurations']['kerberos-env']['realm']
+      for new_service_user in new_service_principals:
+        new_service_users.append(__remove_host_from_principal(new_service_user, kerberos_realm))
+      user_role_map = {}
+
+      for new_service_user in new_service_users:
+        user_role_map[new_service_user] = roles
+
+      Logger.info(format("New service users after removing fully qualified names: {new_service_users}"))
+
+      set_user_role_map = {}
+      set_user_role_map['set-user-role'] = user_role_map
+      set_user_role_json = json.dumps(set_user_role_map)
+
+      add_solr_role_cmd = format("{sudo} {solr_user_kinit_cmd} {sudo} curl -H 'Content-type:application/json' -d '{set_user_role_json}' -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k {solr_url} | grep 200")
+
+      Logger.info(format("Check authorization enabled command: {solr_authorization_enabled_cmd} \nSet user-role settings command: {add_solr_role_cmd}"))
+      Execute(solr_authorization_enabled_cmd + " && "+ add_solr_role_cmd,
+              tries=tries,
+              try_sleep=try_sleep,
+              logoutput=True)
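
A standalone sketch of the principal handling and of the set-user-role payload that the helpers above build. The principals and role names below are made-up examples, and the functions are re-declared locally so the snippet runs without the resource_management libraries.

import json

def get_name_from_principal(principal):
    # "infra-solr/host@EXAMPLE.COM" -> "infra-solr", "atlas@EXAMPLE.COM" -> "atlas"
    if not principal:
        return principal
    slash_split = principal.split('/')
    if len(slash_split) == 2:
        return slash_split[0]
    return principal.split('@')[0]

def remove_host_from_principal(principal, realm):
    # "nn/host@EXAMPLE.COM" -> "nn@EXAMPLE.COM"; keeps the principal's own realm when present
    username = get_name_from_principal(principal)
    at_split = principal.split('@')
    if len(at_split) == 2:
        realm = at_split[1]
    return '{0}@{1}'.format(username, realm)

# The JSON body that add_solr_roles() posts to /solr/admin/authorization:
roles = ['ranger_audit_user', 'dev']                         # example role names
principals = ['nn/c6401.ambari.apache.org@EXAMPLE.COM']      # example service principal
user_role_map = dict((remove_host_from_principal(p, 'EXAMPLE.COM'), roles) for p in principals)
print(json.dumps({'set-user-role': user_role_map}))
# {"set-user-role": {"nn@EXAMPLE.COM": ["ranger_audit_user", "dev"]}}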

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-security-json.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-security-json.xml b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-security-json.xml
index e193a8c..e99d961 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-security-json.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-security-json.xml
@@ -26,9 +26,12 @@
     <display-name>Ranger audit service users</display-name>
     <value>{default_ranger_audit_users}</value>
     <description>
-      List of comma separated kerberos service users who can write into ranger audit collections if the cluster is secure. (atlas and rangeradmin supported by default)
-      Change values in that case of custom values are used for kerberos principals. (default_ranger_audit_users is resolved ranger-*-audit/xasecure.audit.jaas.Client.option.principal,
-      by default namenode, hbase, hive knox, kafka, ranger kms and nifi are supported, to change it you can edit the security content,
+      List of comma-separated kerberos service users who can write into ranger audit collections if the cluster is
+      secure (atlas and rangeradmin are supported by default).
+      Change these values if custom kerberos principals are used. (default_ranger_audit_users is resolved from
+      ranger-*-audit/xasecure.audit.jaas.Client.option.principal;
+      by default namenode, hbase, hive, knox, kafka, ranger kms and nifi are supported; to change it you can edit the
+      security content,
       or add a new username next to the default value, e.g.: {default_ranger_audit_users},customuser)
     </description>
     <depends-on>
@@ -68,20 +71,6 @@
         <type>ranger-nifi-audit</type>
         <name>xasecure.audit.jaas.Client.option.principal</name>
       </property>
-    </depends-on>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>security.json template</display-name>
-    <description>This is the jinja template for security.json file on the solr znode (only used if the cluster is secure)</description>
-    <value/>
-    <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
-    <value-attributes>
-      <property-file-name>infra-solr-security.json.j2</property-file-name>
-      <property-file-type>text</property-file-type>
-    </value-attributes>
-    <depends-on>
       <property>
         <type>application-properties</type>
         <name>atlas.authentication.principal</name>
@@ -93,4 +82,63 @@
     </depends-on>
     <on-ambari-upgrade add="true"/>
   </property>
+  <property>
+    <name>infra_solr_role_ranger_admin</name>
+    <display-name>Ranger admin role</display-name>
+    <value>ranger_admin_user</value>
+    <description>Ranger admin role; it allows users to create collections and perform any action on the ranger audit collection.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_role_ranger_audit</name>
+    <display-name>Ranger audit role</display-name>
+    <value>ranger_audit_user</value>
+    <description>Ranger audit role; it allows users to perform any action on the ranger audit collection.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_role_atlas</name>
+    <display-name>Atlas role</display-name>
+    <value>atlas_user</value>
+    <description>Atlas role; it allows users to create collections and perform any action on the atlas collections.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_role_logsearch</name>
+    <display-name>Log Search role</display-name>
+    <value>logsearch_user</value>
+    <description>Log Search role; it allows users to create collections and perform any action on the Log Search collections.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_role_logfeeder</name>
+    <display-name>Log Feeder role</display-name>
+    <value>logfeeder_user</value>
+    <description>Log Feeder role; it allows users to perform any action on the Log Search collections.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>infra_solr_role_dev</name>
+    <display-name>Dev role</display-name>
+    <value>dev</value>
+    <description>Dev role; it allows users to perform any read action on any collection.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Custom security.json template</display-name>
+    <description>
+      This is the jinja template for a custom security.json file on the solr znode
+      (only used if the cluster is secure; this property overrides the security.json that is generated during solr
+      start).
+    </description>
+    <value>
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
 </configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py
index ab9aa61..acf420e 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py
@@ -129,7 +129,7 @@ if security_enabled:
   ranger_audit_principals.append(default('configurations/ranger-hive-audit/' + ranger_audit_principal_conf_key, 'hive'))
   ranger_audit_principals.append(default('configurations/ranger-knox-audit/' + ranger_audit_principal_conf_key, 'knox'))
   ranger_audit_principals.append(default('configurations/ranger-kafka-audit/' + ranger_audit_principal_conf_key, 'kafka'))
-  ranger_audit_principals.append(default('configurations/ranger-kms-audit/' + ranger_audit_principal_conf_key, 'kms'))
+  ranger_audit_principals.append(default('configurations/ranger-kms-audit/' + ranger_audit_principal_conf_key, 'rangerkms'))
   ranger_audit_principals.append(default('configurations/ranger-storm-audit/' + ranger_audit_principal_conf_key, 'storm'))
   ranger_audit_principals.append(default('configurations/ranger-yarn-audit/' + ranger_audit_principal_conf_key, 'yarn'))
   ranger_audit_principals.append(default('configurations/ranger-nifi-audit/' + ranger_audit_principal_conf_key, 'nifi'))
@@ -160,3 +160,10 @@ logsearch_kerberos_service_user = get_name_from_principal(default('configuration
 logfeeder_kerberos_service_user = get_name_from_principal(default('configurations/logfeeder-env/logfeeder_kerberos_principal', 'logfeeder'))
 infra_solr_kerberos_service_user = get_name_from_principal(default('configurations/infra-solr-env/infra_solr_kerberos_principal', 'infra-solr'))
 
+infra_solr_role_ranger_admin = default('configurations/infra-solr-security-json/infra_solr_role_ranger_admin', 'ranger_user')
+infra_solr_role_ranger_audit = default('configurations/infra-solr-security-json/infra_solr_role_ranger_audit', 'ranger_audit_user')
+infra_solr_role_atlas = default('configurations/infra-solr-security-json/infra_solr_role_atlas', 'atlas_user')
+infra_solr_role_logsearch = default('configurations/infra-solr-security-json/infra_solr_role_logsearch', 'logsearch_user')
+infra_solr_role_logfeeder = default('configurations/infra-solr-security-json/infra_solr_role_logfeeder', 'logfeeder_user')
+infra_solr_role_dev = default('configurations/infra-solr-security-json/infra_solr_role_dev', 'dev')
+
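
These role parameters all use the same lookup-with-fallback pattern. Below is a simplified stand-in for that lookup (not the real resource_management default() implementation); the config fragment is made up, so only infra_solr_role_atlas is found and the dev role falls back to its hard-coded default.

def default_from(config, path, fallback):
    # Walk a slash-separated path through nested dicts; return fallback on any miss.
    node = config
    for part in path.strip('/').split('/'):
        if not isinstance(node, dict) or part not in node:
            return fallback
        node = node[part]
    return node

config = {'configurations': {'infra-solr-security-json': {'infra_solr_role_atlas': 'atlas_user'}}}
print(default_from(config, 'configurations/infra-solr-security-json/infra_solr_role_atlas', 'atlas_user'))  # atlas_user
print(default_from(config, 'configurations/infra-solr-security-json/infra_solr_role_dev', 'dev'))           # dev (fallback)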

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py
index 8d72f42..f3dbcf3 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py
@@ -72,13 +72,12 @@ def setup_infra_solr(name = None):
          group=params.user_group
          )
 
-    security_json_file_location = format("{infra_solr_conf}/security.json")
-
-    File(security_json_file_location,
+    custom_security_json_location = format("{infra_solr_conf}/custom-security.json")
+    File(custom_security_json_location,
          content=InlineTemplate(params.infra_solr_security_json_content),
          owner=params.infra_solr_user,
          group=params.user_group,
-         mode=0644
+         mode=0640
          )
 
     jaas_file = params.infra_solr_jaas_file if params.security_enabled else None
@@ -86,11 +85,21 @@ def setup_infra_solr(name = None):
 
     create_ambari_solr_znode()
 
+    security_json_file_location = custom_security_json_location \
+      if params.infra_solr_security_json_content and str(params.infra_solr_security_json_content).strip() \
+      else format("{infra_solr_conf}/security.json") # security.json file to upload
+
     if params.security_enabled:
       File(format("{infra_solr_jaas_file}"),
            content=Template("infra_solr_jaas.conf.j2"),
            owner=params.infra_solr_user)
 
+      File(format("{infra_solr_conf}/security.json"),
+           content=Template("infra-solr-security.json.j2"),
+           owner=params.infra_solr_user,
+           group=params.user_group,
+           mode=0640)
+
     solr_cloud_util.set_cluster_prop(
       zookeeper_quorum=params.zookeeper_quorum,
       solr_znode=params.infra_solr_znode,
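
A minimal, self-contained sketch of the security.json selection introduced above: upload the rendered custom-security.json only when the infra-solr-security-json content property is non-empty, otherwise upload the generated security.json. The paths mirror the diff; the inline JSON string is just a placeholder.

def pick_security_json(infra_solr_conf, custom_content):
    # Use the custom file when the 'content' property has non-whitespace content,
    # otherwise fall back to the security.json generated from the bundled template.
    if custom_content and str(custom_content).strip():
        return '{0}/custom-security.json'.format(infra_solr_conf)
    return '{0}/security.json'.format(infra_solr_conf)

print(pick_security_json('/etc/ambari-infra-solr/conf', ''))            # .../security.json
print(pick_security_json('/etc/ambari-infra-solr/conf', '{"x": "y"}'))  # .../custom-security.json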

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/templates/infra-solr-security.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/templates/infra-solr-security.json.j2 b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/templates/infra-solr-security.json.j2
new file mode 100644
index 0000000..65d38e9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/templates/infra-solr-security.json.j2
@@ -0,0 +1,68 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+{
+  "authentication": {
+    "class": "org.apache.solr.security.KerberosPlugin"
+  },
+  "authorization": {
+    "class": "org.apache.ambari.infra.security.InfraRuleBasedAuthorizationPlugin",
+    "user-role": {
+      "{{infra_solr_kerberos_service_user}}@{{kerberos_realm}}": "admin",
+      "{{logsearch_kerberos_service_user}}@{{kerberos_realm}}": ["{{infra_solr_role_logsearch}}", "{{infra_solr_role_ranger_admin}}", "{{infra_solr_role_dev}}"],
+      "{{logfeeder_kerberos_service_user}}@{{kerberos_realm}}": ["{{infra_solr_role_logfeeder}}", "{{infra_solr_role_dev}}"],
+      "{{atlas_kerberos_service_user}}@{{kerberos_realm}}": ["{{infra_solr_role_atlas}}", "{{infra_solr_role_ranger_audit}}", "{{infra_solr_role_dev}}"],
+{% if infra_solr_ranger_audit_service_users %}
+{%   for ranger_audit_service_user in infra_solr_ranger_audit_service_users %}
+      "{{ranger_audit_service_user}}@{{kerberos_realm}}": ["{{infra_solr_role_ranger_audit}}", "{{infra_solr_role_dev}}"],
+{%   endfor %}
+{% endif %}
+      "{{ranger_admin_kerberos_service_user}}@{{kerberos_realm}}": ["{{infra_solr_role_ranger_admin}}", "{{infra_solr_role_ranger_audit}}", "{{infra_solr_role_dev}}"]
+    },
+    "permissions": [
+    {
+      "name" : "collection-admin-read",
+      "role" :null
+    },
+    {
+      "name" : "collection-admin-edit",
+      "role" : ["admin", "{{infra_solr_role_logsearch}}", "{{infra_solr_role_logfeeder}}", "{{infra_solr_role_atlas}}", "{{infra_solr_role_ranger_admin}}"]
+    },
+    {
+      "name":"read",
+      "role": "{{infra_solr_role_dev}}"
+    },
+    {
+      "collection": ["{{logsearch_service_logs_collection}}", "{{logsearch_audit_logs_collection}}", "history"],
+      "role": ["admin", "{{infra_solr_role_logsearch}}", "{{infra_solr_role_logfeeder}}"],
+      "name": "logsearch-manager",
+      "path": "/*"
+    },
+    {
+       "collection": ["vertex_index", "edge_index", "fulltext_index"],
+       "role": ["admin", "{{infra_solr_role_atlas}}"],
+       "name": "atlas-manager",
+       "path": "/*"
+    },
+    {
+       "collection": "{{ranger_solr_collection_name}}",
+       "role": ["admin", "{{infra_solr_role_ranger_admin}}", "{{infra_solr_role_ranger_audit}}"],
+       "name": "ranger-manager",
+       "path": "/*"
+    }]
+  }
+}
\ No newline at end of file
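
To see how the role properties end up in the uploaded security.json, the sketch below renders a small excerpt of the template with the jinja2 package and made-up values. This is only an illustration of the templating; it is not how Ambari itself renders the file (Ambari uses its InlineTemplate/Template resources), and it assumes jinja2 is installed.

from jinja2 import Template  # assumption: jinja2 is available

excerpt = (
    '"{{ logsearch_kerberos_service_user }}@{{ kerberos_realm }}": '
    '["{{ infra_solr_role_logsearch }}", "{{ infra_solr_role_ranger_admin }}", "{{ infra_solr_role_dev }}"]'
)

print(Template(excerpt).render(
    logsearch_kerberos_service_user='logsearch',   # example values only
    kerberos_realm='EXAMPLE.COM',
    infra_solr_role_logsearch='logsearch_user',
    infra_solr_role_ranger_admin='ranger_admin_user',
    infra_solr_role_dev='dev',
))
# "logsearch@EXAMPLE.COM": ["logsearch_user", "ranger_admin_user", "dev"]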

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/infra-solr-security.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/infra-solr-security.json.j2 b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/infra-solr-security.json.j2
deleted file mode 100644
index ed764f0..0000000
--- a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/infra-solr-security.json.j2
+++ /dev/null
@@ -1,68 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-{
-  "authentication": {
-    "class": "org.apache.solr.security.KerberosPlugin"
-  },
-  "authorization": {
-    "class": "org.apache.ambari.infra.security.InfraRuleBasedAuthorizationPlugin",
-    "user-role": {
-      "{{infra_solr_kerberos_service_user}}@{{kerberos_realm}}": "admin",
-      "{{logsearch_kerberos_service_user}}@{{kerberos_realm}}": ["logsearch_user", "ranger_user", "dev"],
-      "{{logfeeder_kerberos_service_user}}@{{kerberos_realm}}": ["logfeeder_user", "dev"],
-      "{{atlas_kerberos_service_user}}@{{kerberos_realm}}": ["atlas_user", "ranger_audit_user", "dev"],
-{% if infra_solr_ranger_audit_service_users %}
-{%   for ranger_audit_service_user in infra_solr_ranger_audit_service_users %}
-      "{{ranger_audit_service_user}}@{{kerberos_realm}}": ["ranger_audit_user", "dev"],
-{%   endfor %}
-{% endif %}
-      "{{ranger_admin_kerberos_service_user}}@{{kerberos_realm}}": ["ranger_user", "ranger_audit_user", "dev"]
-    },
-    "permissions": [
-    {
-      "name" : "collection-admin-read",
-      "role" :null
-    },
-    {
-      "name" : "collection-admin-edit",
-      "role" : ["admin", "logsearch_user", "logfeeder_user", "atlas_user", "ranger_user"]
-    },
-    {
-      "name":"read",
-      "role": "dev"
-    },
-    {
-      "collection": ["{{logsearch_service_logs_collection}}", "{{logsearch_audit_logs_collection}}", "history"],
-      "role": ["admin", "logsearch_user", "logfeeder_user"],
-      "name": "logsearch-manager",
-      "path": "/*"
-    },
-    {
-       "collection": ["vertex_index", "edge_index", "fulltext_index"],
-       "role": ["admin", "atlas_user"],
-       "name": "atlas-manager",
-       "path": "/*"
-    },
-    {
-       "collection": "{{ranger_solr_collection_name}}",
-       "role": ["admin", "ranger_user", "ranger_audit_user"],
-       "name": "ranger-manager",
-       "path": "/*"
-    }]
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata.py
index 2232bb2..c25445c 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata.py
@@ -134,10 +134,21 @@ def metadata(type='server'):
       jaasFile=params.atlas_jaas_file if params.security_enabled else None
       upload_conf_set('atlas_configs', jaasFile)
 
+      if params.security_enabled: # update permissions before creating the collections
+        solr_cloud_util.add_solr_roles(params.config,
+                                       roles = [params.infra_solr_role_atlas, params.infra_solr_role_ranger_audit, params.infra_solr_role_dev],
+                                       new_service_principals = [params.atlas_jaas_principal])
+
       create_collection('vertex_index', 'atlas_configs', jaasFile)
       create_collection('edge_index', 'atlas_configs', jaasFile)
       create_collection('fulltext_index', 'atlas_configs', jaasFile)
 
+      if params.security_enabled:
+        secure_znode(format('{infra_solr_znode}/configs/atlas_configs'), jaasFile)
+        secure_znode(format('{infra_solr_znode}/collections/vertex_index'), jaasFile)
+        secure_znode(format('{infra_solr_znode}/collections/edge_index'), jaasFile)
+        secure_znode(format('{infra_solr_znode}/collections/fulltext_index'), jaasFile)
+
     File(params.atlas_hbase_setup,
          group=params.user_group,
          owner=params.hbase_user,
@@ -204,6 +215,15 @@ def create_collection(collection, config_set, jaasFile):
       shards=params.atlas_solr_shards,
       replication_factor = params.infra_solr_replication_factor)
 
+def secure_znode(znode, jaasFile):
+  import params
+  solr_cloud_util.secure_znode(config=params.config, zookeeper_quorum=params.zookeeper_quorum,
+                               solr_znode=znode,
+                               jaas_file=jaasFile,
+                               java64_home=params.java64_home, sasl_users=[params.atlas_jaas_principal])
+
+
+
 @retry(times=10, sleep_time=5, err_class=Fail)
 def check_znode():
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
index 682fc9f..e270733 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
@@ -205,6 +205,9 @@ infra_solr_hosts = default("/clusterHostInfo/infra_solr_hosts", [])
 infra_solr_replication_factor = 2 if len(infra_solr_hosts) > 1 else 1
 atlas_solr_shards = default("/configurations/atlas-env/atlas_solr-shards", 1)
 has_infra_solr = len(infra_solr_hosts) > 0
+infra_solr_role_atlas = default('configurations/infra-solr-security-json/infra_solr_role_atlas', 'atlas_user')
+infra_solr_role_dev = default('configurations/infra-solr-security-json/infra_solr_role_dev', 'dev')
+infra_solr_role_ranger_audit = default('configurations/infra-solr-security-json/infra_solr_role_ranger_audit', 'ranger_audit_user')
 
 # zookeeper
 zookeeper_hosts = config['clusterHostInfo']['zookeeper_hosts']

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/kerberos.json b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/kerberos.json
index bc8e351..d024146 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/kerberos.json
@@ -87,6 +87,9 @@
             },
             {
               "name": "/KAFKA/KAFKA_BROKER/kafka_broker"
+            },
+            {
+              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr"
             }
           ]
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json
index 49d1b10..60c8afb 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json
@@ -11,26 +11,29 @@
         {
           "name": "LOGSEARCH_SERVER",
           "identities": [
-          {
-            "name": "logsearch",
-            "principal": {
-              "value": "logsearch/_HOST@${realm}",
-              "type": "service",
-              "configuration": "logsearch-env/logsearch_kerberos_principal"
-            },
-            "keytab": {
-              "file": "${keytab_dir}/logsearch.service.keytab",
-              "owner": {
-                "name": "${logsearch-env/logsearch_user}",
-                "access": "r"
-              },
-              "group": {
-                "name": "${cluster-env/user_group}",
-                "access": ""
+            {
+              "name": "logsearch",
+              "principal": {
+                "value": "logsearch/_HOST@${realm}",
+                "type": "service",
+                "configuration": "logsearch-env/logsearch_kerberos_principal"
               },
-              "configuration": "logsearch-env/logsearch_kerberos_keytab"
+              "keytab": {
+                "file": "${keytab_dir}/logsearch.service.keytab",
+                "owner": {
+                  "name": "${logsearch-env/logsearch_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "logsearch-env/logsearch_kerberos_keytab"
+              }
+            },
+            {
+              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr"
             }
-          }
           ]
         },
         {

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
index fecd802..a023f2f 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
@@ -106,6 +106,11 @@ if 'infra-solr-env' in config['configurations']:
   infra_solr_ssl_enabled = default('configurations/infra-solr-env/infra_solr_ssl_enabled', False)
   infra_solr_jmx_port = config['configurations']['infra-solr-env']['infra_solr_jmx_port']
 
+infra_solr_role_logsearch = default('configurations/infra-solr-security-json/infra_solr_role_logsearch', 'logsearch_user')
+infra_solr_role_logfeeder = default('configurations/infra-solr-security-json/infra_solr_role_logfeeder', 'logfeeder_user')
+infra_solr_role_dev = default('configurations/infra-solr-security-json/infra_solr_role_dev', 'dev')
+infra_solr_role_ranger_admin = default('configurations/infra-solr-security-json/infra_solr_role_ranger_admin', 'ranger_user')
+
 _hostname_lowercase = config['hostname'].lower()
 if security_enabled:
   kinit_path_local = status_params.kinit_path_local

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
index ba91e20..f96bfd0 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
@@ -17,9 +17,12 @@ limitations under the License.
 
 """
 
+from resource_management.core.exceptions import Fail
 from resource_management.core.resources.system import Directory, Execute, File
 from resource_management.libraries.functions.format import format
 from resource_management.core.source import InlineTemplate, Template
+from resource_management.libraries.functions import solr_cloud_util
+from resource_management.libraries.functions.decorator import retry
 from resource_management.libraries.resources.properties_file import PropertiesFile
 from resource_management.libraries.functions.security_commons import update_credential_provider_path, HADOOP_CREDENTIAL_PROVIDER_PROPERTY_NAME
 
@@ -110,7 +113,24 @@ def setup_logsearch():
          content=Template("logsearch_jaas.conf.j2"),
          owner=params.logsearch_user
          )
-
   Execute(("chmod", "-R", "ugo+r", format("{logsearch_server_conf}/solr_configsets")),
           sudo=True
           )
+  check_znode()
+
+  if params.security_enabled and not params.logsearch_use_external_solr:
+    solr_cloud_util.add_solr_roles(params.config,
+                                   roles = [params.infra_solr_role_logsearch, params.infra_solr_role_ranger_admin, params.infra_solr_role_dev],
+                                   new_service_principals = [params.logsearch_kerberos_principal])
+    solr_cloud_util.add_solr_roles(params.config,
+                                   roles = [params.infra_solr_role_logfeeder, params.infra_solr_role_dev],
+                                   new_service_principals = [params.logfeeder_kerberos_principal])
+
+@retry(times=30, sleep_time=5, err_class=Fail)
+def check_znode():
+  import params
+  solr_cloud_util.check_znode(
+    zookeeper_quorum=params.logsearch_solr_zk_quorum,
+    solr_znode=params.logsearch_solr_zk_znode,
+    java64_home=params.java64_home,
+    retry=30, interval=5)
\ No newline at end of file
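
The @retry decorator used above simply re-invokes check_znode until it stops raising Fail. A rough, self-contained approximation of that pattern is sketched below (it is not the resource_management implementation); flaky_check and its failure count are invented for the example.

import time
from functools import wraps

def retry(times=30, sleep_time=5, err_class=Exception):
    # Re-run the wrapped function until it succeeds or 'times' attempts are exhausted.
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(1, times + 1):
                try:
                    return func(*args, **kwargs)
                except err_class:
                    if attempt == times:
                        raise
                    time.sleep(sleep_time)
        return wrapper
    return decorator

attempts = {'n': 0}

@retry(times=5, sleep_time=0, err_class=RuntimeError)
def flaky_check():
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise RuntimeError('znode not ready yet')
    return 'znode ready after {0} attempts'.format(attempts['n'])

print(flaky_check())  # znode ready after 3 attempts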

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index 0b4532b..49cd98b 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -309,6 +309,9 @@ if stack_supports_infra_client and is_solrCloud_enabled:
 solr_user = unix_user
 if has_infra_solr and not is_external_solrCloud_enabled:
   solr_user = default('/configurations/infra-solr-env/infra_solr_user', unix_user)
+  infra_solr_role_ranger_admin = default('configurations/infra-solr-security-json/infra_solr_role_ranger_admin', 'ranger_user')
+  infra_solr_role_ranger_audit = default('configurations/infra-solr-security-json/infra_solr_role_ranger_audit', 'ranger_audit_user')
+  infra_solr_role_dev = default('configurations/infra-solr-security-json/infra_solr_role_dev', 'dev')
 custom_log4j = has_infra_solr and not is_external_solrCloud_enabled
 
 ranger_audit_max_retention_days = config['configurations']['ranger-solr-configuration']['ranger_audit_max_retention_days']

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
index ae49c4f..acb5385 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/setup_ranger_xml.py
@@ -19,6 +19,7 @@ limitations under the License.
 """
 import os
 import re
+from collections import OrderedDict
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions.default import default
 from resource_management.core.logger import Logger
@@ -669,6 +670,20 @@ def setup_ranger_audit_solr():
       jaas_file=params.solr_jaas_file,
       retry=30, interval=5)
 
+  if params.security_enabled and params.has_infra_solr \
+    and not params.is_external_solrCloud_enabled and params.stack_supports_ranger_kerberos:
+
+    solr_cloud_util.add_solr_roles(params.config,
+                                   roles = [params.infra_solr_role_ranger_admin, params.infra_solr_role_ranger_audit, params.infra_solr_role_dev],
+                                   new_service_principals = [params.ranger_admin_jaas_principal])
+    service_default_principals_map = OrderedDict([('hdfs', 'nn'), ('hbase', 'hbase'), ('hive', 'hive'), ('kafka', 'kafka'), ('kms', 'rangerkms'),
+                                                  ('knox', 'knox'), ('nifi', 'nifi'), ('storm', 'storm'), ('yarn', 'yarn')])
+    service_principals = get_ranger_plugin_principals(service_default_principals_map)
+    solr_cloud_util.add_solr_roles(params.config,
+                                   roles = [params.infra_solr_role_ranger_audit, params.infra_solr_role_dev],
+                                   new_service_principals = service_principals)
+
+
   solr_cloud_util.create_collection(
     zookeeper_quorum = params.zookeeper_quorum,
     solr_znode = params.solr_znode,
@@ -679,6 +694,11 @@ def setup_ranger_audit_solr():
     replication_factor = int(params.replication_factor),
     jaas_file = params.solr_jaas_file)
 
+  if params.security_enabled and params.has_infra_solr \
+    and not params.is_external_solrCloud_enabled and params.stack_supports_ranger_kerberos:
+    secure_znode(format('{solr_znode}/configs/{ranger_solr_config_set}'), params.solr_jaas_file)
+    secure_znode(format('{solr_znode}/collections/{ranger_solr_collection_name}'), params.solr_jaas_file)
+
 def setup_ranger_admin_passwd_change():
   import params
 
@@ -695,6 +715,27 @@ def check_znode():
     solr_znode=params.solr_znode,
     java64_home=params.java_home)
 
+def secure_znode(znode, jaasFile):
+  import params
+  solr_cloud_util.secure_znode(config=params.config, zookeeper_quorum=params.zookeeper_quorum,
+                               solr_znode=znode,
+                               jaas_file=jaasFile,
+                               java64_home=params.java_home, sasl_users=[params.ranger_admin_jaas_principal])
+
+def get_ranger_plugin_principals(services_defaults_map):
+  """
+  Get ranger plugin user principals from service-default value maps using ranger-*-audit configurations
+  """
+  import params
+  user_principals = []
+  if len(services_defaults_map) < 1:
+    raise Exception("Services - defaults map parameter is missing.")
+
+  for key, default_value in services_defaults_map.iteritems():
+    user_principal = default(format("configurations/ranger-{key}-audit/xasecure.audit.jaas.Client.option.principal"), default_value)
+    user_principals.append(user_principal)
+  return user_principals
+
 
 def setup_tagsync_ssl_configs():
   import params
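
For clarity, a standalone sketch of what get_ranger_plugin_principals computes, using a made-up configurations dict in place of Ambari's params/default(). In this example only the hdfs entry is overridden by a ranger-*-audit property; the other services fall back to their defaults.

service_defaults = {'hdfs': 'nn', 'hbase': 'hbase', 'kms': 'rangerkms'}  # trimmed example map

# Made-up stand-in for the ranger-*-audit configurations a real cluster would provide.
configurations = {
    'ranger-hdfs-audit': {'xasecure.audit.jaas.Client.option.principal': 'nn/_HOST@EXAMPLE.COM'},
}

def get_ranger_plugin_principals(services_defaults_map):
    principals = []
    for service, default_value in services_defaults_map.items():
        audit_config = configurations.get('ranger-{0}-audit'.format(service), {})
        principals.append(audit_config.get('xasecure.audit.jaas.Client.option.principal', default_value))
    return principals

print(get_ranger_plugin_principals(service_defaults))
# ['nn/_HOST@EXAMPLE.COM', 'hbase', 'rangerkms']  (order follows the map on Python 3.7+)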

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json
index 253e32e..c5b3201 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json
@@ -72,6 +72,9 @@
               "keytab": {
                 "configuration": "ranger-admin-site/xasecure.audit.jaas.Client.option.keyTab"
               }
+            },
+            {
+              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr"
             }
           ]
         },

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
index 1bbf75e..12f8412 100644
--- a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
@@ -303,10 +303,18 @@ class TestMetadataServer(RMFTestCase):
                                     action=['delete'],
                                     create_parents=True)
 
+    self.assertResourceCalled('Execute', "ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/ambari-infra-solr.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -k -s --negotiate -u : http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep authorization.enabled && ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/ambari-infra-solr.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -H 'Content-type:application/json' -d '{\"set-user-role\": {\"atlas@EXAMPLE.COM\": [\"atlas_user\", \"ranger_audit_user\", \"dev\"]}}' -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep 200",
+                              logoutput = True, tries = 30, try_sleep = 10)
+
     self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection vertex_index --config-set atlas_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
     self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection edge_index --config-set atlas_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
     self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection fulltext_index --config-set atlas_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
 
+    self.assertResourceCalled('Execute', "ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr/configs/atlas_configs --secure-znode --jaas-file /usr/hdp/current/atlas-server/conf/atlas_jaas.conf --sasl-users atlas,infra-solr --retry 5 --interval 10")
+    self.assertResourceCalled('Execute', "ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr/collections/vertex_index --secure-znode --jaas-file /usr/hdp/current/atlas-server/conf/atlas_jaas.conf --sasl-users atlas,infra-solr --retry 5 --interval 10")
+    self.assertResourceCalled('Execute', "ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr/collections/edge_index --secure-znode --jaas-file /usr/hdp/current/atlas-server/conf/atlas_jaas.conf --sasl-users atlas,infra-solr --retry 5 --interval 10")
+    self.assertResourceCalled('Execute', "ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr/collections/fulltext_index --secure-znode --jaas-file /usr/hdp/current/atlas-server/conf/atlas_jaas.conf --sasl-users atlas,infra-solr --retry 5 --interval 10")
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/metadata_server.py",
                        classname = "MetadataServer",

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/test/python/stacks/2.3/configs/secure.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/configs/secure.json b/ambari-server/src/test/python/stacks/2.3/configs/secure.json
index 4501b81..e2a3d1d 100644
--- a/ambari-server/src/test/python/stacks/2.3/configs/secure.json
+++ b/ambari-server/src/test/python/stacks/2.3/configs/secure.json
@@ -169,7 +169,9 @@
       "infra_solr_znode": "/infra-solr",
       "infra_solr_user": "solr",
       "infra_solr_group": "solr",
-      "infra_solr_client_log_dir" :"/var/log/ambari-infra-solr-client"
+      "infra_solr_client_log_dir" :"/var/log/ambari-infra-solr-client",
+      "infra_solr_kerberos_principal" : "infra-solr/c6401.ambari.apache.org@EXAMPLE.COM",
+      "infra_solr_kerberos_keytab" : "/etc/security/keytabs/ambari-infra-solr.keytab"
     },
     "infra-solr-client-log4j" : {
       "infra_solr_client_log_dir" : "/var/log/ambari-infra-solr-client",
@@ -236,6 +238,9 @@
     },
     "ranger-env": {
       "xml_configurations_supported" : "true"
+    },
+    "kerberos-env" : {
+      "realm" : "EXAMPLE.COM"
     }
   },
   "configuration_attributes": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py b/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py
index cd88fec..2de3fba 100644
--- a/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py
+++ b/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py
@@ -95,11 +95,11 @@ class TestInfraSolr(RMFTestCase):
                                 content = InlineTemplate(self.getConfig()['configurations']['infra-solr-log4j']['content'])
       )
 
-      self.assertResourceCalled('File', '/etc/ambari-infra-solr/conf/security.json',
+      self.assertResourceCalled('File', '/etc/ambari-infra-solr/conf/custom-security.json',
                                 owner = 'solr',
                                 group='hadoop',
                                 content = InlineTemplate(self.getConfig()['configurations']['infra-solr-security-json']['content']),
-                                mode = 0644
+                                mode = 0640
                                 )
 
       self.assertResourceCalled('Execute', 'ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr --create-znode --retry 30 --interval 5')

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
index db9cbb9..587561a 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
@@ -139,7 +139,8 @@ class TestLogSearch(RMFTestCase):
     self.assertResourceCalled('Execute', ('chmod', '-R', 'ugo+r', '/etc/ambari-logsearch-portal/conf/solr_configsets'),
                               sudo = True
     )
-    
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr --check-znode --retry 30 --interval 5')
+
 
 
   def test_configure_default(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py b/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py
index b01e7da..1b5d7ae 100644
--- a/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py
+++ b/ambari-server/src/test/python/stacks/2.5/RANGER/test_ranger_admin.py
@@ -80,6 +80,7 @@ class TestRangerAdmin(RMFTestCase):
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_ranger_audits_0.[0-9]*',
                                     action=['delete'],
                                     create_parents=True)
+
     self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection ranger_audits --config-set ranger_audits --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
 
     self.assertResourceCalled('Execute', '/usr/bin/ranger-admin-start',
@@ -165,8 +166,18 @@ class TestRangerAdmin(RMFTestCase):
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_ranger_audits_0.[0-9]*',
                                     action=['delete'],
                                     create_parents=True)
+
+    self.assertResourceCalled('Execute', "ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/infra-solr.service.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -k -s --negotiate -u : http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep authorization.enabled && ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/infra-solr.service.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -H 'Content-type:application/json' -d '{\"set-user-role\": {\"rangeradmin@EXAMPLE.COM\": [\"ranger_user\", \"ranger_audit_user\", \"dev\"]}}' -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep 200",
+                              logoutput = True, tries = 30, try_sleep = 10)
+    self.assertResourceCalled('Execute', "ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/infra-solr.service.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -k -s --negotiate -u : http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep authorization.enabled && ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/infra-solr.service.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -H \'Content-type:application/json\' -d "
+                                         "\'{\"set-user-role\": {\"hbase@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"nn@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"knox@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"rangerkms@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"kafka@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"hive@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"nifi@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"storm@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"yarn@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"]}}\' -s -o /dev/null -w\'%{http_code}\' --negotiate -u: -k http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep 200",
+                              logoutput = True, tries = 30, try_sleep = 10)
+
     self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/ambari-solr --create-collection --collection ranger_audits --config-set ranger_audits --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
 
+    self.assertResourceCalled('Execute','ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /ambari-solr/configs/ranger_audits --secure-znode --jaas-file /usr/hdp/current/ranger-admin/conf/ranger_solr_jaas.conf --sasl-users rangeradmin,infra-solr --retry 5 --interval 10')
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /ambari-solr/collections/ranger_audits --secure-znode --jaas-file /usr/hdp/current/ranger-admin/conf/ranger_solr_jaas.conf --sasl-users rangeradmin,infra-solr --retry 5 --interval 10')
+
     self.assertResourceCalled('Execute', '/usr/bin/ranger-admin-start',
       environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
       not_if = 'ps -ef | grep proc_rangeradmin | grep -v grep',

http://git-wip-us.apache.org/repos/asf/ambari/blob/347ba2a9/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_admin.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_admin.py b/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_admin.py
index 8dda363..fb1dd0e 100644
--- a/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_admin.py
+++ b/ambari-server/src/test/python/stacks/2.6/RANGER/test_ranger_admin.py
@@ -156,8 +156,17 @@ class TestRangerAdmin(RMFTestCase):
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_ranger_audits_0.[0-9]*',
                                     action=['delete'],
                                     create_parents=True)
+    self.assertResourceCalled('Execute', "ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/infra-solr.service.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -k -s --negotiate -u : http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep authorization.enabled && ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/infra-solr.service.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -H 'Content-type:application/json' -d '{\"set-user-role\": {\"rangeradmin@EXAMPLE.COM\": [\"ranger_user\", \"ranger_audit_user\", \"dev\"]}}' -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep 200",
+                              logoutput = True, tries = 30, try_sleep = 10)
+    self.assertResourceCalled('Execute', "ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/infra-solr.service.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -k -s --negotiate -u : http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep authorization.enabled && ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/infra-solr.service.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -H \'Content-type:application/json\' -d "
+                                         "\'{\"set-user-role\": {\"hbase@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"nn@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"knox@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"rangerkms@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"kafka@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"hive@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"nifi@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"storm@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"], \"yarn@EXAMPLE.COM\": [\"ranger_audit_user\", \"dev\"]}}\' -s -o /dev/null -w\'%{http_code}\' --negotiate -u: -k http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep 200",
+                              logoutput = True, tries = 30, try_sleep = 10)
+
     self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection ranger_audits --config-set ranger_audits --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
 
+    self.assertResourceCalled('Execute','ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr/configs/ranger_audits --secure-znode --jaas-file /usr/hdp/current/ranger-admin/conf/ranger_solr_jaas.conf --sasl-users rangeradmin,infra-solr --retry 5 --interval 10')
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr/collections/ranger_audits --secure-znode --jaas-file /usr/hdp/current/ranger-admin/conf/ranger_solr_jaas.conf --sasl-users rangeradmin,infra-solr --retry 5 --interval 10')
+
     self.assertResourceCalled('Execute', '/usr/bin/ranger-admin-start',
       environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
       not_if = 'ps -ef | grep proc_rangeradmin | grep -v grep',


[46/50] [abbrv] ambari git commit: Updated committer list. (yusaku)

Posted by nc...@apache.org.
Updated committer list. (yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/db51ba46
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/db51ba46
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/db51ba46

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: db51ba46dde65bc6034d7deb1e64b18d8a460185
Parents: 0fc7a66
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Fri Feb 17 10:27:26 2017 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Fri Feb 17 10:27:26 2017 -0800

----------------------------------------------------------------------
 docs/pom.xml | 34 +++++++++++++++++++++++-----------
 1 file changed, 23 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/db51ba46/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index db907d1..6c0da94 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -386,27 +386,39 @@
             </organization>
         </developer>
         <developer>
-            <id>eyang</id>
-            <name>Eric Yang</name>
-            <email>eyang@apache.org</email>
+            <id>eboyd</id>
+            <name>Erin A. Boyd</name>
+            <email>eboyd@apache.org</email>
             <timezone>-8</timezone>
             <roles>
-                <role>PMC</role>
+                <role>Committer</role>
             </roles>
             <organization>
-                IBM            
-            </organization>            
+                 Red Hat
+            </organization>
         </developer>
         <developer>
-            <id>eboyd</id>
-            <name>Erin A. Boyd</name>
-            <email>eboyd@apache.org</email>
-            <timezone>-8</timezone>
+            <id>echekanskiy</id>
+            <name>Eugene Chekanskiy</name>
+            <email>echekanskiy@apache.org</email>
+            <timezone>+2</timezone>
             <roles>
                 <role>Committer</role>
             </roles>
             <organization>
-                 Red Hat
+                 Hortonworks
+            </organization>
+        </developer>
+        <developer>
+            <id>eyang</id>
+            <name>Eric Yang</name>
+            <email>eyang@apache.org</email>
+            <timezone>-8</timezone>
+            <roles>
+                <role>PMC</role>
+            </roles>
+            <organization>
+                IBM
             </organization>
         </developer>
         <developer>


[43/50] [abbrv] ambari git commit: AMBARI-20013. Addendum - Add Solr authorization settings during LogSearch/Atlas/Ranger startup (oleewere)

Posted by nc...@apache.org.
AMBARI-20013. Addendum - Add Solr authorization settings during LogSearch/Atlas/Ranger startup (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/dd174f41
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/dd174f41
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/dd174f41

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: dd174f417cbfe4181e5503c57b4c273009c856ae
Parents: bcf7289
Author: oleewere <ol...@gmail.com>
Authored: Fri Feb 17 17:52:29 2017 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Fri Feb 17 17:52:48 2017 +0100

----------------------------------------------------------------------
 .../src/test/python/stacks/2.3/ATLAS/test_metadata_server.py   | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/dd174f41/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
index 12f8412..388aa57 100644
--- a/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/ATLAS/test_metadata_server.py
@@ -20,6 +20,7 @@ limitations under the License.
 
 from mock.mock import MagicMock, call, patch
 from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions import get_kinit_path
 from stacks.utils.RMFTestCase import *
 import json
 import sys
@@ -302,8 +303,9 @@ class TestMetadataServer(RMFTestCase):
     self.assertResourceCalledRegexp('^Directory$', '^/tmp/solr_config_atlas_configs_0.[0-9]*',
                                     action=['delete'],
                                     create_parents=True)
-
-    self.assertResourceCalled('Execute', "ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/ambari-infra-solr.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -k -s --negotiate -u : http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep authorization.enabled && ambari-sudo.sh /usr/bin/kinit -kt /etc/security/keytabs/ambari-infra-solr.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -H 'Content-type:application/json' -d '{\"set-user-role\": {\"atlas@EXAMPLE.COM\": [\"atlas_user\", \"ranger_audit_user\", \"dev\"]}}' -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep 200",
+    kinit_path_local = get_kinit_path()
+    self.assertResourceCalled('Execute', "ambari-sudo.sh " + kinit_path_local + " -kt /etc/security/keytabs/ambari-infra-solr.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -k -s --negotiate -u : http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep authorization.enabled && ambari-sudo.sh "
+                              + kinit_path_local +" -kt /etc/security/keytabs/ambari-infra-solr.keytab infra-solr/c6401.ambari.apache.org@EXAMPLE.COM; ambari-sudo.sh curl -H 'Content-type:application/json' -d '{\"set-user-role\": {\"atlas@EXAMPLE.COM\": [\"atlas_user\", \"ranger_audit_user\", \"dev\"]}}' -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://c6401.ambari.apache.org:8886/solr/admin/authorization | grep 200",
                               logoutput = True, tries = 30, try_sleep = 10)
 
     self.assertResourceCalledRegexp('^Execute$', '^ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --create-collection --collection vertex_index --config-set atlas_configs --shards 1 --replication 1 --max-shards 1 --retry 5 --interval 10')
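
For illustration only, a minimal sketch of the pattern this test change relies on: resolving the kinit binary via get_kinit_path() rather than hardcoding /usr/bin/kinit. The helper name build_solr_kinit_cmd is hypothetical and not part of the commit; the import and the no-argument call are the ones shown in the diff above.

    from resource_management.libraries.functions import get_kinit_path

    def build_solr_kinit_cmd(keytab, principal):
        # Resolve kinit from the standard Kerberos client locations rather than
        # assuming /usr/bin/kinit, mirroring what the updated assertion expects.
        kinit_path_local = get_kinit_path()
        return "ambari-sudo.sh {0} -kt {1} {2}".format(kinit_path_local, keytab, principal)

    # Example with the keytab and principal used by the test:
    cmd = build_solr_kinit_cmd("/etc/security/keytabs/ambari-infra-solr.keytab",
                               "infra-solr/c6401.ambari.apache.org@EXAMPLE.COM")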


[19/50] [abbrv] ambari git commit: AMBARI-19965. Move kerberos properties for superset to correct level in kerberos.json. (Nishant Bangarwa via Swapan Shridhar)

Posted by nc...@apache.org.
AMBARI-19965. Move kerberos properties for superset to correct level in kerberos.json. (Nishant Bangarwa via Swapan Shridhar)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/11618bb6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/11618bb6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/11618bb6

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 11618bb6f65b9723c7198b6f59ef25515499739e
Parents: 9023528
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Wed Feb 15 13:44:28 2017 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Wed Feb 15 13:44:28 2017 -0800

----------------------------------------------------------------------
 .../stacks/HDP/2.6/services/DRUID/kerberos.json         | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/11618bb6/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
index 0ba5071..6aefc63 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/DRUID/kerberos.json
@@ -112,15 +112,17 @@
         {
           "druid-common": {
             "druid.hadoop.security.spnego.excludedPaths": "[\"/status\"]",
-            "druid.security.extensions.loadList" : "[\"druid-kerberos\"]"
-          },
+            "druid.security.extensions.loadList": "[\"druid-kerberos\"]"
+          }
+        },
+        {
           "druid-superset": {
-            "ENABLE_KERBEROS_AUTHENTICATION" : "True",
-            "KERBEROS_REINIT_TIME_SEC" : 3600
+            "ENABLE_KERBEROS_AUTHENTICATION": "True",
+            "KERBEROS_REINIT_TIME_SEC": "3600"
           }
         }
       ],
-      "auth_to_local_properties" : [
+      "auth_to_local_properties": [
         "druid-common/druid.hadoop.security.spnego.authToLocal|new_lines_escaped"
       ]
     }


[25/50] [abbrv] ambari git commit: AMBARI-19979. Issue while resetting the coordinator.(Venkata Sairam via gauravn7)

Posted by nc...@apache.org.
AMBARI-19979. Issue while resetting the coordinator.(Venkata Sairam via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8448d5a1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8448d5a1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8448d5a1

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 8448d5a18512565895fcf79f675437d9652a0d1d
Parents: f75eeba
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Thu Feb 16 16:16:51 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Thu Feb 16 16:18:09 2017 +0530

----------------------------------------------------------------------
 .../src/main/resources/ui/app/components/designer-workspace.js     | 2 +-
 contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8448d5a1/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
index aa2e791..f93e1b8 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
@@ -234,7 +234,7 @@ export default Ember.Component.extend({
       this.createOrShowProjManager();
     },
     showWarning(index){
-      this.$('#ConfirmDialog').remove();
+      this.set('showingWarning', false);
       var tab = this.get('tabs').objectAt(index);
       this.set('indexToClose', index);
       if(tab && tab.type ==='dashboard'){

http://git-wip-us.apache.org/repos/asf/ambari/blob/8448d5a1/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
index f4869e4..86c1dd3 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
@@ -80,7 +80,7 @@ a {
   margin-right: 10px !important;
 }
 .padding15pcnt {
-  padding:15%;
+  padding:8px;
 }
 #arrow{
     top: 24%;


[05/50] [abbrv] ambari git commit: AMBARI-20020. Ambari-server restart --debug failed (aonishuk)

Posted by nc...@apache.org.
AMBARI-20020. Ambari-server restart --debug failed (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/11893d46
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/11893d46
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/11893d46

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 11893d463f8001e97bfc71c18c7a650b19e969c0
Parents: f91095b
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Wed Feb 15 14:42:03 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Wed Feb 15 14:42:03 2017 +0200

----------------------------------------------------------------------
 ambari-server/src/main/python/ambari-server.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/11893d46/ambari-server/src/main/python/ambari-server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari-server.py b/ambari-server/src/main/python/ambari-server.py
index 5002cd0..c985d65 100755
--- a/ambari-server/src/main/python/ambari-server.py
+++ b/ambari-server/src/main/python/ambari-server.py
@@ -167,15 +167,18 @@ def stop(args):
     try:
       os.kill(pid, signal.SIGTERM)
     except OSError, e:
-      print_info_msg("Unable to stop Ambari Server - " + str(e))
-      return
+      err = "Unable to stop Ambari Server - " + str(e)
+      print_info_msg(err)
+      raise FatalException(1, err)
 
     print "Waiting for server stop..."
     logger.info("Waiting for server stop...")
 
     if not wait_for_server_to_stop(SERVER_STOP_TIMEOUT):
-      print "Ambari-server failed to stop"
-      logger.info("Ambari-server failed to stop")
+      err = "Ambari-server failed to stop"
+      print err
+      logger.error(err)
+      raise FatalException(1, err)
 
     pid_file_path = os.path.join(configDefaults.PID_DIR, PID_NAME)
     os.remove(pid_file_path)
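
For illustration only, a minimal sketch of why raising FatalException from stop() matters for "ambari-server restart": a caller that only sees a printed message cannot tell that the stop failed. The FatalException class and restart wrapper below are simplified stand-ins, not the actual ambari-server.py code.

    class FatalException(Exception):
        # Simplified stand-in for ambari-server's FatalException(code, reason).
        def __init__(self, code, reason):
            Exception.__init__(self, reason)
            self.code = code
            self.reason = reason

    def stop_or_fail(server_still_running):
        # Mirrors the fixed behaviour: a failed stop now raises instead of just
        # printing, so the failure propagates to whatever invoked the stop.
        if server_still_running:
            err = "Ambari-server failed to stop"
            print err
            raise FatalException(1, err)

    def restart(server_still_running, start):
        stop_or_fail(server_still_running)  # raises on failure; start() is skipped
        start()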


[06/50] [abbrv] ambari git commit: AMBARI-19984. ambari-server upgrade is not idempotent (magyari_sandor)

Posted by nc...@apache.org.
AMBARI-19984. ambari-server upgrade is not idempotent (magyari_sandor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/513b5275
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/513b5275
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/513b5275

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 513b527564cef89dff2154504ef4108d6eea5123
Parents: 11893d4
Author: Sandor Magyari <sm...@hortonworks.com>
Authored: Wed Feb 15 13:46:29 2017 +0100
Committer: Sandor Magyari <sm...@hortonworks.com>
Committed: Wed Feb 15 14:00:04 2017 +0100

----------------------------------------------------------------------
 .../server/orm/entities/PermissionEntity.java   | 30 ++++++++--
 .../internal/InternalAuthenticationToken.java   | 24 +-------
 .../server/upgrade/AbstractUpgradeCatalog.java  |  2 +-
 .../security/TestAuthenticationFactory.java     | 44 ++++----------
 .../authorization/AuthorizationHelperTest.java  | 24 ++++----
 .../server/upgrade/UpgradeCatalog242Test.java   | 27 +++------
 .../server/upgrade/UpgradeCatalog250Test.java   | 61 ++++++++++++++------
 7 files changed, 104 insertions(+), 108 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/513b5275/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PermissionEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PermissionEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PermissionEntity.java
index fb01cca..a7a07f3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PermissionEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/PermissionEntity.java
@@ -20,6 +20,8 @@ package org.apache.ambari.server.orm.entities;
 
 
 import java.util.Collection;
+import java.util.LinkedHashSet;
+import java.util.Set;
 
 import javax.persistence.Column;
 import javax.persistence.Entity;
@@ -37,6 +39,8 @@ import javax.persistence.OneToOne;
 import javax.persistence.Table;
 import javax.persistence.TableGenerator;
 
+import org.apache.ambari.server.security.authorization.RoleAuthorization;
+
 /**
  * Represents an admin permission.
  */
@@ -120,7 +124,7 @@ public class PermissionEntity {
       joinColumns = {@JoinColumn(name = "permission_id")},
       inverseJoinColumns = {@JoinColumn(name = "authorization_id")}
   )
-  private Collection<RoleAuthorizationEntity> authorizations;
+  private Set<RoleAuthorizationEntity> authorizations = new LinkedHashSet<>();
 
   /**
    * The permission's explicit sort order
@@ -230,12 +234,26 @@ public class PermissionEntity {
   }
 
   /**
-   * Sets the collection of granular authorizations for this PermissionEntity
-   *
-   * @param authorizations a collection of granular authorizations
+   * Add roleAuthorization if it's not already added
+   */
+  public void addAuthorization(RoleAuthorizationEntity roleAuthorization) {
+    authorizations.add(roleAuthorization);
+  }
+
+  /**
+   * Add multiple role authorizations
    */
-  public void setAuthorizations(Collection<RoleAuthorizationEntity> authorizations) {
-    this.authorizations = authorizations;
+  public void addAuthorizations(Collection<RoleAuthorization> roleAuthorizations) {
+    for (RoleAuthorization roleAuthorization : roleAuthorizations) {
+      addAuthorization(createRoleAuthorizationEntity(roleAuthorization));
+    }
+  }
+
+  private static RoleAuthorizationEntity createRoleAuthorizationEntity(RoleAuthorization authorization) {
+    RoleAuthorizationEntity roleAuthorizationEntity = new RoleAuthorizationEntity();
+    roleAuthorizationEntity.setAuthorizationId(authorization.getId());
+    roleAuthorizationEntity.setAuthorizationName(authorization.name());
+    return roleAuthorizationEntity;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/513b5275/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
index 8e69004..920db7a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/internal/InternalAuthenticationToken.java
@@ -18,16 +18,14 @@
 
 package org.apache.ambari.server.security.authorization.internal;
 
-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.List;
+import java.util.EnumSet;
 
 import org.apache.ambari.server.orm.entities.PermissionEntity;
 import org.apache.ambari.server.orm.entities.PrivilegeEntity;
 import org.apache.ambari.server.orm.entities.ResourceEntity;
 import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
-import org.apache.ambari.server.orm.entities.RoleAuthorizationEntity;
 import org.apache.ambari.server.security.authorization.AmbariGrantedAuthority;
 import org.apache.ambari.server.security.authorization.ResourceType;
 import org.apache.ambari.server.security.authorization.RoleAuthorization;
@@ -60,7 +58,7 @@ public class InternalAuthenticationToken implements Authentication {
     PermissionEntity pe = new PermissionEntity();
     pe.setId(PermissionEntity.AMBARI_ADMINISTRATOR_PERMISSION);
     pe.setPermissionName(PermissionEntity.AMBARI_ADMINISTRATOR_PERMISSION_NAME);
-    pe.setAuthorizations(createAdminAuthorizations());
+    pe.addAuthorizations(EnumSet.allOf(RoleAuthorization.class));
     entity.setPermission(pe);
     
     ResourceEntity resource = new ResourceEntity();
@@ -73,24 +71,6 @@ public class InternalAuthenticationToken implements Authentication {
     entity.setResource(resource);
   }
 
-  /**
-   * Creates the collection of RoleAuthorizationEntity objects that an administrative user would have.
-   *
-   * @return a collection of RoleAuthorizationEntity objects
-   */
-  private static Collection<RoleAuthorizationEntity> createAdminAuthorizations() {
-    List<RoleAuthorizationEntity> authorizations = new ArrayList<RoleAuthorizationEntity>();
-
-    for (RoleAuthorization roleAuthorization : RoleAuthorization.values()) {
-      RoleAuthorizationEntity re = new RoleAuthorizationEntity();
-      re.setAuthorizationId(roleAuthorization.getId());
-      re.setAuthorizationName(roleAuthorization.name());
-      authorizations.add(re);
-    }
-
-    return authorizations;
-  }
-
   public InternalAuthenticationToken(String tokenString) {
     this.token = tokenString;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/513b5275/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
index 4b33bcd..20280fb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
@@ -884,7 +884,7 @@ public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
 
       PermissionEntity role = permissionDAO.findPermissionByNameAndType(roleName, resourceTypeDAO.findByName(resourceType));
       if (role != null) {
-        role.getAuthorizations().add(roleAuthorization);
+        role.addAuthorization(roleAuthorization);
         permissionDAO.merge(role);
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/513b5275/ambari-server/src/test/java/org/apache/ambari/server/security/TestAuthenticationFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/TestAuthenticationFactory.java b/ambari-server/src/test/java/org/apache/ambari/server/security/TestAuthenticationFactory.java
index 1e68f9d..39b3d47 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/TestAuthenticationFactory.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/TestAuthenticationFactory.java
@@ -18,17 +18,14 @@
 
 package org.apache.ambari.server.security;
 
-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
-import java.util.Set;
 
 import org.apache.ambari.server.orm.entities.PermissionEntity;
 import org.apache.ambari.server.orm.entities.PrivilegeEntity;
 import org.apache.ambari.server.orm.entities.ResourceEntity;
 import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
-import org.apache.ambari.server.orm.entities.RoleAuthorizationEntity;
 import org.apache.ambari.server.security.authorization.AmbariGrantedAuthority;
 import org.apache.ambari.server.security.authorization.ResourceType;
 import org.apache.ambari.server.security.authorization.RoleAuthorization;
@@ -173,7 +170,7 @@ public class TestAuthenticationFactory {
     PermissionEntity permissionEntity = new PermissionEntity();
     permissionEntity.setId(PermissionEntity.AMBARI_ADMINISTRATOR_PERMISSION);
     permissionEntity.setResourceType(createResourceTypeEntity(ResourceType.AMBARI));
-    permissionEntity.setAuthorizations(createAuthorizations(EnumSet.allOf(RoleAuthorization.class)));
+    permissionEntity.addAuthorizations(EnumSet.allOf(RoleAuthorization.class));
     return permissionEntity;
   }
 
@@ -181,7 +178,7 @@ public class TestAuthenticationFactory {
     PermissionEntity permissionEntity = new PermissionEntity();
     permissionEntity.setId(PermissionEntity.CLUSTER_ADMINISTRATOR_PERMISSION);
     permissionEntity.setResourceType(createResourceTypeEntity(ResourceType.CLUSTER));
-    permissionEntity.setAuthorizations(createAuthorizations(EnumSet.of(
+    permissionEntity.addAuthorizations(EnumSet.of(
         RoleAuthorization.CLUSTER_MANAGE_CREDENTIALS,
         RoleAuthorization.CLUSTER_MODIFY_CONFIGS,
         RoleAuthorization.CLUSTER_MANAGE_CONFIG_GROUPS,
@@ -220,7 +217,7 @@ public class TestAuthenticationFactory {
         RoleAuthorization.CLUSTER_RUN_CUSTOM_COMMAND,
         RoleAuthorization.SERVICE_MANAGE_AUTO_START,
         RoleAuthorization.CLUSTER_MANAGE_AUTO_START,
-        RoleAuthorization.CLUSTER_MANAGE_USER_PERSISTED_DATA)));
+        RoleAuthorization.CLUSTER_MANAGE_USER_PERSISTED_DATA));
     return permissionEntity;
   }
 
@@ -228,7 +225,7 @@ public class TestAuthenticationFactory {
     PermissionEntity permissionEntity = new PermissionEntity();
     permissionEntity.setId(5);
     permissionEntity.setResourceType(createResourceTypeEntity(ResourceType.CLUSTER));
-    permissionEntity.setAuthorizations(createAuthorizations(EnumSet.of(
+    permissionEntity.addAuthorizations(EnumSet.of(
         RoleAuthorization.HOST_VIEW_CONFIGS,
         RoleAuthorization.HOST_ADD_DELETE_COMPONENTS,
         RoleAuthorization.HOST_VIEW_METRICS,
@@ -261,7 +258,7 @@ public class TestAuthenticationFactory {
         RoleAuthorization.SERVICE_VIEW_OPERATIONAL_LOGS,
         RoleAuthorization.SERVICE_MANAGE_AUTO_START,
         RoleAuthorization.CLUSTER_MANAGE_AUTO_START,
-        RoleAuthorization.CLUSTER_MANAGE_CREDENTIALS)));
+        RoleAuthorization.CLUSTER_MANAGE_CREDENTIALS));
     return permissionEntity;
   }
 
@@ -269,7 +266,7 @@ public class TestAuthenticationFactory {
     PermissionEntity permissionEntity = new PermissionEntity();
     permissionEntity.setId(5);
     permissionEntity.setResourceType(createResourceTypeEntity(ResourceType.CLUSTER));
-    permissionEntity.setAuthorizations(createAuthorizations(EnumSet.of(
+    permissionEntity.addAuthorizations(EnumSet.of(
         RoleAuthorization.CLUSTER_VIEW_ALERTS,
         RoleAuthorization.CLUSTER_VIEW_CONFIGS,
         RoleAuthorization.CLUSTER_VIEW_METRICS,
@@ -296,7 +293,7 @@ public class TestAuthenticationFactory {
         RoleAuthorization.SERVICE_VIEW_STATUS_INFO,
         RoleAuthorization.SERVICE_VIEW_OPERATIONAL_LOGS,
         RoleAuthorization.SERVICE_MANAGE_AUTO_START,
-        RoleAuthorization.CLUSTER_MANAGE_USER_PERSISTED_DATA)));
+        RoleAuthorization.CLUSTER_MANAGE_USER_PERSISTED_DATA));
     return permissionEntity;
   }
 
@@ -304,7 +301,7 @@ public class TestAuthenticationFactory {
     PermissionEntity permissionEntity = new PermissionEntity();
     permissionEntity.setId(6);
     permissionEntity.setResourceType(createResourceTypeEntity(ResourceType.CLUSTER));
-    permissionEntity.setAuthorizations(createAuthorizations(EnumSet.of(
+    permissionEntity.addAuthorizations(EnumSet.of(
         RoleAuthorization.SERVICE_VIEW_CONFIGS,
         RoleAuthorization.SERVICE_VIEW_METRICS,
         RoleAuthorization.SERVICE_VIEW_STATUS_INFO,
@@ -322,7 +319,7 @@ public class TestAuthenticationFactory {
         RoleAuthorization.CLUSTER_VIEW_STACK_DETAILS,
         RoleAuthorization.CLUSTER_VIEW_STATUS_INFO,
         RoleAuthorization.CLUSTER_MANAGE_USER_PERSISTED_DATA
-    )));
+    ));
     return permissionEntity;
   }
 
@@ -330,7 +327,7 @@ public class TestAuthenticationFactory {
     PermissionEntity permissionEntity = new PermissionEntity();
     permissionEntity.setId(PermissionEntity.CLUSTER_USER_PERMISSION);
     permissionEntity.setResourceType(createResourceTypeEntity(ResourceType.CLUSTER));
-    permissionEntity.setAuthorizations(createAuthorizations(EnumSet.of(
+    permissionEntity.addAuthorizations(EnumSet.of(
         RoleAuthorization.SERVICE_VIEW_CONFIGS,
         RoleAuthorization.SERVICE_VIEW_METRICS,
         RoleAuthorization.SERVICE_VIEW_STATUS_INFO,
@@ -344,7 +341,7 @@ public class TestAuthenticationFactory {
         RoleAuthorization.CLUSTER_VIEW_STACK_DETAILS,
         RoleAuthorization.CLUSTER_VIEW_STATUS_INFO,
         RoleAuthorization.CLUSTER_MANAGE_USER_PERSISTED_DATA
-    )));
+    ));
     return permissionEntity;
   }
 
@@ -352,9 +349,7 @@ public class TestAuthenticationFactory {
     PermissionEntity permissionEntity = new PermissionEntity();
     permissionEntity.setId(PermissionEntity.VIEW_USER_PERMISSION);
     permissionEntity.setResourceType(createResourceTypeEntity(ResourceType.CLUSTER));
-    permissionEntity.setAuthorizations(createAuthorizations(EnumSet.of(
-      RoleAuthorization.VIEW_USE
-    )));
+    permissionEntity.addAuthorizations(EnumSet.of(RoleAuthorization.VIEW_USE));
     return permissionEntity;
   }
 
@@ -396,21 +391,6 @@ public class TestAuthenticationFactory {
     return resourceTypeEntity;
   }
 
-  private static RoleAuthorizationEntity createRoleAuthorizationEntity(RoleAuthorization authorization) {
-    RoleAuthorizationEntity roleAuthorizationEntity = new RoleAuthorizationEntity();
-    roleAuthorizationEntity.setAuthorizationId(authorization.getId());
-    roleAuthorizationEntity.setAuthorizationName(authorization.name());
-    return roleAuthorizationEntity;
-  }
-
-  private static Collection<RoleAuthorizationEntity> createAuthorizations(Set<RoleAuthorization> roleAuthorizations) {
-    Collection<RoleAuthorizationEntity> roleAuthorizationEntities = new ArrayList<RoleAuthorizationEntity>();
-    for (RoleAuthorization roleAuthorization : roleAuthorizations) {
-      roleAuthorizationEntities.add(createRoleAuthorizationEntity(roleAuthorization));
-    }
-    return roleAuthorizationEntities;
-  }
-
   private static class TestAuthorization implements Authentication {
     private final String name;
     private final Collection<? extends GrantedAuthority> authorities;

http://git-wip-us.apache.org/repos/asf/ambari/blob/513b5275/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AuthorizationHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AuthorizationHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AuthorizationHelperTest.java
index 7fb8867..26eb8fb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AuthorizationHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AuthorizationHelperTest.java
@@ -260,16 +260,16 @@ public class AuthorizationHelperTest  extends EasyMockSupport {
     cluster2ResourceEntity.setId(2L);
 
     PermissionEntity readOnlyPermissionEntity = new PermissionEntity();
-    readOnlyPermissionEntity.setAuthorizations(Collections.singleton(readOnlyRoleAuthorizationEntity));
+    readOnlyPermissionEntity.addAuthorization(readOnlyRoleAuthorizationEntity);
 
     PermissionEntity privilegedPermissionEntity = new PermissionEntity();
-    privilegedPermissionEntity.setAuthorizations(Arrays.asList(readOnlyRoleAuthorizationEntity,
-        privilegedRoleAuthorizationEntity));
+    privilegedPermissionEntity.addAuthorization(readOnlyRoleAuthorizationEntity);
+    privilegedPermissionEntity.addAuthorization(privilegedRoleAuthorizationEntity);
 
     PermissionEntity administratorPermissionEntity = new PermissionEntity();
-    administratorPermissionEntity.setAuthorizations(Arrays.asList(readOnlyRoleAuthorizationEntity,
-        privilegedRoleAuthorizationEntity,
-        administratorRoleAuthorizationEntity));
+    administratorPermissionEntity.addAuthorization(readOnlyRoleAuthorizationEntity);
+    administratorPermissionEntity.addAuthorization(privilegedRoleAuthorizationEntity);
+    administratorPermissionEntity.addAuthorization(administratorRoleAuthorizationEntity);
 
     PrivilegeEntity readOnlyPrivilegeEntity = new PrivilegeEntity();
     readOnlyPrivilegeEntity.setPermission(readOnlyPermissionEntity);
@@ -395,16 +395,16 @@ public class AuthorizationHelperTest  extends EasyMockSupport {
     viewResourceEntity.setId(53L);
 
     PermissionEntity readOnlyPermissionEntity = new PermissionEntity();
-    readOnlyPermissionEntity.setAuthorizations(Collections.singleton(readOnlyRoleAuthorizationEntity));
+    readOnlyPermissionEntity.addAuthorization(readOnlyRoleAuthorizationEntity);
 
     PermissionEntity viewUsePermissionEntity = new PermissionEntity();
-    viewUsePermissionEntity.setAuthorizations(Arrays.asList(readOnlyRoleAuthorizationEntity,
-        viewUseRoleAuthorizationEntity));
+    viewUsePermissionEntity.addAuthorization(readOnlyRoleAuthorizationEntity);
+    viewUsePermissionEntity.addAuthorization(viewUseRoleAuthorizationEntity);
 
     PermissionEntity administratorPermissionEntity = new PermissionEntity();
-    administratorPermissionEntity.setAuthorizations(Arrays.asList(readOnlyRoleAuthorizationEntity,
-        viewUseRoleAuthorizationEntity,
-        administratorRoleAuthorizationEntity));
+    administratorPermissionEntity.addAuthorization(readOnlyRoleAuthorizationEntity);
+    administratorPermissionEntity.addAuthorization(viewUseRoleAuthorizationEntity);
+    administratorPermissionEntity.addAuthorization(administratorRoleAuthorizationEntity);
 
     PrivilegeEntity readOnlyPrivilegeEntity = new PrivilegeEntity();
     readOnlyPrivilegeEntity.setPermission(readOnlyPermissionEntity);

http://git-wip-us.apache.org/repos/asf/ambari/blob/513b5275/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog242Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog242Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog242Test.java
index 0cd4f12f..2d0064f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog242Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog242Test.java
@@ -35,7 +35,6 @@ import static org.easymock.EasyMock.verify;
 import java.lang.reflect.Method;
 import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -388,18 +387,10 @@ public class UpgradeCatalog242Test {
 
     ResourceTypeEntity clusterResourceTypeEntity = easyMockSupport.createMock(ResourceTypeEntity.class);
 
-    Collection<RoleAuthorizationEntity> ambariAdministratorAuthorizations = new ArrayList<RoleAuthorizationEntity>();
-    Collection<RoleAuthorizationEntity> clusterAdministratorAuthorizations = new ArrayList<RoleAuthorizationEntity>();
-
-    PermissionEntity clusterAdministratorPermissionEntity = easyMockSupport.createMock(PermissionEntity.class);
-    expect(clusterAdministratorPermissionEntity.getAuthorizations())
-        .andReturn(clusterAdministratorAuthorizations)
-        .times(1);
-
-    PermissionEntity ambariAdministratorPermissionEntity = easyMockSupport.createMock(PermissionEntity.class);
-    expect(ambariAdministratorPermissionEntity.getAuthorizations())
-        .andReturn(ambariAdministratorAuthorizations)
-        .times(2);
+    PermissionEntity clusterAdministratorPermissionEntity = new PermissionEntity();
+    clusterAdministratorPermissionEntity.setId(1);
+    PermissionEntity ambariAdministratorPermissionEntity = new PermissionEntity();
+    ambariAdministratorPermissionEntity.setId(2);
 
     PermissionDAO permissionDAO = easyMockSupport.createMock(PermissionDAO.class);
     expect(permissionDAO.findPermissionByNameAndType("AMBARI.ADMINISTRATOR", ambariResourceTypeEntity))
@@ -449,11 +440,11 @@ public class UpgradeCatalog242Test {
     Assert.assertEquals("CLUSTER.RUN_CUSTOM_COMMAND", clusterRunCustomCommandEntity.getAuthorizationId());
     Assert.assertEquals("Perform custom cluster-level actions", clusterRunCustomCommandEntity.getAuthorizationName());
 
-    Assert.assertEquals(2, ambariAdministratorAuthorizations.size());
-    Assert.assertTrue(ambariAdministratorAuthorizations.contains(clusterRunCustomCommandEntity));
-    Assert.assertTrue(ambariAdministratorAuthorizations.contains(ambariRunCustomCommandEntity));
+    Assert.assertEquals(2, ambariAdministratorPermissionEntity.getAuthorizations().size());
+    Assert.assertTrue(ambariAdministratorPermissionEntity.getAuthorizations().contains(clusterRunCustomCommandEntity));
+    Assert.assertTrue(ambariAdministratorPermissionEntity.getAuthorizations().contains(ambariRunCustomCommandEntity));
 
-    Assert.assertEquals(1, clusterAdministratorAuthorizations.size());
-    Assert.assertTrue(clusterAdministratorAuthorizations.contains(clusterRunCustomCommandEntity));
+    Assert.assertEquals(1, clusterAdministratorPermissionEntity.getAuthorizations().size());
+    Assert.assertTrue(clusterAdministratorPermissionEntity.getAuthorizations().contains(clusterRunCustomCommandEntity));
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/513b5275/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
index 2836858..f4212d6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
@@ -37,8 +37,6 @@ import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -1580,16 +1578,10 @@ public class UpgradeCatalog250Test {
 
     ResourceTypeEntity clusterResourceTypeEntity = easyMockSupport.createMock(ResourceTypeEntity.class);
 
-    Collection<RoleAuthorizationEntity> ambariAdministratorAuthorizations = new ArrayList<RoleAuthorizationEntity>();
-    Collection<RoleAuthorizationEntity> clusterAdministratorAuthorizations = new ArrayList<RoleAuthorizationEntity>();
-
-    PermissionEntity clusterAdministratorPermissionEntity = easyMockSupport.createMock(PermissionEntity.class);
-    expect(clusterAdministratorPermissionEntity.getAuthorizations())
-        .andReturn(clusterAdministratorAuthorizations).atLeastOnce();
-
-    PermissionEntity ambariAdministratorPermissionEntity = easyMockSupport.createMock(PermissionEntity.class);
-    expect(ambariAdministratorPermissionEntity.getAuthorizations())
-        .andReturn(ambariAdministratorAuthorizations).atLeastOnce();
+    PermissionEntity clusterAdministratorPermissionEntity = new PermissionEntity();
+    clusterAdministratorPermissionEntity.setId(1);
+    PermissionEntity ambariAdministratorPermissionEntity = new PermissionEntity();
+    ambariAdministratorPermissionEntity.setId(2);
 
     PermissionDAO permissionDAO = easyMockSupport.createMock(PermissionDAO.class);
     expect(permissionDAO.findPermissionByNameAndType("AMBARI.ADMINISTRATOR", ambariResourceTypeEntity))
@@ -1635,12 +1627,47 @@ public class UpgradeCatalog250Test {
     Assert.assertEquals("CLUSTER.RUN_CUSTOM_COMMAND", clusterRunCustomCommandEntity.getAuthorizationId());
     Assert.assertEquals("Perform custom cluster-level actions", clusterRunCustomCommandEntity.getAuthorizationName());
 
-    Assert.assertEquals(2, ambariAdministratorAuthorizations.size());
-    Assert.assertTrue(ambariAdministratorAuthorizations.contains(clusterRunCustomCommandEntity));
-    Assert.assertTrue(ambariAdministratorAuthorizations.contains(ambariRunCustomCommandEntity));
+    Assert.assertEquals(2, ambariAdministratorPermissionEntity.getAuthorizations().size());
+    Assert.assertTrue(ambariAdministratorPermissionEntity.getAuthorizations().contains(clusterRunCustomCommandEntity));
+    Assert.assertTrue(ambariAdministratorPermissionEntity.getAuthorizations().contains(ambariRunCustomCommandEntity));
+
+    Assert.assertEquals(1, clusterAdministratorPermissionEntity.getAuthorizations().size());
+    Assert.assertTrue(clusterAdministratorPermissionEntity.getAuthorizations().contains(clusterRunCustomCommandEntity));
+  }
+
+  @Test
+  public void testAddingRoleAuthorizationIsIdempotent() throws Exception {
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    ResourceTypeEntity ambariResourceTypeEntity = new ResourceTypeEntity();
+    PermissionEntity ambariAdministratorPermissionEntity = new PermissionEntity();
+
+    final PermissionDAO permissionDAO = easyMockSupport.createNiceMock(PermissionDAO.class);
+    expect(permissionDAO.findPermissionByNameAndType("AMBARI.ADMINISTRATOR", ambariResourceTypeEntity))
+      .andReturn(ambariAdministratorPermissionEntity)
+      .anyTimes();
+
+    final ResourceTypeDAO resourceTypeDAO = easyMockSupport.createNiceMock(ResourceTypeDAO.class);
+    expect(resourceTypeDAO.findByName("AMBARI")).andReturn(ambariResourceTypeEntity).anyTimes();
+
+    final RoleAuthorizationDAO roleAuthorizationDAO = easyMockSupport.createNiceMock(RoleAuthorizationDAO.class);
+    expect(roleAuthorizationDAO.findById("CLUSTER.RUN_CUSTOM_COMMAND")).andReturn(null).anyTimes();
+
+    Capture<RoleAuthorizationEntity> captureAmbariRunCustomCommandEntity = newCapture();
+    roleAuthorizationDAO.create(capture(captureAmbariRunCustomCommandEntity));
+    expectLastCall().times(2);
+
+    Injector injector = easyMockSupport.createNiceMock(Injector.class);
+    expect(injector.getInstance(RoleAuthorizationDAO.class)).andReturn(roleAuthorizationDAO).atLeastOnce();
+    expect(injector.getInstance(PermissionDAO.class)).andReturn(permissionDAO).atLeastOnce();
+    expect(injector.getInstance(ResourceTypeDAO.class)).andReturn(resourceTypeDAO).atLeastOnce();
+
+    easyMockSupport.replayAll();
+
+    new UpgradeCatalog242(injector).createRoleAuthorizations();
+    new UpgradeCatalog242(injector).createRoleAuthorizations();
+    easyMockSupport.verifyAll();
 
-    Assert.assertEquals(1, clusterAdministratorAuthorizations.size());
-    Assert.assertTrue(clusterAdministratorAuthorizations.contains(clusterRunCustomCommandEntity));
+    Assert.assertEquals(2, ambariAdministratorPermissionEntity.getAuthorizations().size());
   }
 
   @Test
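
For illustration only, a Python analogue (not the project's Java code) of why backing the authorizations with a LinkedHashSet makes addAuthorization idempotent: adding the same authorization twice leaves a single entry, so re-running the upgrade catalog does not duplicate role authorizations. PermissionSketch is a hypothetical stand-in for PermissionEntity.

    from collections import OrderedDict

    class PermissionSketch(object):
        # Hypothetical analogue of PermissionEntity: authorization ids live in an
        # insertion-ordered set, so adding the same id twice is a no-op.
        def __init__(self):
            self._authorizations = OrderedDict()

        def add_authorization(self, auth_id):
            self._authorizations.setdefault(auth_id, True)

        def get_authorizations(self):
            return list(self._authorizations)

    p = PermissionSketch()
    p.add_authorization("CLUSTER.RUN_CUSTOM_COMMAND")
    p.add_authorization("CLUSTER.RUN_CUSTOM_COMMAND")  # simulate a second upgrade run
    assert p.get_authorizations() == ["CLUSTER.RUN_CUSTOM_COMMAND"]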


[13/50] [abbrv] ambari git commit: AMBARI-19953: AMS HBase RegionServer process is still alive after switching AMS to embedded mode. (Qin Liu via avijayan)

Posted by nc...@apache.org.
AMBARI-19953: AMS HBase RegionServer process is still alive after switching AMS to embedded mode. (Qin Liu via avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2db72cdf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2db72cdf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2db72cdf

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 2db72cdf667a174d43eb7fa3c45166b1734948ae
Parents: dd6fb57
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Wed Feb 15 08:45:07 2017 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Wed Feb 15 11:06:44 2017 -0800

----------------------------------------------------------------------
 .../AMBARI_METRICS/0.1.0/package/scripts/ams_service.py      | 4 ++++
 .../stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py    | 8 ++++++++
 2 files changed, 12 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2db72cdf/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
index f423165..875211f 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
@@ -55,6 +55,10 @@ def ams_service(name, action):
         hbase_service('master', action=action)
         hbase_service('regionserver', action=action)
       cmd = format("{cmd} --distributed")
+    else:
+      # make sure no residual region server process is running in embedded mode
+      if action == 'stop':
+        hbase_service('regionserver', action=action)
 
     if action == 'start':
       Execute(format("{sudo} rm -rf {hbase_tmp_dir}/*.tmp")

http://git-wip-us.apache.org/repos/asf/ambari/blob/2db72cdf/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
index 7df74bf..900ccfb 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
@@ -98,6 +98,14 @@ class TestMetricsCollector(RMFTestCase):
     self.assert_hbase_configure('master')
     self.assert_hbase_configure('regionserver')
     self.assert_ams('collector')
+    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf stop regionserver',
+                              on_timeout = 'ls /var/run/ambari-metrics-collector//hbase-ams-regionserver.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-regionserver.pid` >/dev/null 2>&1 && ambari-sudo.sh -H -E kill -9 `ambari-sudo.sh cat /var/run/ambari-metrics-collector//hbase-ams-regionserver.pid`',
+                              timeout = 30,
+                              user = 'ams'
+                              )
+    self.assertResourceCalled('File', '/var/run/ambari-metrics-collector//hbase-ams-regionserver.pid',
+                              action = ['delete']
+                              )
     self.assertResourceCalled('Execute', '/usr/sbin/ambari-metrics-collector --config /etc/ambari-metrics-collector/conf stop',
                               user = 'ams'
     )
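
For illustration only, a minimal sketch of the stop flow this change fixes, with hbase_service passed in as a stand-in for the real resource function from ams_service.py. In embedded mode a RegionServer left over from an earlier distributed setup is now stopped as well.

    def ams_stop(is_distributed, hbase_service):
        # Distributed mode already stops the master and regionserver explicitly;
        # in embedded mode a regionserver left over from an earlier distributed
        # setup is now stopped as well, so no residual process survives.
        if is_distributed:
            hbase_service('master', action='stop')
            hbase_service('regionserver', action='stop')
        else:
            hbase_service('regionserver', action='stop')

    # Example with a trivial stand-in for the real hbase_service resource:
    ams_stop(False, lambda name, action: None)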


[12/50] [abbrv] ambari git commit: AMBARI-20027 Override property popup: Redundant scrollbars in config groups list. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-20027 Override property popup: Redundant scrollbars in config groups list. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b36f00e2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b36f00e2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b36f00e2

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b36f00e2bafa4c65a20bfde57c0a60ff37a68cdb
Parents: e890f01
Author: ababiichuk <ab...@hortonworks.com>
Authored: Wed Feb 15 18:08:40 2017 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Wed Feb 15 20:28:42 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/styles/application.less | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b36f00e2/ambari-web/app/styles/application.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/application.less b/ambari-web/app/styles/application.less
index 612c09c..aa8f075 100644
--- a/ambari-web/app/styles/application.less
+++ b/ambari-web/app/styles/application.less
@@ -2025,7 +2025,7 @@ input[type="radio"].align-checkbox, input[type="checkbox"].align-checkbox {
     .dropdown-menu {
       max-height: 160px;
       max-width: 300px;
-      overflow-y: scroll;
+      overflow-y: auto;
     }
   }
   .message{


[36/50] [abbrv] ambari git commit: AMBARI-20051. Hosts filter component list is not sorted so it is difficult to use (akovalenko)

Posted by nc...@apache.org.
AMBARI-20051. Hosts filter component list is not sorted so it is difficult to use (akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ce404d64
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ce404d64
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ce404d64

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: ce404d640e930b7326dc1857ac1ae6ce03b2884e
Parents: 18fc258
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Thu Feb 16 18:05:18 2017 +0200
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Fri Feb 17 03:12:46 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/views/main/host/combo_search_box.js       | 1 +
 ambari-web/test/views/main/host/combo_search_box_test.js | 6 +++---
 2 files changed, 4 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ce404d64/ambari-web/app/views/main/host/combo_search_box.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/host/combo_search_box.js b/ambari-web/app/views/main/host/combo_search_box.js
index 2f9900a..372c3f7 100644
--- a/ambari-web/app/views/main/host/combo_search_box.js
+++ b/ambari-web/app/views/main/host/combo_search_box.js
@@ -307,6 +307,7 @@ App.MainHostComboSearchBoxView = Em.View.extend({
         App.router.get('mainHostController.labelValueMap')[displayName] = component.get('componentName');
       }
     });
+    hostComponentList = hostComponentList.sortProperty('label');
     return hostComponentList;
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce404d64/ambari-web/test/views/main/host/combo_search_box_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/host/combo_search_box_test.js b/ambari-web/test/views/main/host/combo_search_box_test.js
index c82167c..0775f66 100644
--- a/ambari-web/test/views/main/host/combo_search_box_test.js
+++ b/ambari-web/test/views/main/host/combo_search_box_test.js
@@ -663,11 +663,11 @@ describe('App.MainHostComboSearchBoxView', function () {
       App.router.get.restore();
     });
 
-    it("should return host-component list", function() {
+    it("should return sorted host-component list", function() {
       expect(view.getHostComponentList()).to.be.eql([
+        {label: 'cc1', category: 'Component'},
         {label: 'mc1', category: 'Component'},
-        {label: 'sc1', category: 'Component'},
-        {label: 'cc1', category: 'Component'}
+        {label: 'sc1', category: 'Component'}
       ]);
       expect(labelValueMap).to.be.eql({
         mc1: 'MC1',


[45/50] [abbrv] ambari git commit: AMBARI-18868. Stage and Request status should be persisted in the database. (jaimin)

Posted by nc...@apache.org.
AMBARI-18868. Stage and Request status should be persisted in the database. (jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0fc7a667
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0fc7a667
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0fc7a667

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 0fc7a6671feb10dc0e8475dc4878942cf19f46cc
Parents: dd174f4
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Fri Feb 17 09:31:10 2017 -0800
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Fri Feb 17 09:31:10 2017 -0800

----------------------------------------------------------------------
 .../actionmanager/ActionDBAccessorImpl.java     | 108 ++--
 .../server/actionmanager/ActionScheduler.java   |  31 +
 .../ambari/server/actionmanager/Request.java    |   8 +-
 .../ambari/server/actionmanager/Stage.java      |  25 +
 .../controller/internal/CalculatedStatus.java   | 390 +++++++++++-
 .../ambari/server/events/TaskCreateEvent.java   |  48 ++
 .../apache/ambari/server/events/TaskEvent.java  |  66 ++
 .../ambari/server/events/TaskUpdateEvent.java   |  35 ++
 .../listeners/tasks/TaskStatusListener.java     | 609 +++++++++++++++++++
 .../events/publishers/TaskEventPublisher.java   |  62 ++
 .../server/orm/dao/HostRoleCommandDAO.java      |  67 +-
 .../ambari/server/orm/dao/RequestDAO.java       |   8 +
 .../apache/ambari/server/orm/dao/StageDAO.java  |  32 +-
 .../orm/entities/HostRoleCommandEntity.java     |   4 +-
 .../server/orm/entities/RequestEntity.java      |  49 +-
 .../ambari/server/orm/entities/StageEntity.java |  70 ++-
 .../server/orm/entities/StageEntityPK.java      |  12 +
 .../server/upgrade/UpgradeCatalog300.java       |  70 +++
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |   7 +-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   7 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   7 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   7 +-
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   7 +-
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   7 +-
 .../actionmanager/TestActionDBAccessorImpl.java |   3 +-
 .../actionmanager/TestActionScheduler.java      |  71 ++-
 .../alerts/AmbariPerformanceRunnableTest.java   |   7 +-
 .../internal/UpgradeResourceProviderTest.java   |   1 -
 .../UpgradeSummaryResourceProviderTest.java     |   1 -
 .../listeners/tasks/TaskStatusListenerTest.java | 164 +++++
 .../ambari/server/state/ConfigHelperTest.java   |   2 +
 .../cluster/ClusterEffectiveVersionTest.java    |   5 +-
 .../services/RetryUpgradeActionServiceTest.java |   1 -
 .../server/upgrade/UpgradeCatalog300Test.java   |  20 +
 34 files changed, 1892 insertions(+), 119 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
index 7881a4b..b813fe6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java
@@ -45,7 +45,9 @@ import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.internal.CalculatedStatus;
 import org.apache.ambari.server.events.HostsRemovedEvent;
 import org.apache.ambari.server.events.RequestFinishedEvent;
+import org.apache.ambari.server.events.TaskCreateEvent;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
+import org.apache.ambari.server.events.publishers.TaskEventPublisher;
 import org.apache.ambari.server.orm.dao.ClusterDAO;
 import org.apache.ambari.server.orm.dao.ExecutionCommandDAO;
 import org.apache.ambari.server.orm.dao.HostDAO;
@@ -130,6 +132,9 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
   AmbariEventPublisher ambariEventPublisher;
 
   @Inject
+  TaskEventPublisher taskEventPublisher;
+
+  @Inject
   AuditLogger auditLogger;
 
   /**
@@ -205,8 +210,6 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
   public Collection<HostRoleCommandEntity> abortOperation(long requestId) {
     long now = System.currentTimeMillis();
 
-    endRequest(requestId);
-
     // only request commands which actually need to be aborted; requesting all
     // commands here can cause OOM problems during large requests like upgrades
     List<HostRoleCommandEntity> commands = hostRoleCommandDAO.findByRequestIdAndStatuses(requestId,
@@ -228,7 +231,7 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
     if (!commands.isEmpty()) {
       return hostRoleCommandDAO.mergeAll(commands);
     }
-
+    endRequest(requestId);
     return Collections.emptyList();
   }
 
@@ -283,7 +286,7 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
   @Override
   @Experimental(feature = ExperimentalFeature.PARALLEL_PROCESSING)
   public List<Stage> getStagesInProgress() {
-    List<StageEntity> stageEntities = stageDAO.findByCommandStatuses(
+    List<StageEntity> stageEntities = stageDAO.findByStatuses(
       HostRoleStatus.IN_PROGRESS_STATUSES);
     return getStagesForEntities(stageEntities);
   }
@@ -343,6 +346,7 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
     RequestEntity requestEntity = request.constructNewPersistenceEntity();
 
     Long clusterId = -1L;
+    Long requestId = requestEntity.getRequestId();
     ClusterEntity clusterEntity = clusterDAO.findById(request.getClusterId());
     if (clusterEntity != null) {
       clusterId = clusterEntity.getClusterId();
@@ -356,8 +360,11 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
 
     addRequestToAuditlogCache(request);
 
+    List<HostRoleCommand> hostRoleCommands = new ArrayList<>();
+
     for (Stage stage : request.getStages()) {
       StageEntity stageEntity = stage.constructNewPersistenceEntity();
+      Long stageId = stageEntity.getStageId();
       stageEntities.add(stageEntity);
       stageEntity.setClusterId(clusterId);
       stageEntity.setRequest(requestEntity);
@@ -366,6 +373,8 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
       List<HostRoleCommand> orderedHostRoleCommands = stage.getOrderedHostRoleCommands();
 
       for (HostRoleCommand hostRoleCommand : orderedHostRoleCommands) {
+        hostRoleCommand.setRequestId(requestId);
+        hostRoleCommand.setStageId(stageId);
         HostRoleCommandEntity hostRoleCommandEntity = hostRoleCommand.constructNewPersistenceEntity();
         hostRoleCommandEntity.setStage(stageEntity);
         hostRoleCommandDAO.create(hostRoleCommandEntity);
@@ -415,11 +424,12 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
         hostRoleCommandEntity.setExecutionCommand(executionCommandEntity);
 
         executionCommandDAO.create(hostRoleCommandEntity.getExecutionCommand());
-        hostRoleCommandEntity = hostRoleCommandDAO.merge(hostRoleCommandEntity);
+        hostRoleCommandEntity = hostRoleCommandDAO.mergeWithoutPublishEvent(hostRoleCommandEntity);
 
         if (null != hostEntity) {
           hostEntity = hostDAO.merge(hostEntity);
         }
+        hostRoleCommands.add(hostRoleCommand);
       }
 
       for (RoleSuccessCriteriaEntity roleSuccessCriteriaEntity : stageEntity.getRoleSuccessCriterias()) {
@@ -431,6 +441,9 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
 
     requestEntity.setStages(stageEntities);
     requestDAO.merge(requestEntity);
+
+    TaskCreateEvent taskCreateEvent = new TaskCreateEvent(hostRoleCommands);
+    taskEventPublisher.publish(taskCreateEvent);
   }
 
   @Override
@@ -497,66 +510,55 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
     long now = System.currentTimeMillis();
 
     List<Long> requestsToCheck = new ArrayList<Long>();
-    List<Long> abortedCommandUpdates = new ArrayList<Long>();
 
     List<HostRoleCommandEntity> commandEntities = hostRoleCommandDAO.findByPKs(taskReports.keySet());
+    List<HostRoleCommandEntity> commandEntitiesToMerge = new ArrayList<HostRoleCommandEntity>();
     for (HostRoleCommandEntity commandEntity : commandEntities) {
       CommandReport report = taskReports.get(commandEntity.getTaskId());
-
-      boolean statusChanged = false;
-
-      switch (commandEntity.getStatus()) {
-        case ABORTED:
-          // We don't want to overwrite statuses for ABORTED tasks with
-          // statuses that have been received from the agent after aborting task
-          abortedCommandUpdates.add(commandEntity.getTaskId());
-          break;
-        default:
-          HostRoleStatus status = HostRoleStatus.valueOf(report.getStatus());
-          // if FAILED and marked for holding then set status = HOLDING_FAILED
-          if (status == HostRoleStatus.FAILED && commandEntity.isRetryAllowed()) {
-            status = HostRoleStatus.HOLDING_FAILED;
-
-            // tasks can be marked as skipped when they fail
-            if (commandEntity.isFailureAutoSkipped()) {
-              status = HostRoleStatus.SKIPPED_FAILED;
-            }
+      HostRoleStatus existingTaskStatus = commandEntity.getStatus();
+      HostRoleStatus reportedTaskStatus = HostRoleStatus.valueOf(report.getStatus());
+      if (!existingTaskStatus.isCompletedState() || existingTaskStatus == HostRoleStatus.ABORTED) {
+        // if FAILED and marked for holding then set reportedTaskStatus = HOLDING_FAILED
+        if (reportedTaskStatus == HostRoleStatus.FAILED && commandEntity.isRetryAllowed()) {
+          reportedTaskStatus = HostRoleStatus.HOLDING_FAILED;
+
+          // tasks can be marked as skipped when they fail
+          if (commandEntity.isFailureAutoSkipped()) {
+            reportedTaskStatus = HostRoleStatus.SKIPPED_FAILED;
           }
-
-          commandEntity.setStatus(status);
-          statusChanged = true;
-          break;
-      }
-
-      commandEntity.setStdOut(report.getStdOut().getBytes());
-      commandEntity.setStdError(report.getStdErr().getBytes());
-      commandEntity.setStructuredOut(report.getStructuredOut() == null ? null :
-        report.getStructuredOut().getBytes());
-      commandEntity.setExitcode(report.getExitCode());
-
-      if (HostRoleStatus.getCompletedStates().contains(commandEntity.getStatus())) {
-        commandEntity.setEndTime(now);
-
-        String actionId = report.getActionId();
-        long[] requestStageIds = StageUtils.getRequestStage(actionId);
-        long requestId = requestStageIds[0];
-        long stageId = requestStageIds[1];
-        if(statusChanged) {
-          auditLog(commandEntity, requestId);
         }
-        if (requestDAO.getLastStageId(requestId).equals(stageId)) {
-          requestsToCheck.add(requestId);
+        if (!existingTaskStatus.isCompletedState()) {
+          commandEntity.setStatus(reportedTaskStatus);
         }
+        commandEntity.setStdOut(report.getStdOut().getBytes());
+        commandEntity.setStdError(report.getStdErr().getBytes());
+        commandEntity.setStructuredOut(report.getStructuredOut() == null ? null :
+            report.getStructuredOut().getBytes());
+        commandEntity.setExitcode(report.getExitCode());
+        if (HostRoleStatus.getCompletedStates().contains(commandEntity.getStatus())) {
+          commandEntity.setEndTime(now);
+
+          String actionId = report.getActionId();
+          long[] requestStageIds = StageUtils.getRequestStage(actionId);
+          long requestId = requestStageIds[0];
+          long stageId = requestStageIds[1];
+          auditLog(commandEntity, requestId);
+          if (requestDAO.getLastStageId(requestId).equals(stageId)) {
+            requestsToCheck.add(requestId);
+          }
+        }
+        commandEntitiesToMerge.add(commandEntity);
+      } else {
+       LOG.warn(String.format("Request for invalid transition of host role command status received for task id %d from " +
+           "agent: %s -> %s",commandEntity.getTaskId(),existingTaskStatus,reportedTaskStatus));
       }
     }
 
     // no need to merge if there's nothing to merge
-    if (!commandEntities.isEmpty()) {
-      hostRoleCommandDAO.mergeAll(commandEntities);
+    if (!commandEntitiesToMerge.isEmpty()) {
+      hostRoleCommandDAO.mergeAll(commandEntitiesToMerge);
     }
 
-    // Invalidate cache because of updates to ABORTED commands
-    hostRoleCommandCache.invalidateAll(abortedCommandUpdates);
 
     for (Long requestId : requestsToCheck) {
       endRequestIfCompleted(requestId);
@@ -923,7 +925,7 @@ public class ActionDBAccessorImpl implements ActionDBAccessor {
       return HostRoleStatus.QUEUED;
     }
     Collection<HostRoleStatus> taskStatuses = details.getTaskStatuses();
-    return CalculatedStatus.calculateSummaryStatusOfStage(CalculatedStatus.calculateStatusCounts(taskStatuses), numberOfTasks, false);
+    return CalculatedStatus.calculateSummaryStatus(CalculatedStatus.calculateStatusCounts(taskStatuses), numberOfTasks, false);
   }
 
   /**

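Side note on the event wiring introduced above: ActionDBAccessorImpl now collects the created HostRoleCommands and publishes a single TaskCreateEvent once the request is persisted. Below is a minimal, self-contained sketch of that publish/subscribe flow, assuming TaskEventPublisher is backed by a Guava EventBus; the Task, CreateEvent and Listener classes here are hypothetical stand-ins, not the Ambari types.

    import java.util.Arrays;
    import java.util.List;

    import com.google.common.eventbus.EventBus;
    import com.google.common.eventbus.Subscribe;

    public class TaskEventSketch {

      // Stand-in for HostRoleCommand: only the field needed for the sketch.
      static class Task {
        final long taskId;
        Task(long taskId) { this.taskId = taskId; }
      }

      // Stand-in for TaskCreateEvent: carries the full batch of new tasks.
      static class CreateEvent {
        final List<Task> tasks;
        CreateEvent(List<Task> tasks) { this.tasks = tasks; }
      }

      // Stand-in for TaskStatusListener: registers itself on construction.
      public static class Listener {
        Listener(EventBus bus) { bus.register(this); }

        @Subscribe
        public void onCreate(CreateEvent event) {
          System.out.println("Tracking " + event.tasks.size() + " new tasks");
        }
      }

      public static void main(String[] args) {
        EventBus bus = new EventBus("tasks");
        new Listener(bus);
        // Mirrors ActionDBAccessorImpl: persist all commands first, then publish once.
        bus.post(new CreateEvent(Arrays.asList(new Task(1), new Task(2))));
      }
    }
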
http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
index 680c0a6..a92c03c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionScheduler.java
@@ -30,6 +30,7 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import javax.persistence.EntityManager;
 
@@ -49,6 +50,7 @@ import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.HostsMap;
 import org.apache.ambari.server.events.ActionFinalReportReceivedEvent;
 import org.apache.ambari.server.events.jpa.EntityManagerCacheInvalidationEvent;
+import org.apache.ambari.server.events.listeners.tasks.TaskStatusListener;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.events.publishers.JPAEventPublisher;
 import org.apache.ambari.server.metadata.RoleCommandOrder;
@@ -75,10 +77,13 @@ import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Function;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.ListMultimap;
+import com.google.common.collect.Lists;
 import com.google.common.collect.Multimap;
 import com.google.common.eventbus.Subscribe;
 import com.google.common.reflect.TypeToken;
@@ -179,6 +184,9 @@ class ActionScheduler implements Runnable {
    * we receive awake() request during running a scheduler iteration.
    */
   private boolean activeAwakeRequest = false;
+
+  private AtomicBoolean taskStatusLoaded = new AtomicBoolean();
+
   //Cache for clusterHostinfo, key - stageId-requestId
   private Cache<String, Map<String, Set<String>>> clusterHostInfoCache;
   private Cache<String, Map<String, String>> commandParamsStageCache;
@@ -353,6 +361,8 @@ class ActionScheduler implements Runnable {
         LOG.debug("Processing {} in progress stages ", stages.size());
       }
 
+      publishInProgressTasks(stages);
+
       if (stages.isEmpty()) {
         // Nothing to do
         if (LOG.isDebugEnabled()) {
@@ -532,6 +542,27 @@ class ActionScheduler implements Runnable {
     }
   }
 
+  /**
+   * Publishes an event to load {@link TaskStatusListener#activeTasksMap}, {@link TaskStatusListener#activeStageMap}
+   * and {@link TaskStatusListener#activeRequestMap} for all running requests once during server startup.
+   * This is required because some tasks may have been in progress when the server was last stopped.
+   * @param stages list of stages
+   */
+  private void publishInProgressTasks(List<Stage> stages) {
+    if (taskStatusLoaded.compareAndSet(false, true)) {
+      if (!stages.isEmpty()) {
+        Function<Stage, Long> transform = new Function<Stage, Long>() {
+          @Override
+          public Long apply(Stage stage) {
+            return stage.getRequestId();
+          }
+        };
+        Set<Long> runningRequestID = ImmutableSet.copyOf(Lists.transform(stages, transform));
+        List<HostRoleCommand> hostRoleCommands = db.getAllTasksByRequestIds(runningRequestID);
+        hostRoleCommandDAO.publishTaskCreateEvent(hostRoleCommands);
+      }
+    }
+  }
 
   /**
    * Returns the list of hosts that have a task assigned

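The publishInProgressTasks() guard above runs at most once per server lifetime and republishes the tasks of requests that were already in progress at startup. The sketch below shows the two idioms it combines, an AtomicBoolean compare-and-set guard plus Guava's Lists.transform/ImmutableSet.copyOf; the Stage stand-in and the publishOnce name are illustrative, not Ambari API.

    import java.util.Arrays;
    import java.util.List;
    import java.util.Set;
    import java.util.concurrent.atomic.AtomicBoolean;

    import com.google.common.base.Function;
    import com.google.common.collect.ImmutableSet;
    import com.google.common.collect.Lists;

    public class PublishOnceSketch {

      // Stand-in for Stage: only the request id is needed here.
      static class Stage {
        private final long requestId;
        Stage(long requestId) { this.requestId = requestId; }
        long getRequestId() { return requestId; }
      }

      private static final AtomicBoolean loaded = new AtomicBoolean();

      static void publishOnce(List<Stage> stages) {
        // compareAndSet flips false -> true exactly once, so the body runs at most once.
        if (loaded.compareAndSet(false, true) && !stages.isEmpty()) {
          Set<Long> requestIds = ImmutableSet.copyOf(
              Lists.transform(stages, new Function<Stage, Long>() {
                @Override
                public Long apply(Stage stage) {
                  return stage.getRequestId();
                }
              }));
          System.out.println("Republishing tasks for requests " + requestIds);
        }
      }

      public static void main(String[] args) {
        publishOnce(Arrays.asList(new Stage(1), new Stage(1), new Stage(2)));
        publishOnce(Arrays.asList(new Stage(3))); // no-op: already loaded once
      }
    }
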
http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
index 31e11c1..502c016 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
@@ -71,7 +71,8 @@ public class Request {
    * As of now, this field is not used. Request status is
    * calculated at RequestResourceProvider on the fly.
    */
-  private HostRoleStatus status; // not persisted yet
+  private HostRoleStatus status = HostRoleStatus.PENDING;
+  private HostRoleStatus displayStatus = HostRoleStatus.PENDING;
   private String inputs;
   private List<RequestResourceFilter> resourceFilters;
   private RequestOperationLevel operationLevel;
@@ -186,6 +187,7 @@ public class Request {
     this.requestType = entity.getRequestType();
     this.commandName = entity.getCommandName();
     this.status = entity.getStatus();
+    this.displayStatus = entity.getDisplayStatus();
     if (entity.getRequestScheduleEntity() != null) {
       this.requestScheduleId = entity.getRequestScheduleEntity().getScheduleId();
     }
@@ -241,6 +243,8 @@ public class Request {
     requestEntity.setInputs(inputs);
     requestEntity.setRequestType(requestType);
     requestEntity.setRequestScheduleId(requestScheduleId);
+    requestEntity.setStatus(status);
+    requestEntity.setDisplayStatus(displayStatus);
     //TODO set all fields
 
     if (resourceFilters != null) {
@@ -381,6 +385,8 @@ public class Request {
         ", startTime=" + startTime +
         ", endTime=" + endTime +
         ", inputs='" + inputs + '\'' +
+        ", status='" + status + '\'' +
+        ", displayStatus='" + displayStatus + '\'' +
         ", resourceFilters='" + resourceFilters + '\'' +
         ", operationLevel='" + operationLevel + '\'' +
         ", requestType=" + requestType +

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
index 4a05b32..f7ceca2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Stage.java
@@ -74,6 +74,8 @@ public class Stage {
   private long stageId = -1;
   private final String logDir;
   private final String requestContext;
+  private HostRoleStatus status = HostRoleStatus.PENDING;
+  private HostRoleStatus displayStatus = HostRoleStatus.PENDING;
   private String clusterHostInfo;
   private String commandParamsStage;
   private String hostParamsStage;
@@ -157,6 +159,8 @@ public class Stage {
     commandParamsStage = stageEntity.getCommandParamsStage();
     hostParamsStage = stageEntity.getHostParamsStage();
     commandExecutionType = stageEntity.getCommandExecutionType();
+    status = stageEntity.getStatus();
+    displayStatus = stageEntity.getDisplayStatus();
 
     List<Long> taskIds = hostRoleCommandDAO.findTaskIdsByStage(requestId, stageId);
     Collection<HostRoleCommand> commands = dbAccessor.getTasks(taskIds);
@@ -197,6 +201,8 @@ public class Stage {
     stageEntity.setCommandParamsStage(commandParamsStage);
     stageEntity.setHostParamsStage(hostParamsStage);
     stageEntity.setCommandExecutionType(commandExecutionType);
+    stageEntity.setStatus(status);
+    stageEntity.setDisplayStatus(displayStatus);
 
     for (Role role : successFactors.keySet()) {
       RoleSuccessCriteriaEntity roleSuccessCriteriaEntity = new RoleSuccessCriteriaEntity();
@@ -290,6 +296,23 @@ public class Stage {
     this.commandExecutionType = commandExecutionType;
   }
 
+  /**
+   * get current status of the stage
+   * @return {@link HostRoleStatus}
+   */
+  public HostRoleStatus getStatus() {
+    return status;
+  }
+
+  /**
+   * sets status of the stage
+   * @param status {@link HostRoleStatus}
+   */
+  public void setStatus(HostRoleStatus status) {
+    this.status = status;
+  }
+
+
   public synchronized void setStageId(long stageId) {
     if (this.stageId != -1) {
       throw new RuntimeException("Attempt to set stageId again! Not allowed.");
@@ -915,6 +938,8 @@ public class Stage {
     builder.append("clusterHostInfo="+clusterHostInfo+"\n");
     builder.append("commandParamsStage="+commandParamsStage+"\n");
     builder.append("hostParamsStage="+hostParamsStage+"\n");
+    builder.append("status="+status+"\n");
+    builder.append("displayStatus="+displayStatus+"\n");
     builder.append("Success Factors:\n");
     for (Role r : successFactors.keySet()) {
       builder.append("  role: "+r+", factor: "+successFactors.get(r)+"\n");

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CalculatedStatus.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CalculatedStatus.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CalculatedStatus.java
index 3c415df..32dd03d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CalculatedStatus.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CalculatedStatus.java
@@ -26,12 +26,20 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.ambari.server.Role;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.actionmanager.Request;
 import org.apache.ambari.server.actionmanager.Stage;
+import org.apache.ambari.server.events.listeners.tasks.TaskStatusListener;
 import org.apache.ambari.server.orm.dao.HostRoleCommandStatusSummaryDTO;
 import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;
 import org.apache.ambari.server.orm.entities.StageEntity;
+import org.apache.ambari.server.orm.entities.StageEntityPK;
+
+import com.google.common.base.Function;
+import com.google.common.base.Predicate;
+import com.google.common.collect.FluentIterable;
 
 /**
  * Status of a request resource, calculated from a set of tasks or stages.
@@ -142,7 +150,7 @@ public class CalculatedStatus {
 
     Map<HostRoleStatus, Integer> taskStatusCounts = CalculatedStatus.calculateTaskEntityStatusCounts(tasks);
 
-    HostRoleStatus status = calculateSummaryStatusOfStage(taskStatusCounts, size, skippable);
+    HostRoleStatus status = calculateSummaryStatus(taskStatusCounts, size, skippable);
 
     double progressPercent = calculateProgressPercent(taskStatusCounts, size);
 
@@ -167,7 +175,7 @@ public class CalculatedStatus {
 
       // calculate the stage status from the task status counts
       HostRoleStatus stageStatus =
-          calculateSummaryStatusOfStage(calculateTaskEntityStatusCounts(stageTasks), stageTasks.size(), stage.isSkippable());
+          calculateSummaryStatus(calculateTaskEntityStatusCounts(stageTasks), stageTasks.size(), stage.isSkippable());
 
       stageStatuses.add(stageStatus);
 
@@ -203,7 +211,7 @@ public class CalculatedStatus {
 
       // calculate the stage status from the task status counts
       HostRoleStatus stageStatus =
-          calculateSummaryStatusOfStage(calculateTaskStatusCounts(stageTasks), stageTasks.size(), stage.isSkippable());
+          calculateSummaryStatus(calculateTaskStatusCounts(stageTasks), stageTasks.size(), stage.isSkippable());
 
       stageStatuses.add(stageStatus);
 
@@ -256,6 +264,126 @@ public class CalculatedStatus {
   }
 
   /**
+   * Returns counts of tasks that are in various states.
+   *
+   * @param hostRoleCommands  collection of beans {@link HostRoleCommand}
+   *
+   * @return a map of counts of tasks keyed by the task status
+   */
+  public static Map<HostRoleStatus, Integer> calculateStatusCountsForTasks(Collection<HostRoleCommand> hostRoleCommands) {
+    Map<HostRoleStatus, Integer> counters = new HashMap<>();
+    // initialize
+    for (HostRoleStatus hostRoleStatus : HostRoleStatus.values()) {
+      counters.put(hostRoleStatus, 0);
+    }
+    // calculate counts
+    for (HostRoleCommand hrc : hostRoleCommands) {
+      // count tasks where isCompletedState() == true as COMPLETED
+      // but don't count tasks with COMPLETED status twice
+      if (hrc.getStatus().isCompletedState() && hrc.getStatus() != HostRoleStatus.COMPLETED) {
+        // Increase total number of completed tasks;
+        counters.put(HostRoleStatus.COMPLETED, counters.get(HostRoleStatus.COMPLETED) + 1);
+      }
+      // Increment counter for particular status
+      counters.put(hrc.getStatus(), counters.get(hrc.getStatus()) + 1);
+    }
+
+    // Overwrite IN_PROGRESS so the per-status counts sum to the total number of tasks
+    counters.put(HostRoleStatus.IN_PROGRESS,
+        hostRoleCommands.size() -
+            counters.get(HostRoleStatus.COMPLETED) -
+            counters.get(HostRoleStatus.QUEUED) -
+            counters.get(HostRoleStatus.PENDING));
+
+    return counters;
+  }
+
+  /**
+   * Returns counts of stages that are in various states, for both the status and the display status.
+   *
+   * @param stages  collection of beans {@link org.apache.ambari.server.events.listeners.tasks.TaskStatusListener.ActiveStage}
+   *
+   * @return a map, keyed by status type, of counts of stages keyed by stage status
+   */
+  public static Map<StatusType,Map<HostRoleStatus, Integer>> calculateStatusCountsForStage(Collection<TaskStatusListener.ActiveStage> stages) {
+
+    Map<StatusType,Map<HostRoleStatus, Integer>> counters = new HashMap<>();
+    for (StatusType statusType : StatusType.values()) {
+      Map <HostRoleStatus, Integer> statusMap = new HashMap<HostRoleStatus, Integer>();
+      counters.put(statusType,statusMap);
+      // initialize
+      for (HostRoleStatus hostRoleStatus : HostRoleStatus.values()) {
+        statusMap.put(hostRoleStatus, 0);
+      }
+      for (TaskStatusListener.ActiveStage stage : stages) {
+        // count stages where isCompletedState() == true as COMPLETED
+        // but don't count stages with COMPLETED status twice
+        HostRoleStatus status;
+        if (statusType == StatusType.DISPLAY_STATUS) {
+          status = stage.getDisplayStatus();
+        } else {
+          status = stage.getStatus();
+        }
+        if (status.isCompletedState() && status != HostRoleStatus.COMPLETED) {
+          // Increase total number of completed stages;
+          statusMap.put(HostRoleStatus.COMPLETED, statusMap.get(HostRoleStatus.COMPLETED) + 1);
+        }
+
+        // Increment counter for particular status
+        statusMap.put(status, statusMap.get(status) + 1);
+      }
+      statusMap.put(HostRoleStatus.IN_PROGRESS,
+          stages.size() -
+              statusMap.get(HostRoleStatus.COMPLETED) -
+              statusMap.get(HostRoleStatus.QUEUED) -
+              statusMap.get(HostRoleStatus.PENDING));
+    }
+    return counters;
+  }
+
+
+  /**
+   * Returns counts of tasks, restricted to the given stage, that are in various states.
+   *
+   * @param hostRoleCommands  collection of beans {@link HostRoleCommand}
+   *
+   * @return a map of counts of tasks keyed by the task status
+   */
+  public static Map<HostRoleStatus, Integer> calculateStatusCountsForTasks(Collection<HostRoleCommand> hostRoleCommands, StageEntityPK stage) {
+    Map<HostRoleStatus, Integer> counters = new HashMap<>();
+    List<HostRoleCommand> hostRoleCommandsOfStage = new ArrayList<>();
+    // initialize
+    for (HostRoleStatus hostRoleStatus : HostRoleStatus.values()) {
+      counters.put(hostRoleStatus, 0);
+    }
+    // calculate counts
+    for (HostRoleCommand hrc : hostRoleCommands) {
+      if (stage.getStageId() == hrc.getStageId() && stage.getRequestId() == hrc.getRequestId()) {
+        // count tasks where isCompletedState() == true as COMPLETED
+        // but don't count tasks with COMPLETED status twice
+        if (hrc.getStatus().isCompletedState() && hrc.getStatus() != HostRoleStatus.COMPLETED) {
+          // Increase total number of completed tasks;
+          counters.put(HostRoleStatus.COMPLETED, counters.get(HostRoleStatus.COMPLETED) + 1);
+        }
+
+        // Increment counter for particular status
+        counters.put(hrc.getStatus(), counters.get(hrc.getStatus()) + 1);
+
+        hostRoleCommandsOfStage.add(hrc);
+      }
+    }
+
+    // Overwrite IN_PROGRESS so the per-status counts sum to the total number of tasks in the stage
+    counters.put(HostRoleStatus.IN_PROGRESS,
+        hostRoleCommandsOfStage.size() -
+            counters.get(HostRoleStatus.COMPLETED) -
+            counters.get(HostRoleStatus.QUEUED) -
+            counters.get(HostRoleStatus.PENDING));
+
+    return counters;
+  }
+
+  /**
    * Returns counts of task entities that are in various states.
    *
    * @param tasks  the collection of task entities
@@ -329,7 +457,7 @@ public class CalculatedStatus {
       int total = summary.getTaskTotal();
       boolean skip = summary.isStageSkippable();
       Map<HostRoleStatus, Integer> counts = calculateStatusCounts(summary.getTaskStatuses());
-      HostRoleStatus stageStatus = calculateSummaryStatusOfStage(counts, total, skip);
+      HostRoleStatus stageStatus = calculateSummaryStatus(counts, total, skip);
       HostRoleStatus stageDisplayStatus = calculateSummaryDisplayStatus(counts, total, skip);
 
       stageStatuses.add(stageStatus);
@@ -392,7 +520,7 @@ public class CalculatedStatus {
    *
    * @return summary request status based on statuses of tasks in different states.
    */
-  public static HostRoleStatus calculateSummaryStatusOfStage(Map<HostRoleStatus, Integer> counters,
+  public static HostRoleStatus calculateSummaryStatus(Map<HostRoleStatus, Integer> counters,
       int total, boolean skippable) {
 
     // when there are 0 tasks, return COMPLETED
@@ -435,6 +563,230 @@ public class CalculatedStatus {
   }
 
   /**
+   * Computes a summary status from counts over a partial set of tasks.
+   * @param counters counts of resources that are in various states
+   * @param skippable <code>true</code> if failure of any of the tasks should not fail the stage
+   * @return {@link HostRoleStatus}
+   */
+  public static HostRoleStatus calculateSummaryStatusFromPartialSet(Map<HostRoleStatus, Integer> counters,
+                                                      boolean skippable) {
+
+    HostRoleStatus status = HostRoleStatus.PENDING;
+    // By definition, any tasks in a future stage must be held in a PENDING status.
+    if (counters.get(HostRoleStatus.HOLDING) > 0 || counters.get(HostRoleStatus.HOLDING_FAILED) > 0 || counters.get(HostRoleStatus.HOLDING_TIMEDOUT) > 0) {
+      status =  counters.get(HostRoleStatus.HOLDING) > 0 ? HostRoleStatus.HOLDING :
+          counters.get(HostRoleStatus.HOLDING_FAILED) > 0 ? HostRoleStatus.HOLDING_FAILED :
+              HostRoleStatus.HOLDING_TIMEDOUT;
+    }
+
+    // Because tasks are not skippable, guaranteed to be FAILED
+    if (counters.get(HostRoleStatus.FAILED) > 0 && !skippable) {
+      status = HostRoleStatus.FAILED;
+    }
+
+    // Because tasks are not skippable, guaranteed to be TIMEDOUT
+    if (counters.get(HostRoleStatus.TIMEDOUT) > 0  && !skippable) {
+      status = HostRoleStatus.TIMEDOUT;
+    }
+
+    int inProgressTasks =  counters.get(HostRoleStatus.QUEUED) + counters.get(HostRoleStatus.IN_PROGRESS);
+    if (inProgressTasks > 0) {
+      status = HostRoleStatus.IN_PROGRESS;
+    }
+
+    return status;
+  }
+
+
+  /**
+   * Computes the status of a stage from its tasks, honoring per-role success factors.
+   * @param hostRoleCommands list of {@link HostRoleCommand} for a stage
+   * @param counters counts of resources that are in various states
+   * @param successFactors map of roles to their success factor for a stage
+   * @param skippable <code>true</code> if failure of any of the tasks should not fail the stage
+   * @return {@link HostRoleStatus} based on success factors
+   */
+  public static HostRoleStatus calculateStageStatus(List <HostRoleCommand> hostRoleCommands, Map<HostRoleStatus, Integer> counters, Map<Role, Float> successFactors,
+                                                    boolean skippable) {
+
+    // when there are 0 tasks, return COMPLETED
+    int total = hostRoleCommands.size();
+    if (total == 0) {
+      return HostRoleStatus.COMPLETED;
+    }
+
+    if (counters.get(HostRoleStatus.PENDING) == total) {
+      return HostRoleStatus.PENDING;
+    }
+
+    // By definition, any tasks in a future stage must be held in a PENDING status.
+    if (counters.get(HostRoleStatus.HOLDING) > 0 || counters.get(HostRoleStatus.HOLDING_FAILED) > 0 || counters.get(HostRoleStatus.HOLDING_TIMEDOUT) > 0) {
+      return counters.get(HostRoleStatus.HOLDING) > 0 ? HostRoleStatus.HOLDING :
+          counters.get(HostRoleStatus.HOLDING_FAILED) > 0 ? HostRoleStatus.HOLDING_FAILED :
+              HostRoleStatus.HOLDING_TIMEDOUT;
+    }
+
+
+    if (counters.get(HostRoleStatus.FAILED) > 0 && !skippable) {
+      Set<Role> rolesWithFailedTasks = getRolesOfFailedTasks(hostRoleCommands);
+      Boolean didStageFailed = didStageFailed(hostRoleCommands, rolesWithFailedTasks, successFactors);
+      if (didStageFailed) return HostRoleStatus.FAILED;
+    }
+
+
+    if (counters.get(HostRoleStatus.TIMEDOUT) > 0  && !skippable) {
+      Set<Role> rolesWithTimedOutTasks = getRolesOfTimedOutTasks(hostRoleCommands);
+      Boolean didStageFailed = didStageFailed(hostRoleCommands, rolesWithTimedOutTasks, successFactors);
+      if (didStageFailed) return HostRoleStatus.TIMEDOUT;
+    }
+
+    int numActiveTasks = counters.get(HostRoleStatus.PENDING) + counters.get(HostRoleStatus.QUEUED) + counters.get(HostRoleStatus.IN_PROGRESS);
+
+    if (numActiveTasks > 0) {
+      return HostRoleStatus.IN_PROGRESS;
+    } else if (counters.get(HostRoleStatus.ABORTED) > 0) {
+      Set<Role> rolesWithTimedOutTasks = getRolesOfAbortedTasks(hostRoleCommands);
+      Boolean didStageFailed = didStageFailed(hostRoleCommands, rolesWithTimedOutTasks, successFactors);
+      if (didStageFailed) return HostRoleStatus.ABORTED;
+    }
+
+    return HostRoleStatus.COMPLETED;
+  }
+
+  /**
+   *  Get all {@link Role}s that have at least one task in {@link HostRoleStatus#FAILED}
+   * @param hostRoleCommands list of {@link HostRoleCommand}
+   * @return Set of {@link Role}
+   */
+  protected static Set<Role> getRolesOfFailedTasks(List <HostRoleCommand> hostRoleCommands) {
+    return getRolesOfTasks(hostRoleCommands, HostRoleStatus.FAILED);
+  }
+
+  /**
+   *  Get all {@link Role}s that have at least one task in {@link HostRoleStatus#TIMEDOUT}
+   * @param hostRoleCommands list of {@link HostRoleCommand}
+   * @return Set of {@link Role}
+   */
+  protected static Set<Role> getRolesOfTimedOutTasks(List <HostRoleCommand> hostRoleCommands) {
+    return getRolesOfTasks(hostRoleCommands, HostRoleStatus.TIMEDOUT);
+  }
+
+  /**
+   *  Get all {@link Role}s that have at least one task in {@link HostRoleStatus#ABORTED}
+   * @param hostRoleCommands list of {@link HostRoleCommand}
+   * @return Set of {@link Role}
+   */
+  protected static Set<Role> getRolesOfAbortedTasks(List <HostRoleCommand> hostRoleCommands) {
+    return getRolesOfTasks(hostRoleCommands, HostRoleStatus.ABORTED);
+  }
+
+  /**
+   * Get all {@link Role}s that have at least one task in the given {@code status}
+   * @param hostRoleCommands list of {@link HostRoleCommand}
+   * @param status {@link HostRoleStatus}
+   * @return Set of {@link Role}
+   */
+  protected static Set<Role> getRolesOfTasks(List <HostRoleCommand> hostRoleCommands, final HostRoleStatus status) {
+
+    Predicate<HostRoleCommand> predicate = new Predicate<HostRoleCommand>() {
+      @Override
+      public boolean apply(HostRoleCommand hrc) {
+        return hrc.getStatus() ==  status;
+      }
+    };
+
+    Function<HostRoleCommand, Role> transform = new Function<HostRoleCommand, Role>() {
+      @Override
+      public Role apply(HostRoleCommand hrc) {
+        return hrc.getRole();
+      }
+    };
+    return FluentIterable.from(hostRoleCommands)
+        .filter(predicate)
+        .transform(transform)
+        .toSet();
+  }
+
+  /**
+   * Determines whether a stage has failed, given the roles whose tasks failed and their success factors.
+   * @param hostRoleCommands list of {@link HostRoleCommand} for a stage
+   * @param roles  set of roles to be checked for meeting success criteria
+   * @param successFactors  map of role to its success factor
+   * @return <code>true</code> if the stage failed because the hostRoleCommands of some role did not meet its success criteria
+   */
+  protected static Boolean didStageFailed(List<HostRoleCommand> hostRoleCommands, Set<Role> roles, Map<Role, Float> successFactors) {
+    Boolean isFailed = Boolean.FALSE;
+    for (Role role: roles) {
+      List <HostRoleCommand> hostRoleCommandsOfRole = getHostRoleCommandsOfRole(hostRoleCommands, role);
+      List <HostRoleCommand> failedHostRoleCommands =  getFailedHostRoleCommands(hostRoleCommandsOfRole);
+      float successRatioForRole = (hostRoleCommandsOfRole.size() - failedHostRoleCommands.size()) / (float) hostRoleCommandsOfRole.size();
+      Float successFactorForRole =  successFactors.get(role) == null ? 1.0f : successFactors.get(role);
+      if (successRatioForRole  < successFactorForRole) {
+        isFailed = Boolean.TRUE;
+        break;
+      }
+    }
+    return isFailed;
+  }
+
+  /**
+   * Filters the given commands down to those of a single role.
+   * @param hostRoleCommands list of {@link HostRoleCommand}
+   * @param role {@link Role}
+   * @return list of {@link HostRoleCommand} that belong to the given {@link Role}
+   */
+  protected static List<HostRoleCommand> getHostRoleCommandsOfRole(List <HostRoleCommand> hostRoleCommands, final Role role) {
+    Predicate<HostRoleCommand> predicate = new Predicate<HostRoleCommand>() {
+      @Override
+      public boolean apply(HostRoleCommand hrc) {
+        return hrc.getRole() ==  role;
+      }
+    };
+    return FluentIterable.from(hostRoleCommands)
+        .filter(predicate)
+        .toList();
+  }
+
+  /**
+   * Filters the given commands down to those in a failed and not skippable state.
+   * @param hostRoleCommands list of {@link HostRoleCommand}
+   * @return list of {@link HostRoleCommand} with failed status
+   */
+  protected static List<HostRoleCommand> getFailedHostRoleCommands(List <HostRoleCommand> hostRoleCommands) {
+    Predicate<HostRoleCommand> predicate = new Predicate<HostRoleCommand>() {
+      @Override
+      public boolean apply(HostRoleCommand hrc) {
+        return hrc.getStatus().isFailedAndNotSkippableState();
+      }
+    };
+    return FluentIterable.from(hostRoleCommands)
+        .filter(predicate)
+        .toList();
+  }
+
+
+  /**
+   * Calculate overall status from collection of statuses
+   * @param hostRoleStatuses list of all stages' {@link HostRoleStatus}
+   * @return overall status of a request
+   */
+  public static HostRoleStatus getOverallStatusForRequest (Collection<HostRoleStatus> hostRoleStatuses) {
+    Map<HostRoleStatus, Integer> statusCount = calculateStatusCounts(hostRoleStatuses);
+    return calculateSummaryStatus(statusCount, hostRoleStatuses.size(), false);
+  }
+
+  /**
+   * Calculate overall display status from collection of statuses
+   * @param hostRoleStatuses list of all stages' {@link HostRoleStatus}
+   * @return overall display status of a request
+   */
+  public static HostRoleStatus getOverallDisplayStatusForRequest (Collection<HostRoleStatus> hostRoleStatuses) {
+    Map<HostRoleStatus, Integer> statusCount = calculateStatusCounts(hostRoleStatuses);
+    return calculateSummaryDisplayStatus(statusCount, hostRoleStatuses.size(), false);
+  }
+
+
+  /**
    * Calculate overall status of an upgrade.
    *
    * @param counters   counts of resources that are in various states
@@ -444,7 +796,7 @@ public class CalculatedStatus {
    */
   protected static HostRoleStatus calculateSummaryStatusOfUpgrade(
       Map<HostRoleStatus, Integer> counters, int total) {
-    return calculateSummaryStatusOfStage(counters, total, false);
+    return calculateSummaryStatus(counters, total, false);
   }
 
   /**
@@ -456,10 +808,28 @@ public class CalculatedStatus {
    *
    * @return summary request status based on statuses of tasks in different states.
    */
-  protected static HostRoleStatus calculateSummaryDisplayStatus(
+  public static HostRoleStatus calculateSummaryDisplayStatus(
       Map<HostRoleStatus, Integer> counters, int total, boolean skippable) {
-    return counters.get(HostRoleStatus.SKIPPED_FAILED) > 0 ? HostRoleStatus.SKIPPED_FAILED :
-           counters.get(HostRoleStatus.FAILED) > 0 ? HostRoleStatus.FAILED:
-           calculateSummaryStatusOfStage(counters, total, skippable);
+    return counters.get(HostRoleStatus.FAILED) > 0 ? HostRoleStatus.FAILED:
+           counters.get(HostRoleStatus.TIMEDOUT) > 0 ? HostRoleStatus.TIMEDOUT:
+           counters.get(HostRoleStatus.SKIPPED_FAILED) > 0 ? HostRoleStatus.SKIPPED_FAILED :
+           calculateSummaryStatus(counters, total, skippable);
+  }
+
+  /**
+   * The kinds of {@link HostRoleStatus} persisted by {@link Stage} and {@link Request}
+   */
+  public enum StatusType {
+    STATUS("status"),
+    DISPLAY_STATUS("display_status");
+    private String value;
+
+    StatusType(String value) {
+      this.value = value;
+    }
+
+    public String getValue() {
+      return value;
+    }
   }
 }

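The new counting helpers in CalculatedStatus all follow the same scheme: initialize a counter per status, fold every terminal status into the COMPLETED bucket as well, and then derive IN_PROGRESS as the remainder. A reduced, self-contained sketch of that scheme follows; the Status enum is a hypothetical subset of HostRoleStatus and countStatuses is not an Ambari method.

    import java.util.Arrays;
    import java.util.EnumMap;
    import java.util.List;
    import java.util.Map;

    public class StatusCountSketch {

      // Hypothetical, reduced subset of HostRoleStatus.
      enum Status {
        PENDING(false), QUEUED(false), IN_PROGRESS(false),
        COMPLETED(true), FAILED(true), ABORTED(true);

        private final boolean completed;
        Status(boolean completed) { this.completed = completed; }
        boolean isCompletedState() { return completed; }
      }

      static Map<Status, Integer> countStatuses(List<Status> taskStatuses) {
        Map<Status, Integer> counters = new EnumMap<Status, Integer>(Status.class);
        for (Status s : Status.values()) {
          counters.put(s, 0);
        }
        for (Status s : taskStatuses) {
          // every terminal state also bumps the COMPLETED bucket (COMPLETED itself
          // is only bumped once, by the increment below)
          if (s.isCompletedState() && s != Status.COMPLETED) {
            counters.put(Status.COMPLETED, counters.get(Status.COMPLETED) + 1);
          }
          counters.put(s, counters.get(s) + 1);
        }
        // whatever is neither terminal, QUEUED nor PENDING is counted as IN_PROGRESS
        counters.put(Status.IN_PROGRESS,
            taskStatuses.size()
                - counters.get(Status.COMPLETED)
                - counters.get(Status.QUEUED)
                - counters.get(Status.PENDING));
        return counters;
      }

      public static void main(String[] args) {
        // one pending, one running, one failed task
        System.out.println(countStatuses(
            Arrays.asList(Status.PENDING, Status.IN_PROGRESS, Status.FAILED)));
      }
    }
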
http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/events/TaskCreateEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/TaskCreateEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/TaskCreateEvent.java
new file mode 100644
index 0000000..9d73122
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/TaskCreateEvent.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.events;
+
+
+import java.util.List;
+
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.events.listeners.tasks.TaskStatusListener;
+
+/**
+ * The {@link TaskCreateEvent} is fired whenever a request
+ * needs to be tracked as a running request in
+ * {@link TaskStatusListener}.
+ * This usually happens when a new request is created by a user action or
+ * when ambari-server starts with some stages in a non-completed state.
+ */
+public class TaskCreateEvent extends TaskEvent {
+
+
+  /**
+   * Constructor.
+   *
+   * @param hostRoleCommandList
+   *          all hostRoleCommands for all requests
+   */
+  public TaskCreateEvent(List<HostRoleCommand> hostRoleCommandList) {
+    super(hostRoleCommandList);
+  }
+
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/events/TaskEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/TaskEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/TaskEvent.java
new file mode 100644
index 0000000..ca351d7
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/TaskEvent.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.events;
+
+import java.util.List;
+
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.commons.lang.StringUtils;
+
+/**
+ * {@link TaskEvent} is the base class for all events related to the creation or update of tasks
+ * that might result in an update of the stage/request status
+ */
+public class TaskEvent {
+  /**
+   * List of {@link HostRoleCommand}
+   */
+  private List<HostRoleCommand> hostRoleCommands;
+
+  /**
+   * Constructor.
+   *
+   * @param hostRoleCommands
+   *          list of HRCs which have been reported back by the agents.
+   */
+  public TaskEvent(List<HostRoleCommand> hostRoleCommands) {
+    this.hostRoleCommands = hostRoleCommands;
+  }
+
+  /**
+   *  Gets the hostRoleCommands that triggered this event.
+   * @return List of {@link HostRoleCommand}
+   */
+  public List<HostRoleCommand> getHostRoleCommands() {
+    return hostRoleCommands;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String toString() {
+    String hostRoleCommands = StringUtils.join(this.hostRoleCommands, ", ");
+    StringBuilder buffer = new StringBuilder("TaskEvent{");
+    buffer.append("hostRoleCommands=").append(hostRoleCommands);
+    buffer.append("}");
+    return buffer.toString();
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/events/TaskUpdateEvent.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/TaskUpdateEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/TaskUpdateEvent.java
new file mode 100644
index 0000000..84f67f5
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/TaskUpdateEvent.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.events;
+
+import java.util.List;
+
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+
+/**
+ * The {@link TaskUpdateEvent} is fired every time
+ * host role commands are merged into the database
+ */
+public class TaskUpdateEvent extends TaskEvent{
+
+  public TaskUpdateEvent(List<HostRoleCommand> hostRoleCommandList) {
+    super(hostRoleCommandList);
+  }
+
+}
\ No newline at end of file

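TaskStatusListener (next file) keeps concurrent maps of active tasks, stages and requests, updates them on create/update events, and drops a request's entries once all of its tasks reach a terminal state. The sketch below shows that bookkeeping pattern in isolation, under the simplifying assumption of a single flat task map; Task, Status and the method names are illustrative stand-ins, not the Ambari classes.

    import java.util.Iterator;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class ActiveTaskBookkeepingSketch {

      // Hypothetical, reduced stand-ins for HostRoleStatus and HostRoleCommand.
      enum Status { PENDING, IN_PROGRESS, COMPLETED }

      static class Task {
        final long taskId;
        final long requestId;
        volatile Status status = Status.PENDING;
        Task(long taskId, long requestId) { this.taskId = taskId; this.requestId = requestId; }
      }

      private final Map<Long, Task> activeTasks = new ConcurrentHashMap<Long, Task>();

      // create event: start tracking the task
      void onCreate(Task task) {
        activeTasks.put(task.taskId, task);
      }

      // update event: record the new status and prune the request once it is done
      void onUpdate(long taskId, Status newStatus) {
        Task task = activeTasks.get(taskId);
        if (task == null) {
          return; // update for a task that is not being tracked
        }
        task.status = newStatus;
        if (isRequestCompleted(task.requestId)) {
          removeRequest(task.requestId);
        }
      }

      private boolean isRequestCompleted(long requestId) {
        for (Task t : activeTasks.values()) {
          if (t.requestId == requestId && t.status != Status.COMPLETED) {
            return false;
          }
        }
        return true;
      }

      private void removeRequest(long requestId) {
        Iterator<Task> it = activeTasks.values().iterator();
        while (it.hasNext()) {
          if (it.next().requestId == requestId) {
            it.remove();
          }
        }
      }

      public static void main(String[] args) {
        ActiveTaskBookkeepingSketch sketch = new ActiveTaskBookkeepingSketch();
        sketch.onCreate(new Task(1, 10));
        sketch.onUpdate(1, Status.COMPLETED);
        System.out.println("Still tracking " + sketch.activeTasks.size() + " tasks"); // 0
      }
    }
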
http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/tasks/TaskStatusListener.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/tasks/TaskStatusListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/tasks/TaskStatusListener.java
new file mode 100644
index 0000000..bc146ef
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/tasks/TaskStatusListener.java
@@ -0,0 +1,609 @@
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.events.listeners.tasks;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.server.EagerSingleton;
+import org.apache.ambari.server.Role;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.actionmanager.Request;
+import org.apache.ambari.server.actionmanager.Stage;
+import org.apache.ambari.server.controller.internal.CalculatedStatus;
+import org.apache.ambari.server.events.TaskCreateEvent;
+import org.apache.ambari.server.events.TaskUpdateEvent;
+import org.apache.ambari.server.events.publishers.TaskEventPublisher;
+import org.apache.ambari.server.orm.dao.RequestDAO;
+import org.apache.ambari.server.orm.dao.StageDAO;
+import org.apache.ambari.server.orm.entities.RequestEntity;
+import org.apache.ambari.server.orm.entities.RoleSuccessCriteriaEntity;
+import org.apache.ambari.server.orm.entities.StageEntity;
+import org.apache.ambari.server.orm.entities.StageEntityPK;
+import org.jboss.netty.util.internal.ConcurrentHashMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Function;
+import com.google.common.base.Predicate;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.Sets;
+import com.google.common.eventbus.Subscribe;
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+
+
+/**
+ * The {@link TaskStatusListener} is used to continuously update the status of running Stages and Requests.
+ * It listens for incoming {@link TaskCreateEvent} and {@link TaskUpdateEvent} events, which are fired when host role commands are created or updated.
+ * This listener maintains maps of all running tasks, stages and requests.
+ */
+@Singleton
+@EagerSingleton
+public class TaskStatusListener {
+  /**
+   * Logger.
+   */
+  private final static Logger LOG = LoggerFactory.getLogger(TaskStatusListener.class);
+
+  /**
+   * Maps task id to its {@link HostRoleCommand} Object.
+   * Map has entries of all tasks of all active(ongoing) requests
+   * NOTE: Partial loading of tasks for any request may lead to incorrect update of the request status
+   */
+  private Map<Long,HostRoleCommand> activeTasksMap = new ConcurrentHashMap<>();
+
+  /**
+   * Maps all ongoing request id to its {@link ActiveRequest}
+   */
+  private Map<Long, ActiveRequest> activeRequestMap = new ConcurrentHashMap<>();
+
+  /**
+   * Maps {@link StageEntityPK} of all ongoing requests to its {@link ActiveStage}
+   * with updated {@link ActiveStage#status} and {@link ActiveStage#displayStatus}.
+   */
+  private Map<StageEntityPK, ActiveStage> activeStageMap = new ConcurrentHashMap<>();
+
+  private StageDAO stageDAO;
+
+  private RequestDAO requestDAO;
+
+
+  @Inject
+  public TaskStatusListener(TaskEventPublisher taskEventPublisher, StageDAO stageDAO, RequestDAO requestDAO) {
+    this.stageDAO = stageDAO;
+    this.requestDAO = requestDAO;
+    taskEventPublisher.register(this);
+  }
+
+  public Map<Long,HostRoleCommand> getActiveTasksMap() {
+    return activeTasksMap;
+  }
+
+  public Map<Long, ActiveRequest> getActiveRequestMap() {
+    return activeRequestMap;
+  }
+
+  public Map<StageEntityPK, ActiveStage> getActiveStageMap() {
+    return activeStageMap;
+  }
+
+  /**
+   * On receiving a task update event, updates the related entries of the running request, stage and task in the maps.
+   * An event containing newly created tasks is expected to contain the complete set of all tasks for a request
+   * @param event Consumes {@link TaskUpdateEvent}.
+   */
+  @Subscribe
+  public void onTaskUpdateEvent(TaskUpdateEvent event) {
+    LOG.debug("Received task update event {}", event);
+    List<HostRoleCommand> hostRoleCommandListAll = event.getHostRoleCommands();
+    List<HostRoleCommand>  hostRoleCommandWithReceivedStatus =  new ArrayList<>();
+    Set<StageEntityPK> stagesWithReceivedTaskStatus = new HashSet<>();
+    Set<Long> requestIdsWithReceivedTaskStatus =  new HashSet<>();
+    for (HostRoleCommand hostRoleCommand : hostRoleCommandListAll) {
+      Long reportedTaskId = hostRoleCommand.getTaskId();
+      HostRoleCommand activeTask =  activeTasksMap.get(reportedTaskId);
+      if (activeTask == null) {
+        LOG.error(String.format("Received update for a task %d which is not being tracked as running task", reportedTaskId));
+      } else  {
+        hostRoleCommandWithReceivedStatus.add(hostRoleCommand);
+        StageEntityPK stageEntityPK = new StageEntityPK();
+        stageEntityPK.setRequestId(hostRoleCommand.getRequestId());
+        stageEntityPK.setStageId(hostRoleCommand.getStageId());
+        stagesWithReceivedTaskStatus.add(stageEntityPK);
+        requestIdsWithReceivedTaskStatus.add(hostRoleCommand.getRequestId());
+      }
+    }
+
+    updateActiveTasksMap(hostRoleCommandWithReceivedStatus);
+    Boolean didAnyStageStatusUpdated = updateActiveStagesStatus(stagesWithReceivedTaskStatus, hostRoleCommandListAll);
+    // Presumption: if none of the running stages' statuses changed,
+    // then none of the running requests' statuses need to be updated
+    if (didAnyStageStatusUpdated) {
+      updateActiveRequestsStatus(requestIdsWithReceivedTaskStatus, stagesWithReceivedTaskStatus);
+    }
+
+  }
+
+  /**
+   * On receiving a task create event, creates entries for the running request, stage and tasks in the maps
+   * @param event Consumes {@link TaskCreateEvent}.
+   */
+  @Subscribe
+  public void onTaskCreateEvent(TaskCreateEvent event) {
+    LOG.debug("Received task create event {}", event);
+    List<HostRoleCommand> hostRoleCommandListAll = event.getHostRoleCommands();
+
+    for (HostRoleCommand hostRoleCommand : hostRoleCommandListAll) {
+      activeTasksMap.put(hostRoleCommand.getTaskId(), hostRoleCommand);
+      addStagePK(hostRoleCommand);
+      addRequestId(hostRoleCommand);
+    }
+  }
+
+
+  /**
+   * Updates the statuses of the changed host role commands
+   * @param hostRoleCommandWithReceivedStatus list of host role commands reported
+   */
+  private void updateActiveTasksMap(List<HostRoleCommand> hostRoleCommandWithReceivedStatus) {
+    for (HostRoleCommand hostRoleCommand : hostRoleCommandWithReceivedStatus) {
+      Long taskId = hostRoleCommand.getTaskId();
+      activeTasksMap.put(taskId , hostRoleCommand);
+    }
+  }
+
+
+  /**
+   * Adds new {@link StageEntityPK} to be tracked as running stage in {@link #activeStageMap}
+   * @param hostRoleCommand newly created {@link HostRoleCommand} in {@link #activeTasksMap}
+   */
+  private void addStagePK(HostRoleCommand hostRoleCommand) {
+    StageEntityPK stageEntityPK = new StageEntityPK();
+    stageEntityPK.setRequestId(hostRoleCommand.getRequestId());
+    stageEntityPK.setStageId(hostRoleCommand.getStageId());
+    if (activeStageMap.containsKey(stageEntityPK)) {
+      activeStageMap.get(stageEntityPK).addTaskId(hostRoleCommand.getTaskId());
+    } else {
+      StageEntity stageEntity = stageDAO.findByPK(stageEntityPK);
+      // Stage entity of the hostrolecommand should be persisted before publishing task create event
+      assert stageEntity != null;
+      Map<Role, Float> successFactors = new HashMap<>();
+      Collection<RoleSuccessCriteriaEntity> roleSuccessCriteriaEntities = stageEntity.getRoleSuccessCriterias();
+      for (RoleSuccessCriteriaEntity successCriteriaEntity : roleSuccessCriteriaEntities) {
+        successFactors.put(successCriteriaEntity.getRole(), successCriteriaEntity.getSuccessFactor().floatValue());
+      }
+      Set<Long> taskIdSet =  Sets.newHashSet(hostRoleCommand.getTaskId());
+
+      ActiveStage reportedStage = new ActiveStage(stageEntity.getStatus(), stageEntity.getDisplayStatus(),
+          successFactors, stageEntity.isSkippable(), taskIdSet);
+      activeStageMap.put(stageEntityPK, reportedStage);
+    }
+  }
+
+  /**
+   * Updates and persists all changed stage statuses
+   * @param stagesWithReceivedTaskStatus set of stages that have received task statuses
+   * @param hostRoleCommandListAll list of all task updates received from the agents
+   * @return  <code>true</code> if any of the stages has changed its existing status;
+   *          <code>false</code> otherwise
+   */
+  private Boolean updateActiveStagesStatus(final Set<StageEntityPK> stagesWithReceivedTaskStatus, List<HostRoleCommand> hostRoleCommandListAll) {
+    Boolean didAnyStageStatusUpdated = Boolean.FALSE;
+    for (StageEntityPK reportedStagePK : stagesWithReceivedTaskStatus) {
+      if (activeStageMap.containsKey(reportedStagePK)) {
+        Boolean didStatusChange = updateStageStatus(reportedStagePK, hostRoleCommandListAll);
+        if (didStatusChange) {
+          ActiveStage reportedStage = activeStageMap.get(reportedStagePK);
+          stageDAO.updateStatus(reportedStagePK, reportedStage.getStatus(), reportedStage.getDisplayStatus());
+          didAnyStageStatusUpdated = Boolean.TRUE;
+        }
+      } else {
+        LOG.error(String.format("Received update for a task whose stage is not being tracked as running stage: %s", reportedStagePK.toString()));
+      }
+
+    }
+    return didAnyStageStatusUpdated;
+  }
+
+  /**
+   * Adds new request id to be tracked as running request in {@link #activeRequestMap}
+   * @param hostRoleCommand newly created {@link HostRoleCommand} in {@link #activeTasksMap}
+   */
+  private void addRequestId(HostRoleCommand hostRoleCommand) {
+    Long requestId = hostRoleCommand.getRequestId();
+    StageEntityPK stageEntityPK = new StageEntityPK();
+    stageEntityPK.setRequestId(hostRoleCommand.getRequestId());
+    stageEntityPK.setStageId(hostRoleCommand.getStageId());
+    if (activeRequestMap.containsKey(requestId)) {
+      activeRequestMap.get(requestId).addStageEntityPK(stageEntityPK);
+    } else {
+      RequestEntity requestEntity = requestDAO.findByPK(requestId);
+      // Request entity of the hostrolecommand should be persisted before publishing task create event
+      assert requestEntity != null;
+      Set<StageEntityPK> stageEntityPKs =  Sets.newHashSet(stageEntityPK);
+      ActiveRequest request = new ActiveRequest(requestEntity.getStatus(),requestEntity.getDisplayStatus(), stageEntityPKs);
+      activeRequestMap.put(requestId, request);
+    }
+  }
+
+
+  /**
+   * Updates and persists changed request statuses
+   * @param requestIdsWithReceivedTaskStatus set of request ids that have received task statuses
+   * @param stagesWithChangedTaskStatus set of stages that have received tasks with changed status
+   */
+  private void updateActiveRequestsStatus(final Set<Long> requestIdsWithReceivedTaskStatus, Set<StageEntityPK> stagesWithChangedTaskStatus) {
+    for (Long reportedRequestId : requestIdsWithReceivedTaskStatus) {
+      if (activeRequestMap.containsKey(reportedRequestId)) {
+        ActiveRequest request =  activeRequestMap.get(reportedRequestId);
+        Boolean didStatusChange = updateRequestStatus(reportedRequestId, stagesWithChangedTaskStatus);
+        if (didStatusChange) {
+          requestDAO.updateStatus(reportedRequestId, request.getStatus(), request.getDisplayStatus());
+        }
+        if (request.isCompleted() && isAllTasksCompleted(reportedRequestId)) {
+          // A request is considered to have finished if the request status and all of its tasks' statuses are completed;
+          // in that case, the request and its stages
+          // and tasks should no longer be tracked as active (running)
+          removeRequestStageAndTasks(reportedRequestId);
+        }
+      } else {
+        LOG.error(String.format("Received update for a task whose request %d is not being tracked as running request", reportedRequestId));
+      }
+
+    }
+  }
+
+  /**
+   *
+   * @param requestId request Id
+   * @return  <code>false</code> if any of the tasks belonging to requestId has an incomplete status
+   *          <code>true</code> otherwise
+   */
+  private Boolean isAllTasksCompleted(Long requestId) {
+    Boolean result = Boolean.TRUE;
+    for (Map.Entry<Long, HostRoleCommand> entry : activeTasksMap.entrySet()) {
+      if (entry.getValue().getRequestId() == requestId && !entry.getValue().getStatus().isCompletedState()) {
+        result = Boolean.FALSE;
+      }
+    }
+    return result;
+  }
+
+  /**
+   * Removes entries from {@link #activeTasksMap}, {@link #activeStageMap} and {@link #activeRequestMap}
+   * @param requestId request id whose entry and its stage and task entries are to be removed
+   */
+  private void removeRequestStageAndTasks(Long requestId) {
+    removeTasks(requestId);
+    removeStages(requestId);
+    removeRequest(requestId);
+  }
+
+
+  /**
+   * Filters the tracked {@link StageEntityPK} keys of {@link #activeStageMap} to those belonging to the given request
+   * @param requestID request id
+   * @return  list of {@link StageEntityPK} for the request
+   */
+  private List<StageEntityPK> getAllStageEntityPKForRequest(final Long requestID) {
+    Predicate<StageEntityPK> predicate = new Predicate<StageEntityPK>() {
+      @Override
+      public boolean apply(StageEntityPK stageEntityPK) {
+        return stageEntityPK.getRequestId().equals(requestID);
+      }
+    };
+    return  FluentIterable.from(activeStageMap.keySet())
+        .filter(predicate)
+        .toList();
+  }
+
+
+
+  /**
+   * Recomputes the status of the stage from the status of its host role commands
+   * @param stagePK {@link StageEntityPK} primary key for the stage entity
+   * @param hostRoleCommandListAll list of all host role commands whose status has been received from the agent
+   * @return {@link Boolean} <code>TRUE</code> if status of the given stage changed.
+   */
+  private Boolean updateStageStatus(final StageEntityPK stagePK, List<HostRoleCommand> hostRoleCommandListAll) {
+    Boolean didAnyStatusChanged = Boolean.FALSE;
+    ActiveStage reportedStage = activeStageMap.get(stagePK);
+    HostRoleStatus stageCurrentStatus = reportedStage.getStatus();
+    HostRoleStatus stageCurrentDisplayStatus = reportedStage.getDisplayStatus();
+
+
+    // if the stage is already marked as completed then do not recalculate its status from the host role commands
+    // Presumption: There will be no status transition of the host role command from one completed state to another
+    if (!stageCurrentDisplayStatus.isCompletedState() || !stageCurrentStatus.isCompletedState()) {
+      Map<HostRoleStatus, Integer> receivedTaskStatusCount = CalculatedStatus.calculateStatusCountsForTasks(hostRoleCommandListAll, stagePK);
+      HostRoleStatus statusFromPartialSet = CalculatedStatus.calculateSummaryStatusFromPartialSet(receivedTaskStatusCount, reportedStage.getSkippable());
+      HostRoleStatus displayStatusFromPartialSet = CalculatedStatus.calculateSummaryStatusFromPartialSet(receivedTaskStatusCount, Boolean.FALSE);
+      if (statusFromPartialSet == HostRoleStatus.PENDING || displayStatusFromPartialSet == HostRoleStatus.PENDING) {
+        Function<Long,HostRoleCommand> transform = new Function<Long,HostRoleCommand>(){
+          @Override
+          public HostRoleCommand apply(Long taskId) {
+            return activeTasksMap.get(taskId);
+          }
+        };
+
+        List<HostRoleCommand> activeHostRoleCommandsOfStage = FluentIterable.from(reportedStage.getTaskIds())
+            .transform(transform).toList();
+        Map<HostRoleStatus, Integer> statusCount = CalculatedStatus.calculateStatusCountsForTasks(activeHostRoleCommandsOfStage);
+        if (displayStatusFromPartialSet == HostRoleStatus.PENDING) {
+          // calculate and get new display status of the stage as per the new status of received host role commands
+          HostRoleStatus display_status = CalculatedStatus.calculateSummaryDisplayStatus(statusCount, activeHostRoleCommandsOfStage.size(), reportedStage.getSkippable());
+          if (display_status != stageCurrentDisplayStatus) {
+            reportedStage.setDisplayStatus(display_status);
+            didAnyStatusChanged = Boolean.TRUE;
+          }
+
+        } else {
+          reportedStage.setDisplayStatus(displayStatusFromPartialSet);
+          didAnyStatusChanged = Boolean.TRUE;
+        }
+
+        if (statusFromPartialSet == HostRoleStatus.PENDING) {
+          // calculate status of the stage as per the new status of received host role commands
+          HostRoleStatus status = CalculatedStatus.calculateStageStatus(activeHostRoleCommandsOfStage, statusCount, reportedStage.getSuccessFactors(), reportedStage.getSkippable());
+          if (status != stageCurrentStatus) {
+            reportedStage.setStatus(status);
+            didAnyStatusChanged = Boolean.TRUE;
+          }
+        } else {
+          reportedStage.setStatus(statusFromPartialSet);
+          didAnyStatusChanged = Boolean.TRUE;
+        }
+      } else {
+        reportedStage.setStatus(statusFromPartialSet);
+        reportedStage.setDisplayStatus(displayStatusFromPartialSet);
+        didAnyStatusChanged = Boolean.TRUE;
+      }
+    }
+
+    return didAnyStatusChanged;
+  }
+
+  /**
+   *
+   * @param requestId {@link Request} whose status is to be updated
+   * @param stagesWithChangedTaskStatus set of stages that have received tasks with changed status
+   * @return {@link Boolean} <code>TRUE</code> if the request status has changed from its existing value
+   */
+  private Boolean updateRequestStatus (final Long requestId, Set<StageEntityPK> stagesWithChangedTaskStatus) {
+    Boolean didStatusChanged = Boolean.FALSE;
+    ActiveRequest request = activeRequestMap.get(requestId);
+    HostRoleStatus requestCurrentStatus = request.getStatus();
+    HostRoleStatus requestCurrentDisplayStatus = request.getDisplayStatus();
+
+    if (!requestCurrentDisplayStatus.isCompletedState() || !requestCurrentStatus.isCompletedState()) {
+      List<ActiveStage> activeStagesWithChangedTaskStatus = new ArrayList<>();
+      for (StageEntityPK stageEntityPK : stagesWithChangedTaskStatus) {
+        if (requestId.equals(stageEntityPK.getRequestId())) {
+          ActiveStage activeStage = activeStageMap.get(stageEntityPK);
+          activeStagesWithChangedTaskStatus.add(activeStage);
+        }
+      }
+
+
+      Map<CalculatedStatus.StatusType,Map<HostRoleStatus, Integer>> stageStatusCountFromPartialSet = CalculatedStatus.calculateStatusCountsForStage(activeStagesWithChangedTaskStatus);
+      HostRoleStatus statusFromPartialSet = CalculatedStatus.calculateSummaryStatusFromPartialSet(stageStatusCountFromPartialSet.get(CalculatedStatus.StatusType.STATUS), Boolean.FALSE);
+      HostRoleStatus displayStatusFromPartialSet = CalculatedStatus.calculateSummaryStatusFromPartialSet(stageStatusCountFromPartialSet.get(CalculatedStatus.StatusType.DISPLAY_STATUS), Boolean.FALSE);
+
+      if (statusFromPartialSet == HostRoleStatus.PENDING || displayStatusFromPartialSet == HostRoleStatus.PENDING) {
+        List <ActiveStage> allActiveStages = new ArrayList<>();
+        for (StageEntityPK stageEntityPK:request.getStageEntityPks()) {
+          ActiveStage activeStage = activeStageMap.get(stageEntityPK);
+          allActiveStages.add(activeStage);
+        }
+        Map<CalculatedStatus.StatusType,Map<HostRoleStatus, Integer>> stageStatusCount = CalculatedStatus.calculateStatusCountsForStage(allActiveStages);
+
+        if (displayStatusFromPartialSet == HostRoleStatus.PENDING) {
+          // calculate and get new display status of the request as per the new status of its stages
+
+          HostRoleStatus display_status = CalculatedStatus.calculateSummaryDisplayStatus(stageStatusCount.get(CalculatedStatus.StatusType.DISPLAY_STATUS), allActiveStages.size(), false);
+          if (display_status != requestCurrentDisplayStatus) {
+            request.setDisplayStatus(display_status);
+            didStatusChanged = Boolean.TRUE;
+          }
+
+        } else {
+          request.setDisplayStatus(displayStatusFromPartialSet);
+          didStatusChanged = Boolean.TRUE;
+        }
+
+        if (statusFromPartialSet == HostRoleStatus.PENDING) {
+          // calculate status of the request as per the new status of its stages
+          HostRoleStatus status = CalculatedStatus.calculateSummaryStatus(stageStatusCount.get(CalculatedStatus.StatusType.STATUS), allActiveStages.size(), false);
+          if (status != requestCurrentStatus) {
+            request.setStatus(status);
+            didStatusChanged = Boolean.TRUE;
+          }
+        } else {
+          request.setStatus(statusFromPartialSet);
+          didStatusChanged = Boolean.TRUE;
+        }
+      } else {
+        request.setStatus(statusFromPartialSet);
+        request.setDisplayStatus(displayStatusFromPartialSet);
+        didStatusChanged = Boolean.TRUE;
+      }
+    }
+
+    return didStatusChanged;
+  }
+
+
+  /**
+   * Removes list of {@link HostRoleCommand} entries from {@link #activeTasksMap}
+   * @param requestId request id
+   */
+  private void removeTasks(Long requestId) {
+    Iterator<Map.Entry<Long, HostRoleCommand>> iter = activeTasksMap.entrySet().iterator();
+    while (iter.hasNext()) {
+      Map.Entry<Long, HostRoleCommand> entry = iter.next();
+      HostRoleCommand hrc = entry.getValue();
+      if (hrc.getRequestId() == requestId) {
+        if (!hrc.getStatus().isCompletedState()) {
+          LOG.error(String.format("Task %d should have been completed before being removed from the running task cache (activeTasksMap)", hrc.getTaskId()));
+        }
+        iter.remove();
+      }
+    }
+  }
+
+
+  /**
+   * Removes list of {@link StageEntityPK} entries from {@link #activeStageMap}
+   * @param requestId request Id
+   */
+  private void removeStages(Long requestId) {
+    List <StageEntityPK> stageEntityPKs = getAllStageEntityPKForRequest(requestId);
+    for (StageEntityPK stageEntityPK: stageEntityPKs) {
+      activeStageMap.remove(stageEntityPK);
+    }
+  }
+
+
+  /**
+   * Removes request id from {@link #activeRequestMap}
+   * @param requestId request Id
+   */
+  private void removeRequest(Long requestId) {
+    activeRequestMap.remove(requestId);
+  }
+
+
+  /**
+   * This class stores {@link Request#status} and {@link Request#displayStatus} information
+   * This information is cached for all running {@link Request} at {@link #activeRequestMap}
+   */
+  protected class ActiveRequest {
+    private HostRoleStatus status;
+    private HostRoleStatus displayStatus;
+    private Set <StageEntityPK> stageEntityPks;
+
+    public ActiveRequest(HostRoleStatus status, HostRoleStatus displayStatus, Set<StageEntityPK> stageEntityPks) {
+      this.status = status;
+      this.displayStatus = displayStatus;
+      this.stageEntityPks = stageEntityPks;
+    }
+
+    public HostRoleStatus getStatus() {
+      return status;
+    }
+
+    public void setStatus(HostRoleStatus status) {
+      this.status = status;
+    }
+
+    public HostRoleStatus getDisplayStatus() {
+      return displayStatus;
+    }
+
+    public void setDisplayStatus(HostRoleStatus displayStatus) {
+      this.displayStatus = displayStatus;
+    }
+
+    public Boolean isCompleted() {
+      return status.isCompletedState() && displayStatus.isCompletedState();
+    }
+
+    public Set <StageEntityPK> getStageEntityPks() {
+      return stageEntityPks;
+    }
+
+    public void addStageEntityPK(StageEntityPK stageEntityPK) {
+      stageEntityPks.add(stageEntityPK);
+    }
+
+  }
+
+  /**
+   * This class stores information needed to determine {@link Stage#status} and {@link Stage#displayStatus}
+   * This information is cached for all {@link Stage} of all running {@link Request} at {@link #activeStageMap}
+   */
+  public class ActiveStage {
+    private HostRoleStatus status;
+    private HostRoleStatus displayStatus;
+    private Boolean skippable;
+    private Set <Long> taskIds;
+
+    //Map of roles to successFactors for this stage. Default is 1 i.e. 100%
+    private Map<Role, Float> successFactors = new HashMap<Role, Float>();
+
+    public ActiveStage(HostRoleStatus status, HostRoleStatus displayStatus,
+                       Map<Role, Float> successFactors, Boolean skippable, Set<Long> taskIds) {
+      this.status = status;
+      this.displayStatus = displayStatus;
+      this.successFactors =  successFactors;
+      this.skippable = skippable;
+      this.taskIds = taskIds;
+    }
+
+    public HostRoleStatus getStatus() {
+      return status;
+    }
+
+    public void setStatus(HostRoleStatus status) {
+      this.status = status;
+    }
+
+    public HostRoleStatus getDisplayStatus() {
+      return displayStatus;
+    }
+
+    public void setDisplayStatus(HostRoleStatus displayStatus) {
+      this.displayStatus = displayStatus;
+    }
+
+    public Boolean getSkippable() {
+      return skippable;
+    }
+
+    public void setSkippable(Boolean skippable) {
+      this.skippable = skippable;
+    }
+
+    public Map<Role, Float> getSuccessFactors() {
+      return successFactors;
+    }
+
+    public void setSuccessFactors(Map<Role, Float> successFactors) {
+      this.successFactors = successFactors;
+    }
+
+    public Set <Long> getTaskIds() {
+      return taskIds;
+    }
+
+    public void addTaskId(Long taskId) {
+      taskIds.add(taskId);
+    }
+
+  }
+}
\ No newline at end of file
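
The update methods above share a two-phase pattern: summarize only the tasks that reported in the current batch, and fall back to the full cached set when that partial summary is inconclusive (PENDING). Below is a minimal, self-contained sketch of that control flow; the class, enum and method names are hypothetical stand-ins for Ambari's CalculatedStatus helpers rather than a reproduction of their API.

    import java.util.Arrays;
    import java.util.Collection;

    // Hypothetical sketch of the two-phase status calculation; not Ambari's CalculatedStatus API.
    public class PartialStatusSketch {

      enum Status { PENDING, IN_PROGRESS, FAILED, COMPLETED }

      // A partial batch is only conclusive when it proves failure or progress.
      static Status summarizePartial(Collection<Status> reportedBatch) {
        if (reportedBatch.contains(Status.FAILED)) {
          return Status.FAILED;
        }
        if (reportedBatch.contains(Status.IN_PROGRESS)) {
          return Status.IN_PROGRESS;
        }
        return Status.PENDING; // inconclusive: unreported tasks may still decide the outcome
      }

      // If the partial result is inconclusive, recompute from every task cached for the stage.
      static Status recomputeStageStatus(Collection<Status> reportedBatch, Collection<Status> allCachedTasks) {
        Status fromPartial = summarizePartial(reportedBatch);
        if (fromPartial != Status.PENDING) {
          return fromPartial;
        }
        boolean allCompleted = allCachedTasks.stream().allMatch(s -> s == Status.COMPLETED);
        return allCompleted ? Status.COMPLETED : Status.PENDING;
      }

      public static void main(String[] args) {
        System.out.println(recomputeStageStatus(
            Arrays.asList(Status.COMPLETED),                     // only one task reported in this batch
            Arrays.asList(Status.COMPLETED, Status.PENDING)));   // full cache shows another task still pending
      }
    }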

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/events/publishers/TaskEventPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/publishers/TaskEventPublisher.java b/ambari-server/src/main/java/org/apache/ambari/server/events/publishers/TaskEventPublisher.java
new file mode 100644
index 0000000..fdc41e5
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/events/publishers/TaskEventPublisher.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.events.publishers;
+
+import org.apache.ambari.server.events.TaskEvent;
+
+import com.google.common.eventbus.EventBus;
+import com.google.common.eventbus.Subscribe;
+import com.google.inject.Singleton;
+
+/**
+ * The {@link TaskEventPublisher} is used to publish instances of
+ * {@link TaskEvent} to any interested listener registered via {@link com.google.common.eventbus.Subscribe}.
+ * It uses a single-threaded, serial {@link EventBus}.
+ */
+@Singleton
+public class TaskEventPublisher {
+
+  /**
+   * A single threaded, synchronous event bus for processing task events.
+   */
+  private final EventBus m_eventBus = new EventBus("ambari-task-report-event-bus");
+
+
+  /**
+   * Publishes the specified event to all registered listeners that
+   * {@link Subscribe} to {@link TaskEvent} instances.
+   *
+   * @param event {@link TaskEvent}
+   */
+  public void publish(TaskEvent event) {
+    m_eventBus.post(event);
+  }
+
+  /**
+   * Register a listener to receive events. The listener should use the
+   * {@link Subscribe} annotation.
+   *
+   * @param object
+   *          the listener to receive events.
+   */
+  public void register(Object object) {
+    m_eventBus.register(object);
+  }
+
+}
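
For context, a consumer of this publisher only needs to call register(Object) and annotate a handler method with Guava's @Subscribe; dispatch happens by the runtime type of the posted event. The listener class below is a hypothetical sketch of that wiring and is not part of this commit.

    import org.apache.ambari.server.events.TaskUpdateEvent;
    import org.apache.ambari.server.events.publishers.TaskEventPublisher;

    import com.google.common.eventbus.Subscribe;

    // Hypothetical subscriber sketch; class and method names are illustrative only.
    public class LoggingTaskListener {

      public LoggingTaskListener(TaskEventPublisher publisher) {
        // Registration turns every @Subscribe method on this object into an event handler.
        publisher.register(this);
      }

      @Subscribe
      public void onTaskUpdate(TaskUpdateEvent event) {
        // React to the batch of host role commands carried by the event.
        System.out.println("Received task update event: " + event);
      }
    }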

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
index 02c4091..e834045 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/HostRoleCommandDAO.java
@@ -40,6 +40,8 @@ import org.apache.ambari.annotations.TransactionalLock;
 import org.apache.ambari.annotations.TransactionalLock.LockArea;
 import org.apache.ambari.annotations.TransactionalLock.LockType;
 import org.apache.ambari.server.RoleCommand;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
 import org.apache.ambari.server.api.query.JpaPredicateVisitor;
 import org.apache.ambari.server.api.query.JpaSortBuilder;
@@ -49,6 +51,9 @@ import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.SortRequest;
 import org.apache.ambari.server.controller.utilities.PredicateHelper;
+import org.apache.ambari.server.events.TaskCreateEvent;
+import org.apache.ambari.server.events.TaskUpdateEvent;
+import org.apache.ambari.server.events.publishers.TaskEventPublisher;
 import org.apache.ambari.server.orm.RequiresSession;
 import org.apache.ambari.server.orm.TransactionalLocks;
 import org.apache.ambari.server.orm.entities.HostEntity;
@@ -58,9 +63,11 @@ import org.apache.ambari.server.orm.entities.StageEntity;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Function;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.LoadingCache;
+import com.google.common.collect.FluentIterable;
 import com.google.common.collect.Lists;
 import com.google.inject.Inject;
 import com.google.inject.Provider;
@@ -144,6 +151,13 @@ public class HostRoleCommandDAO {
   @Inject
   private Configuration configuration;
 
+
+  @Inject
+  HostRoleCommandFactory hostRoleCommandFactory;
+
+  @Inject
+  private TaskEventPublisher taskEventPublisher;
+
   /**
    * Used to ensure that methods which rely on the completion of
    * {@link Transactional} can detect when they are able to run.
@@ -629,11 +643,17 @@ public class HostRoleCommandDAO {
   @Transactional
   @TransactionalLock(lockArea = LockArea.HRC_STATUS_CACHE, lockType = LockType.WRITE)
   public HostRoleCommandEntity merge(HostRoleCommandEntity entity) {
+    entity = mergeWithoutPublishEvent(entity);
+    publishTaskUpdateEvent(Collections.singletonList(hostRoleCommandFactory.createExisting(entity)));
+    return entity;
+  }
+
+  @Transactional
+  @TransactionalLock(lockArea = LockArea.HRC_STATUS_CACHE, lockType = LockType.WRITE)
+  public HostRoleCommandEntity mergeWithoutPublishEvent(HostRoleCommandEntity entity) {
     EntityManager entityManager = entityManagerProvider.get();
     entity = entityManager.merge(entity);
-
     invalidateHostRoleCommandStatusSummaryCache(entity);
-
     return entity;
   }
 
@@ -667,10 +687,51 @@ public class HostRoleCommandDAO {
     }
 
     invalidateHostRoleCommandStatusSummaryCache(requestsToInvalidate);
-
+    publishTaskUpdateEvent(getHostRoleCommands(entities));
     return managedList;
   }
 
+  /**
+   * Converts a collection of {@link HostRoleCommandEntity} to a list of {@link HostRoleCommand}
+   * @param entities host role command entities to convert
+   */
+  public List<HostRoleCommand> getHostRoleCommands(Collection<HostRoleCommandEntity> entities) {
+    Function<HostRoleCommandEntity, HostRoleCommand> transform = new Function<HostRoleCommandEntity, HostRoleCommand> () {
+      @Override
+      public HostRoleCommand apply(HostRoleCommandEntity entity) {
+        return hostRoleCommandFactory.createExisting(entity);
+      }
+    };
+    return FluentIterable.from(entities)
+        .transform(transform)
+        .toList();
+
+  }
+
+  /**
+   * Publishes a {@link TaskUpdateEvent} carrying the given host role commands, if the list is not empty
+   * @param hostRoleCommands host role commands with updated status
+   */
+  public void publishTaskUpdateEvent(List<HostRoleCommand> hostRoleCommands) {
+    if (!hostRoleCommands.isEmpty()) {
+      TaskUpdateEvent taskUpdateEvent = new TaskUpdateEvent(hostRoleCommands);
+      taskEventPublisher.publish(taskUpdateEvent);
+    }
+  }
+
+  /**
+   * Publishes a {@link TaskCreateEvent} carrying the given host role commands, if the list is not empty
+   * @param hostRoleCommands newly created host role commands
+   */
+  public void publishTaskCreateEvent(List<HostRoleCommand> hostRoleCommands) {
+    if (!hostRoleCommands.isEmpty()) {
+      TaskCreateEvent taskCreateEvent = new TaskCreateEvent(hostRoleCommands);
+      taskEventPublisher.publish(taskCreateEvent);
+    }
+  }
+
+
+
   @Transactional
   @TransactionalLock(lockArea = LockArea.HRC_STATUS_CACHE, lockType = LockType.WRITE)
   public void remove(HostRoleCommandEntity entity) {
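
The split between merge(HostRoleCommandEntity) and mergeWithoutPublishEvent(HostRoleCommandEntity) lets batch callers defer publication until every entity has been merged, so subscribers see one aggregated TaskUpdateEvent rather than one event per row. A hedged sketch of such a caller follows; the class and method names surrounding the DAO calls are assumptions, not code from this commit.

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;

    import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
    import org.apache.ambari.server.orm.entities.HostRoleCommandEntity;

    // Hypothetical batch caller: merge all entities first, then publish one aggregated event.
    public class BatchMergeSketch {

      public static void mergeAndPublish(HostRoleCommandDAO dao, Collection<HostRoleCommandEntity> entities) {
        List<HostRoleCommandEntity> merged = new ArrayList<>();
        for (HostRoleCommandEntity entity : entities) {
          merged.add(dao.mergeWithoutPublishEvent(entity));
        }
        // One TaskUpdateEvent for the whole batch instead of one event per merged row.
        dao.publishTaskUpdateEvent(dao.getHostRoleCommands(merged));
      }
    }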

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RequestDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RequestDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RequestDAO.java
index 1c4d0a3..2696f66 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RequestDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/RequestDAO.java
@@ -144,6 +144,14 @@ public class RequestDAO {
   }
 
   @Transactional
+  public void updateStatus(long requestId, HostRoleStatus status, HostRoleStatus displayStatus) {
+    RequestEntity requestEntity = findByPK(requestId);
+    requestEntity.setStatus(status);
+    requestEntity.setDisplayStatus(displayStatus);
+    merge(requestEntity);
+  }
+
+  @Transactional
   public void create(RequestEntity requestEntity) {
     entityManagerProvider.get().persist(requestEntity);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/StageDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/StageDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/StageDAO.java
index d2f899f..126468a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/StageDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/StageDAO.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.server.orm.dao;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.EnumSet;
 import java.util.HashMap;
@@ -173,11 +174,15 @@ public class StageDAO {
     return daoUtils.selectList(query);
   }
 
+  /**
+   * Finds all stages whose status is in the given collection
+   * @param statuses {@link HostRoleStatus} values to match
+   * @return list of matching stage entities
+   */
   @RequiresSession
-  public List<StageEntity> findByCommandStatuses(
-      Collection<HostRoleStatus> statuses) {
+  public List<StageEntity> findByStatuses(Collection<HostRoleStatus> statuses) {
     TypedQuery<StageEntity> query = entityManagerProvider.get().createNamedQuery(
-        "StageEntity.findByCommandStatuses", StageEntity.class);
+        "StageEntity.findByStatuses", StageEntity.class);
 
     query.setParameter("statuses", statuses);
     return daoUtils.selectList(query);
@@ -280,8 +285,8 @@ public class StageDAO {
    *          the stage entity to update
    * @param desiredStatus
    *          the desired stage status
-   * @param controller
-   *          the ambari management controller
+   * @param actionManager
+   *          the action manager
    *
    * @throws java.lang.IllegalArgumentException
    *           if the transition to the desired status is not a legal transition
@@ -301,9 +306,11 @@ public class StageDAO {
     if (desiredStatus == HostRoleStatus.ABORTED) {
       actionManager.cancelRequest(stage.getRequestId(), "User aborted.");
     } else {
+      List <HostRoleCommandEntity> hrcWithChangedStatus = new ArrayList<HostRoleCommandEntity>();
       for (HostRoleCommandEntity hostRoleCommand : tasks) {
         HostRoleStatus hostRoleStatus = hostRoleCommand.getStatus();
         if (hostRoleStatus.equals(currentStatus)) {
+          hrcWithChangedStatus.add(hostRoleCommand);
           hostRoleCommand.setStatus(desiredStatus);
 
           if (desiredStatus == HostRoleStatus.PENDING) {
@@ -316,6 +323,21 @@ public class StageDAO {
   }
 
   /**
+   * Updates and persists the status and display status of the given stage
+   * @param stageEntityPK  {@link StageEntityPK} of the stage to update
+   * @param status new {@link HostRoleStatus}
+   * @param displayStatus new display {@link HostRoleStatus}
+   */
+  @Transactional
+  public void updateStatus(StageEntityPK stageEntityPK, HostRoleStatus status, HostRoleStatus displayStatus) {
+    StageEntity stageEntity = findByPK(stageEntityPK);
+    stageEntity.setStatus(status);
+    stageEntity.setDisplayStatus(displayStatus);
+    merge(stageEntity);
+  }
+
+
+  /**
    * Determine whether or not it is valid to transition from this stage status
    * to the given status.
    *

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
index 74271b9..a809295 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/HostRoleCommandEntity.java
@@ -105,9 +105,9 @@ public class HostRoleCommandEntity {
   @Basic
   private Integer exitcode = 0;
 
-  @Column(name = "status")
+  @Column(name = "status", nullable = false)
   @Enumerated(EnumType.STRING)
-  private HostRoleStatus status;
+  private HostRoleStatus status = HostRoleStatus.PENDING;
 
   @Column(name = "std_error")
   @Lob


[44/50] [abbrv] ambari git commit: AMBARI-18868. Stage and Request status should be persisted in the database. (jaimin)

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
index 7944d21..f19aa72 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
@@ -69,9 +69,28 @@ public class RequestEntity {
   @Enumerated(value = EnumType.STRING)
   private RequestType requestType;
 
-  @Column(name = "status")
+  /**
+   * This is the logical status of the request and
+   * represents whether the intent of the request has been accomplished or not
+   *
+   *  Status is calculated by aggregating the {@link StageEntity#status} of all belonging stages
+   *
+   */
+  @Column(name = "status", nullable = false)
   @Enumerated(value = EnumType.STRING)
-  private HostRoleStatus status;
+  private HostRoleStatus status = HostRoleStatus.PENDING;
+
+  /**
+   * This status indicates whether any of the underlying tasks
+   * have encountered any type of failure ({@link HostRoleStatus#isFailedState()})
+   *
+   * Status is calculated by taking into account only
+   * the belonging {@link HostRoleCommandEntity#status} (or {@link StageEntity#status})
+   *
+   */
+  @Column(name = "display_status", nullable = false)
+  @Enumerated(value = EnumType.STRING)
+  private HostRoleStatus displayStatus = HostRoleStatus.PENDING;
 
   @Basic
   @Column(name = "create_time", nullable = false)
@@ -89,7 +108,7 @@ public class RequestEntity {
   @Column(name = "exclusive_execution", insertable = true, updatable = true, nullable = false)
   private Integer exclusive = 0;
 
-  @OneToMany(mappedBy = "request")
+  @OneToMany(mappedBy = "request", cascade = CascadeType.REMOVE)
   private Collection<StageEntity> stages;
 
   @OneToMany(mappedBy = "requestEntity", cascade = CascadeType.ALL)
@@ -207,14 +226,38 @@ public class RequestEntity {
     this.commandName = commandName;
   }
 
+  /**
+   *  get status for the request
+   * @return {@link HostRoleStatus}
+   */
   public HostRoleStatus getStatus() {
     return status;
   }
 
+  /**
+   * sets status for the request
+   * @param status {@link HostRoleStatus}
+   */
   public void setStatus(HostRoleStatus status) {
     this.status = status;
   }
 
+  /**
+   * get display status for the request
+   * @return  {@link HostRoleStatus}
+   */
+  public HostRoleStatus getDisplayStatus() {
+    return displayStatus;
+  }
+
+  /**
+   * sets display status for the request
+   * @param displayStatus {@link HostRoleStatus}
+   */
+  public void setDisplayStatus(HostRoleStatus displayStatus) {
+    this.displayStatus = displayStatus;
+  }
+
   public RequestScheduleEntity getRequestScheduleEntity() {
     return requestScheduleEntity;
   }
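
The practical difference between the two columns shows up with skippable failures: a request whose only failures occurred in skippable stages still accomplished its intent, yet operators should be told that something failed underneath. The snippet below is a hypothetical illustration of that intent based on the Javadoc above, not an Ambari test.

    import org.apache.ambari.server.actionmanager.HostRoleStatus;
    import org.apache.ambari.server.orm.entities.RequestEntity;

    // Hypothetical illustration of status vs. display_status for a request with a skippable failure.
    public class DisplayStatusExample {

      public static void main(String[] args) {
        RequestEntity request = new RequestEntity();
        // A failure inside a skippable stage does not defeat the request's purpose...
        request.setStatus(HostRoleStatus.COMPLETED);
        // ...but display_status still surfaces that some underlying task failed.
        request.setDisplayStatus(HostRoleStatus.FAILED);
        System.out.println(request.getStatus() + " / " + request.getDisplayStatus());
      }
    }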

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntity.java
index f9c8810..f68338f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntity.java
@@ -39,17 +39,21 @@ import javax.persistence.OneToMany;
 import javax.persistence.Table;
 
 import org.apache.ambari.server.actionmanager.CommandExecutionType;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
 
 @Entity
 @Table(name = "stage")
 @IdClass(org.apache.ambari.server.orm.entities.StageEntityPK.class)
 @NamedQueries({
     @NamedQuery(
-        name = "StageEntity.findByCommandStatuses",
-        query = "SELECT stage from StageEntity stage WHERE stage.stageId IN (SELECT roleCommand.stageId from HostRoleCommandEntity roleCommand WHERE roleCommand.status IN :statuses AND roleCommand.stageId = stage.stageId AND roleCommand.requestId = stage.requestId ) ORDER BY stage.requestId, stage.stageId"),
+        name = "StageEntity.findByStatuses",
+        query = "SELECT stage from StageEntity stage WHERE stage.status IN :statuses ORDER BY stage.requestId, stage.stageId"),
+    @NamedQuery(
+        name = "StageEntity.findByPK",
+        query = "SELECT stage from StageEntity stage WHERE stage.requestId = :requestId AND stage.stageId = :stageId"),
     @NamedQuery(
         name = "StageEntity.findByRequestIdAndCommandStatuses",
-        query = "SELECT stage from StageEntity stage WHERE stage.stageId IN (SELECT roleCommand.stageId from HostRoleCommandEntity roleCommand WHERE roleCommand.requestId = :requestId AND roleCommand.status IN :statuses AND roleCommand.stageId = stage.stageId AND roleCommand.requestId = stage.requestId ) ORDER BY stage.stageId"),
+        query = "SELECT stage from StageEntity stage WHERE stage.status IN :statuses AND stage.requestId = :requestId ORDER BY stage.stageId"),
     @NamedQuery(
         name = "StageEntity.findIdsByRequestId",
         query = "SELECT stage.stageId FROM StageEntity stage WHERE stage.requestId = :requestId ORDER BY stage.stageId ASC") })
@@ -110,6 +114,32 @@ public class StageEntity {
   @Basic
   private byte[] hostParamsStage;
 
+  /**
+   * This status indicates whether the advanced success criteria for the stage,
+   * as established at the time of stage creation, have been met or not
+   *
+   *  Status is calculated by taking into account the following:
+   *  a) {@link #roleSuccessCriterias}
+   *  b) {@link #skippable}
+   *  c) {@link HostRoleCommandEntity#autoSkipOnFailure}
+   *  d) {@link HostRoleCommandEntity#status}
+   *
+   */
+  @Column(name = "status",  nullable = false)
+  @Enumerated(EnumType.STRING)
+  private HostRoleStatus status = HostRoleStatus.PENDING;
+
+  /**
+   * This status indicates whether any of the underlying tasks
+   * have encountered any type of failure ({@link HostRoleStatus#isFailedState()})
+   *
+   * Status is calculated by taking into account only {@link HostRoleCommandEntity#status}
+   *
+   */
+  @Column(name = "display_status", nullable = false)
+  @Enumerated(EnumType.STRING)
+  private HostRoleStatus displayStatus = HostRoleStatus.PENDING;
+
   @ManyToOne
   @JoinColumn(name = "request_id", referencedColumnName = "request_id", nullable = false)
   private RequestEntity request;
@@ -195,6 +225,40 @@ public class StageEntity {
     this.commandExecutionType = commandExecutionType;
   }
 
+  /**
+   * get status for the stage
+   * @return {@link HostRoleStatus}
+   */
+  public HostRoleStatus getStatus() {
+    return status;
+  }
+
+  /**
+   * sets status for the stage
+   * @param status {@link HostRoleStatus}
+   */
+  public void setStatus(HostRoleStatus status) {
+    this.status = status;
+  }
+
+  /**
+   * get display status for the stage
+   * @return  {@link HostRoleStatus}
+   */
+  public HostRoleStatus getDisplayStatus() {
+    return displayStatus;
+  }
+
+
+  /**
+   * sets display status for the stage
+   * @param displayStatus {@link HostRoleStatus}
+   */
+  public void setDisplayStatus(HostRoleStatus displayStatus) {
+    this.displayStatus = displayStatus;
+  }
+
+
   @Override
   public boolean equals(Object o) {
     if (this == o) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntityPK.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntityPK.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntityPK.java
index 9ca0470..34d175c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntityPK.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/StageEntityPK.java
@@ -68,4 +68,16 @@ public class StageEntityPK implements Serializable {
     result = 31 * result + (stageId != null ? stageId.hashCode() : 0);
     return result;
   }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String toString() {
+    StringBuilder buffer = new StringBuilder("StageEntityPK{");
+    buffer.append("stageId=").append(getStageId());
+    buffer.append(", requestId=").append(getRequestId());
+    buffer.append("}");
+    return buffer.toString();
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
index 4f90ef3..0267a5e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
@@ -19,11 +19,25 @@ package org.apache.ambari.server.upgrade;
 
 
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
 import java.util.Map;
 
+import javax.persistence.EntityManager;
+
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.actionmanager.Stage;
+import org.apache.ambari.server.actionmanager.StageFactory;
 import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.internal.CalculatedStatus;
+import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.dao.DaoUtils;
+import org.apache.ambari.server.orm.dao.RequestDAO;
+import org.apache.ambari.server.orm.entities.RequestEntity;
+import org.apache.ambari.server.orm.entities.StageEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
@@ -41,6 +55,12 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
    */
   private static final Logger LOG = LoggerFactory.getLogger(UpgradeCatalog300.class);
 
+  private static final String STAGE_TABLE = "stage";
+  private static final String STAGE_STATUS_COLUMN = "status";
+  private static final String STAGE_DISPLAY_STATUS_COLUMN = "display_status";
+  private static final String REQUEST_TABLE = "request";
+  private static final String REQUEST_DISPLAY_STATUS_COLUMN = "display_status";
+
   @Inject
   DaoUtils daoUtils;
 
@@ -83,6 +103,16 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
    */
   @Override
   protected void executeDDLUpdates() throws AmbariException, SQLException {
+    updateStageTable();
+  }
+
+  protected void updateStageTable() throws SQLException {
+    dbAccessor.addColumn(STAGE_TABLE,
+        new DBAccessor.DBColumnInfo(STAGE_STATUS_COLUMN, String.class, 255, HostRoleStatus.PENDING, false));
+    dbAccessor.addColumn(STAGE_TABLE,
+        new DBAccessor.DBColumnInfo(STAGE_DISPLAY_STATUS_COLUMN, String.class, 255, HostRoleStatus.PENDING, false));
+    dbAccessor.addColumn(REQUEST_TABLE,
+        new DBAccessor.DBColumnInfo(REQUEST_DISPLAY_STATUS_COLUMN, String.class, 255, HostRoleStatus.PENDING, false));
   }
 
   /**
@@ -99,6 +129,7 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
   protected void executeDMLUpdates() throws AmbariException, SQLException {
     addNewConfigurationsFromXml();
     showHcatDeletedUserMessage();
+    setStatusOfStagesAndRequests();
   }
 
   protected void showHcatDeletedUserMessage() {
@@ -122,4 +153,43 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
 
   }
 
+  protected void setStatusOfStagesAndRequests() {
+    executeInTransaction(new Runnable() {
+      @Override
+      public void run() {
+        try {
+          RequestDAO requestDAO = injector.getInstance(RequestDAO.class);
+          StageFactory stageFactory = injector.getInstance(StageFactory.class);
+          EntityManager em = getEntityManagerProvider().get();
+          List<RequestEntity> requestEntities= requestDAO.findAll();
+          for (RequestEntity requestEntity: requestEntities) {
+            Collection<StageEntity> stageEntities= requestEntity.getStages();
+            List <HostRoleStatus> stageDisplayStatuses = new ArrayList<>();
+            List <HostRoleStatus> stageStatuses = new ArrayList<>();
+            for (StageEntity stageEntity: stageEntities) {
+              Stage stage = stageFactory.createExisting(stageEntity);
+              List<HostRoleCommand> hostRoleCommands = stage.getOrderedHostRoleCommands();
+              Map<HostRoleStatus, Integer> statusCount = CalculatedStatus.calculateStatusCountsForTasks(hostRoleCommands);
+              HostRoleStatus stageDisplayStatus = CalculatedStatus.calculateSummaryDisplayStatus(statusCount, hostRoleCommands.size(), stage.isSkippable());
+              HostRoleStatus stageStatus = CalculatedStatus.calculateStageStatus(hostRoleCommands, statusCount, stage.getSuccessFactors(), stage.isSkippable());
+              stageEntity.setStatus(stageStatus);
+              stageStatuses.add(stageStatus);
+              stageEntity.setDisplayStatus(stageDisplayStatus);
+              stageDisplayStatuses.add(stageDisplayStatus);
+              em.merge(stageEntity);
+            }
+            HostRoleStatus requestStatus = CalculatedStatus.getOverallStatusForRequest(stageStatuses);
+            requestEntity.setStatus(requestStatus);
+            HostRoleStatus requestDisplayStatus = CalculatedStatus.getOverallDisplayStatusForRequest(stageDisplayStatuses);
+            requestEntity.setDisplayStatus(requestDisplayStatus);
+            em.merge(requestEntity);
+          }
+        } catch (Exception e) {
+          LOG.warn("Setting status for stages and requests threw an exception.", e);
+        }
+      }
+    });
+
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index f007b53..6c7cb09 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -345,7 +345,8 @@ CREATE TABLE request (
   request_type VARCHAR(255),
   request_schedule_id BIGINT,
   start_time BIGINT NOT NULL,
-  status VARCHAR(255),
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   CONSTRAINT PK_request PRIMARY KEY (request_id),
   CONSTRAINT FK_request_schedule_id FOREIGN KEY (request_schedule_id) REFERENCES requestschedule (schedule_id));
 
@@ -361,6 +362,8 @@ CREATE TABLE stage (
   command_params BLOB,
   host_params BLOB,
   command_execution_type VARCHAR(32) NOT NULL DEFAULT 'STAGE',
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   CONSTRAINT PK_stage PRIMARY KEY (stage_id, request_id),
   CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES request (request_id));
 
@@ -378,7 +381,7 @@ CREATE TABLE host_role_command (
   start_time BIGINT NOT NULL,
   original_start_time BIGINT NOT NULL,
   end_time BIGINT,
-  status VARCHAR(255),
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   auto_skip_on_failure SMALLINT DEFAULT 0 NOT NULL,
   std_error BLOB,
   std_out BLOB,

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index f6cb896..ebb0da0 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -356,7 +356,8 @@ CREATE TABLE request (
   request_context VARCHAR(255),
   request_type VARCHAR(255),
   start_time BIGINT NOT NULL,
-  status VARCHAR(255),
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   CONSTRAINT PK_request PRIMARY KEY (request_id),
   CONSTRAINT FK_request_schedule_id FOREIGN KEY (request_schedule_id) REFERENCES requestschedule (schedule_id));
 
@@ -372,6 +373,8 @@ CREATE TABLE stage (
   command_params LONGBLOB,
   host_params LONGBLOB,
   command_execution_type VARCHAR(32) NOT NULL DEFAULT 'STAGE',
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   CONSTRAINT PK_stage PRIMARY KEY (stage_id, request_id),
   CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES request (request_id));
 
@@ -390,7 +393,7 @@ CREATE TABLE host_role_command (
   start_time BIGINT NOT NULL,
   original_start_time BIGINT NOT NULL,
   end_time BIGINT,
-  status VARCHAR(100),
+  status VARCHAR(100) NOT NULL DEFAULT 'PENDING',
   auto_skip_on_failure SMALLINT DEFAULT 0 NOT NULL,
   std_error LONGBLOB,
   std_out LONGBLOB,

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 19253e8..884eb06 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -336,7 +336,8 @@ CREATE TABLE request (
   request_context VARCHAR(255),
   request_type VARCHAR(255),
   start_time NUMBER(19) NOT NULL,
-  status VARCHAR(255),
+  status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
+  display_status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
   CONSTRAINT PK_request PRIMARY KEY (request_id),
   CONSTRAINT FK_request_schedule_id FOREIGN KEY (request_schedule_id) REFERENCES requestschedule (schedule_id));
 
@@ -352,6 +353,8 @@ CREATE TABLE stage (
   command_params BLOB,
   host_params BLOB,
   command_execution_type VARCHAR2(32) DEFAULT 'STAGE' NOT NULL,
+  status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
+  display_status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
   CONSTRAINT PK_stage PRIMARY KEY (stage_id, request_id),
   CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES request (request_id));
 
@@ -370,7 +373,7 @@ CREATE TABLE host_role_command (
   start_time NUMBER(19) NOT NULL,
   original_start_time NUMBER(19) NOT NULL,
   end_time NUMBER(19),
-  status VARCHAR2(255) NULL,
+  status VARCHAR2(255) DEFAULT 'PENDING' NOT NULL,
   auto_skip_on_failure NUMBER(1) DEFAULT 0 NOT NULL,
   std_error BLOB NULL,
   std_out BLOB NULL,

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index b13a9e3..7e57d9f 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -345,7 +345,8 @@ CREATE TABLE request (
   request_type VARCHAR(255),
   request_schedule_id BIGINT,
   start_time BIGINT NOT NULL,
-  status VARCHAR(255),
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   CONSTRAINT PK_request PRIMARY KEY (request_id),
   CONSTRAINT FK_request_schedule_id FOREIGN KEY (request_schedule_id) REFERENCES requestschedule (schedule_id));
 
@@ -361,6 +362,8 @@ CREATE TABLE stage (
   command_params BYTEA,
   host_params BYTEA,
   command_execution_type VARCHAR(32) DEFAULT 'STAGE' NOT NULL,
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   CONSTRAINT PK_stage PRIMARY KEY (stage_id, request_id),
   CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES request (request_id));
 
@@ -378,7 +381,7 @@ CREATE TABLE host_role_command (
   start_time BIGINT NOT NULL,
   original_start_time BIGINT NOT NULL,
   end_time BIGINT,
-  status VARCHAR(255),
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   auto_skip_on_failure SMALLINT DEFAULT 0 NOT NULL,
   std_error BYTEA,
   std_out BYTEA,

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index cf2954a..2c4bd55 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -334,7 +334,8 @@ CREATE TABLE request (
   request_context VARCHAR(255),
   request_type VARCHAR(255),
   start_time NUMERIC(19) NOT NULL,
-  status VARCHAR(255),
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   CONSTRAINT PK_request PRIMARY KEY (request_id),
   CONSTRAINT FK_request_schedule_id FOREIGN KEY (request_schedule_id) REFERENCES requestschedule (schedule_id));
 
@@ -350,6 +351,8 @@ CREATE TABLE stage (
   command_params IMAGE,
   host_params IMAGE,
   command_execution_type VARCHAR(32) NOT NULL DEFAULT 'STAGE',
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   CONSTRAINT PK_stage PRIMARY KEY (stage_id, request_id),
   CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES request (request_id));
 
@@ -368,7 +371,7 @@ CREATE TABLE host_role_command (
   start_time NUMERIC(19) NOT NULL,
   original_start_time NUMERIC(19) NOT NULL,
   end_time NUMERIC(19),
-  status VARCHAR(255),
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   auto_skip_on_failure SMALLINT DEFAULT 0 NOT NULL,
   std_error IMAGE,
   std_out IMAGE,

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index 16c269a..a86a767 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -350,7 +350,8 @@ CREATE TABLE request (
   request_type VARCHAR(255),
   request_schedule_id BIGINT,
   start_time BIGINT NOT NULL,
-  status VARCHAR(255),
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   CONSTRAINT PK_request PRIMARY KEY CLUSTERED (request_id),
   CONSTRAINT FK_request_schedule_id FOREIGN KEY (request_schedule_id) REFERENCES requestschedule (schedule_id));
 
@@ -366,6 +367,8 @@ CREATE TABLE stage (
   command_params VARBINARY(MAX),
   host_params VARBINARY(MAX),
   command_execution_type VARCHAR(32) NOT NULL DEFAULT 'STAGE',
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+  display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   CONSTRAINT PK_stage PRIMARY KEY CLUSTERED (stage_id, request_id),
   CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES request (request_id));
 
@@ -383,7 +386,7 @@ CREATE TABLE host_role_command (
   start_time BIGINT NOT NULL,
   original_start_time BIGINT NOT NULL,
   end_time BIGINT,
-  status VARCHAR(255),
+  status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
   auto_skip_on_failure SMALLINT DEFAULT 0 NOT NULL,
   std_error VARBINARY(max),
   std_out VARBINARY(max),

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
index 177ac70..edc5683 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
@@ -55,6 +55,7 @@ import org.apache.ambari.server.utils.CommandUtils;
 import org.apache.ambari.server.utils.StageUtils;
 import org.easymock.EasyMock;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -67,8 +68,6 @@ import com.google.inject.Injector;
 import com.google.inject.Singleton;
 import com.google.inject.util.Modules;
 
-import junit.framework.Assert;
-
 public class TestActionDBAccessorImpl {
   private static final Logger log = LoggerFactory.getLogger(TestActionDBAccessorImpl.class);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
index 6519126..526ca7c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionScheduler.java
@@ -27,6 +27,7 @@ import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyBoolean;
 import static org.mockito.Matchers.anyCollectionOf;
+import static org.mockito.Matchers.anyListOf;
 import static org.mockito.Matchers.anyLong;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Matchers.eq;
@@ -100,9 +101,11 @@ import org.apache.ambari.server.utils.StageUtils;
 import org.easymock.Capture;
 import org.easymock.EasyMock;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 import org.slf4j.Logger;
@@ -119,8 +122,6 @@ import com.google.inject.Injector;
 import com.google.inject.Provider;
 import com.google.inject.persist.UnitOfWork;
 
-import junit.framework.Assert;
-
 public class TestActionScheduler {
 
   private static final Logger log = LoggerFactory.getLogger(TestActionScheduler.class);
@@ -207,6 +208,8 @@ public class TestActionScheduler {
     when(host.getHostName()).thenReturn(hostname);
 
     ActionDBAccessor db = mock(ActionDBAccessorImpl.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
     List<Stage> stages = new ArrayList<Stage>();
     Stage s = StageUtils.getATestStage(1, 977, hostname, CLUSTER_HOST_INFO,
       "{\"host_param\":\"param_value\"}", "{\"stage_param\":\"param_value\"}");
@@ -222,7 +225,7 @@ public class TestActionScheduler {
     //Keep large number of attempts so that the task is not expired finally
     //Small action timeout to test rescheduling
     ActionScheduler scheduler = new ActionScheduler(100, 5, db, aq, fsm,
-        10000, new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock, null, null);
+        10000, new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock, hostRoleCommandDAOMock, null);
     scheduler.setTaskTimeoutAdjustment(false);
 
     List<AgentCommand> ac = waitForQueueSize(hostname, aq, 1, scheduler);
@@ -314,6 +317,8 @@ public class TestActionScheduler {
     stages.add(s);
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
     when(db.getCommandsInProgressCount()).thenReturn(stages.size());
     when(db.getStagesInProgress()).thenReturn(stages);
 
@@ -335,7 +340,7 @@ public class TestActionScheduler {
 
     //Small action timeout to test rescheduling
     ActionScheduler scheduler = new ActionScheduler(100, 0, db, aq, fsm, 3,
-        new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock, null, null);
+        new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock, hostRoleCommandDAOMock, null);
     scheduler.setTaskTimeoutAdjustment(false);
     // Start the thread
 
@@ -405,6 +410,8 @@ public class TestActionScheduler {
 
     when(db.getCommandsInProgressCount()).thenReturn(stages.size());
     when(db.getStagesInProgress()).thenReturn(stages);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     doAnswer(new Answer<Void>() {
       @Override
@@ -508,6 +515,8 @@ public class TestActionScheduler {
 
     when(db.getCommandsInProgressCount()).thenReturn(stages.size());
     when(db.getStagesInProgress()).thenReturn(stages);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     doAnswer(new Answer<Collection<HostRoleCommandEntity>>() {
       @Override
@@ -543,7 +552,7 @@ public class TestActionScheduler {
     // Make sure the NN install doesn't timeout
     ActionScheduler scheduler = new ActionScheduler(100, 50000, db, aq, fsm, 3,
         new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null);
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null);
     scheduler.setTaskTimeoutAdjustment(false);
 
     int cycleCount=0;
@@ -606,6 +615,8 @@ public class TestActionScheduler {
     stages.add(s);
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -658,7 +669,7 @@ public class TestActionScheduler {
     ServerActionExecutor.init(injector);
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null);
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null);
 
     int cycleCount = 0;
     while (!stages.get(0).getHostRoleStatus(null, "AMBARI_SERVER_ACTION")
@@ -721,6 +732,8 @@ public class TestActionScheduler {
     stages.add(stage12);
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -735,7 +748,7 @@ public class TestActionScheduler {
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null),
         unitOfWork, EasyMock.createNiceMock(AmbariEventPublisher.class), conf,
-        entityManagerProviderMock, (HostRoleCommandDAO)null, (HostRoleCommandFactory)null);
+        entityManagerProviderMock, hostRoleCommandDAOMock, (HostRoleCommandFactory)null);
 
     scheduler.doWork();
 
@@ -763,6 +776,8 @@ public class TestActionScheduler {
     stages.add(s);
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -816,7 +831,7 @@ public class TestActionScheduler {
     ServerActionExecutor.init(injector);
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null);
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null);
 
     int cycleCount = 0;
     while (!stages.get(0).getHostRoleStatus(null, "AMBARI_SERVER_ACTION").isCompletedState()
@@ -976,6 +991,8 @@ public class TestActionScheduler {
     stages.add(s);
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -1028,7 +1045,7 @@ public class TestActionScheduler {
 
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null);
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null);
 
     int cycleCount = 0;
     while (!stages.get(0).getHostRoleStatus(null, "AMBARI_SERVER_ACTION")
@@ -1124,6 +1141,8 @@ public class TestActionScheduler {
             RoleCommand.START, Service.Type.GANGLIA, 5, 5, 4));
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -1136,7 +1155,7 @@ public class TestActionScheduler {
     Configuration conf = new Configuration(properties);
     ActionScheduler scheduler = spy(new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null));
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null));
 
     doReturn(false).when(scheduler).wasAgentRestartedDuringOperation(any(Host.class), any(Stage.class), anyString());
 
@@ -1214,6 +1233,8 @@ public class TestActionScheduler {
             RoleCommand.START, Service.Type.GANGLIA, 5, 5, 4));
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -1228,7 +1249,7 @@ public class TestActionScheduler {
     ActionScheduler scheduler = spy(new ActionScheduler(100, 50, db, aq, fsm, 3,
             new HostsMap((String) null),
         unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null));
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null));
 
 
     doReturn(false).when(scheduler).wasAgentRestartedDuringOperation(any(Host.class), any(Stage.class), anyString());
@@ -1289,6 +1310,8 @@ public class TestActionScheduler {
 
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -1303,7 +1326,7 @@ public class TestActionScheduler {
     ActionScheduler scheduler = spy(new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null),
         unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null));
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null));
 
     doReturn(false).when(scheduler).wasAgentRestartedDuringOperation(any(Host.class), any(Stage.class), anyString());
 
@@ -1544,6 +1567,8 @@ public class TestActionScheduler {
     stage.setLastAttemptTime(host2, Role.HBASE_CLIENT.toString(), now);
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -1616,7 +1641,7 @@ public class TestActionScheduler {
     ActionScheduler scheduler = new ActionScheduler(100, 10000, db, aq, fsm, 3,
         new HostsMap((String) null),
         unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null);
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null);
 
     scheduler.doWork();
 
@@ -1729,6 +1754,8 @@ public class TestActionScheduler {
             "host1", "cluster1", Role.HDFS_CLIENT, RoleCommand.UPGRADE, Service.Type.HDFS, 4, 2, 1));
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -1808,7 +1835,7 @@ public class TestActionScheduler {
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null),
         unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null);
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null);
 
     ActionManager am = new ActionManager(db, requestFactory, scheduler);
 
@@ -1976,6 +2003,8 @@ public class TestActionScheduler {
     when(host.getHostName()).thenReturn(hostname);
 
     ActionDBAccessor db = mock(ActionDBAccessorImpl.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -1993,7 +2022,7 @@ public class TestActionScheduler {
     //Small action timeout to test rescheduling
     ActionScheduler scheduler = new ActionScheduler(100, 100, db, aq, fsm,
         10000, new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null);
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null);
     scheduler.setTaskTimeoutAdjustment(false);
 
     List<AgentCommand> ac = waitForQueueSize(hostname, aq, 1, scheduler);
@@ -2135,6 +2164,8 @@ public class TestActionScheduler {
     stages.add(s);
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -2187,7 +2218,7 @@ public class TestActionScheduler {
     ServerActionExecutor.init(injector);
     ActionScheduler scheduler = new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null);
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null);
 
     int cycleCount = 0;
     while (!stages.get(0).getHostRoleStatus(null, "AMBARI_SERVER_ACTION")
@@ -2467,6 +2498,8 @@ public class TestActionScheduler {
     when(host3.getHostName()).thenReturn(hostname);
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
     when(db.getCommandsInProgressCount()).thenReturn(stagesInProgress.size());
     when(db.getStagesInProgress()).thenReturn(stagesInProgress);
 
@@ -2542,7 +2575,7 @@ public class TestActionScheduler {
 
     ActionScheduler scheduler = spy(new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null));
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null));
 
     doReturn(false).when(scheduler).wasAgentRestartedDuringOperation(any(Host.class), any(Stage.class), anyString());
 
@@ -2706,6 +2739,8 @@ public class TestActionScheduler {
     command.setStatus(HostRoleStatus.FAILED);
 
     ActionDBAccessor db = mock(ActionDBAccessor.class);
+    HostRoleCommandDAO hostRoleCommandDAOMock = mock(HostRoleCommandDAO.class);
+    Mockito.doNothing().when(hostRoleCommandDAOMock).publishTaskCreateEvent(anyListOf(HostRoleCommand.class));
 
     RequestEntity request = mock(RequestEntity.class);
     when(request.isExclusive()).thenReturn(false);
@@ -2776,7 +2811,7 @@ public class TestActionScheduler {
 
     ActionScheduler scheduler = spy(new ActionScheduler(100, 50, db, aq, fsm, 3,
         new HostsMap((String) null), unitOfWork, null, conf, entityManagerProviderMock,
-        (HostRoleCommandDAO)null, (HostRoleCommandFactory)null));
+        hostRoleCommandDAOMock, (HostRoleCommandFactory)null));
 
     doReturn(false).when(scheduler).wasAgentRestartedDuringOperation(any(Host.class), any(Stage.class), anyString());
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/test/java/org/apache/ambari/server/alerts/AmbariPerformanceRunnableTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/alerts/AmbariPerformanceRunnableTest.java b/ambari-server/src/test/java/org/apache/ambari/server/alerts/AmbariPerformanceRunnableTest.java
index 7b1a5a2..facd802 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/alerts/AmbariPerformanceRunnableTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/alerts/AmbariPerformanceRunnableTest.java
@@ -18,12 +18,13 @@
 
 package org.apache.ambari.server.alerts;
 
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 
 import java.lang.reflect.Field;
 import java.util.ArrayList;
@@ -34,6 +35,7 @@ import java.util.Map;
 import javax.persistence.EntityManager;
 
 import org.apache.ambari.server.actionmanager.ActionManager;
+import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
 import org.apache.ambari.server.alerts.AmbariPerformanceRunnable.PerformanceArea;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.internal.ClusterResourceProvider;
@@ -287,6 +289,7 @@ public class AmbariPerformanceRunnableTest {
       binder.bind(AlertsDAO.class).toInstance(createNiceMock(AlertsDAO.class));
       binder.bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
       binder.bind(ActionManager.class).toInstance(createNiceMock(ActionManager.class));
+      binder.bind(HostRoleCommandFactory.class).toInstance(createNiceMock(HostRoleCommandFactory.class));
       binder.bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
       binder.bind(AmbariManagementController.class).toInstance(createNiceMock(AmbariManagementController.class));
       binder.bind(AlertDefinitionFactory.class).toInstance(createNiceMock(AlertDefinitionFactory.class));

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
index a0701b6..f8b57e5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeResourceProviderTest.java
@@ -624,7 +624,6 @@ public class UpgradeResourceProviderTest {
     RequestEntity requestEntity = new RequestEntity();
     requestEntity.setRequestId(2L);
     requestEntity.setClusterId(cluster.getClusterId());
-    requestEntity.setStatus(HostRoleStatus.PENDING);
     requestEntity.setStages(new ArrayList<StageEntity>());
     requestDao.create(requestEntity);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeSummaryResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeSummaryResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeSummaryResourceProviderTest.java
index 619e367..f009767 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeSummaryResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UpgradeSummaryResourceProviderTest.java
@@ -205,7 +205,6 @@ public class UpgradeSummaryResourceProviderTest {
     RequestEntity requestEntity = new RequestEntity();
     requestEntity.setRequestId(upgradeRequestId);
     requestEntity.setClusterId(cluster.getClusterId());
-    requestEntity.setStatus(HostRoleStatus.PENDING);
     requestDAO.create(requestEntity);
 
     // Create the stage and add it to the request

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/tasks/TaskStatusListenerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/tasks/TaskStatusListenerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/tasks/TaskStatusListenerTest.java
new file mode 100644
index 0000000..64a731b
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/events/listeners/tasks/TaskStatusListenerTest.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.events.listeners.tasks;
+
+import static org.easymock.EasyMock.anyLong;
+import static org.easymock.EasyMock.anyObject;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.ambari.server.Role;
+import org.apache.ambari.server.RoleCommand;
+import org.apache.ambari.server.actionmanager.ExecutionCommandWrapperFactory;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.events.TaskCreateEvent;
+import org.apache.ambari.server.events.TaskUpdateEvent;
+import org.apache.ambari.server.events.publishers.TaskEventPublisher;
+import org.apache.ambari.server.orm.dao.ExecutionCommandDAO;
+import org.apache.ambari.server.orm.dao.HostDAO;
+import org.apache.ambari.server.orm.dao.RequestDAO;
+import org.apache.ambari.server.orm.dao.StageDAO;
+import org.apache.ambari.server.orm.entities.RequestEntity;
+import org.apache.ambari.server.orm.entities.RoleSuccessCriteriaEntity;
+import org.apache.ambari.server.orm.entities.StageEntity;
+import org.apache.ambari.server.orm.entities.StageEntityPK;
+import org.apache.ambari.server.state.ServiceComponentHostEvent;
+import org.easymock.EasyMock;
+import org.easymock.EasyMockSupport;
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.google.inject.Inject;
+
+
+public class TaskStatusListenerTest extends EasyMockSupport {
+
+  private TaskEventPublisher publisher = new TaskEventPublisher();
+
+  @Inject
+  private ExecutionCommandDAO executionCommandDAO;
+
+  @Inject
+  private ExecutionCommandWrapperFactory ecwFactory;
+
+
+  @Test
+  public void testOnTaskUpdateEvent() {
+    List<HostRoleCommand> hostRoleCommands = new ArrayList<HostRoleCommand>();
+    ServiceComponentHostEvent serviceComponentHostEvent = createNiceMock(ServiceComponentHostEvent.class);
+    HostDAO hostDAO = createNiceMock(HostDAO.class);
+    replayAll();
+
+    int hostRoleCommandSize = 3;
+    int hrcCounter = 1;
+    for (int stageCounter = 0; stageCounter < 2; stageCounter++) {
+      for (int i = 1; i <= hostRoleCommandSize; i++,hrcCounter++) {
+        String hostname = "hostname-" + hrcCounter;
+        HostRoleCommand hostRoleCommand = new HostRoleCommand(hostname, Role.DATANODE,
+            serviceComponentHostEvent, RoleCommand.EXECUTE, hostDAO, executionCommandDAO, ecwFactory);
+        hostRoleCommand.setStatus(HostRoleStatus.PENDING);
+        hostRoleCommand.setRequestId(1L);
+        hostRoleCommand.setStageId(stageCounter);
+        hostRoleCommand.setTaskId(hrcCounter);
+        hostRoleCommands.add(hostRoleCommand);
+      }
+    }
+
+    HostRoleStatus hostRoleStatus = HostRoleStatus.PENDING;
+    StageDAO stageDAO = createNiceMock(StageDAO.class);
+    RequestDAO requestDAO = createNiceMock(RequestDAO.class);
+    StageEntity stageEntity = createNiceMock(StageEntity.class);
+    RequestEntity requestEntity = createNiceMock(RequestEntity.class);
+    EasyMock.expect(stageEntity.getStatus()).andReturn(hostRoleStatus).anyTimes();
+    EasyMock.expect(stageEntity.getDisplayStatus()).andReturn(hostRoleStatus).anyTimes();
+    EasyMock.expect(stageEntity.isSkippable()).andReturn(Boolean.FALSE).anyTimes();
+    EasyMock.expect(stageEntity.getRoleSuccessCriterias()).andReturn(Collections.<RoleSuccessCriteriaEntity>emptyList()).anyTimes();
+    EasyMock.expect(stageDAO.findByPK(anyObject(StageEntityPK.class))).andReturn(stageEntity).anyTimes();
+    EasyMock.expect(requestEntity.getStatus()).andReturn(hostRoleStatus).anyTimes();
+    EasyMock.expect(requestEntity.getDisplayStatus()).andReturn(hostRoleStatus).anyTimes();
+    EasyMock.expect(requestDAO.findByPK(anyLong())).andReturn(requestEntity).anyTimes();
+
+    requestDAO.updateStatus(1L,HostRoleStatus.COMPLETED,HostRoleStatus.SKIPPED_FAILED);
+    EasyMock.expectLastCall().times(1);
+
+
+
+    EasyMock.replay(stageEntity);
+    EasyMock.replay(requestEntity);
+    EasyMock.replay(stageDAO);
+    EasyMock.replay(requestDAO);
+
+    TaskCreateEvent event = new TaskCreateEvent(hostRoleCommands);
+    TaskStatusListener listener = new TaskStatusListener(publisher,stageDAO,requestDAO);
+
+    Assert.assertTrue(listener.getActiveTasksMap().isEmpty());
+    Assert.assertTrue(listener.getActiveStageMap().isEmpty());
+    Assert.assertTrue(listener.getActiveRequestMap().isEmpty());
+
+    listener.onTaskCreateEvent(event);
+    Assert.assertEquals(listener.getActiveTasksMap().size(),6);
+    Assert.assertEquals(listener.getActiveStageMap().size(),2);
+    Assert.assertEquals(listener.getActiveRequestMap().size(),1);
+    Assert.assertEquals(listener.getActiveRequestMap().get(1L).getStatus(), hostRoleStatus);
+
+
+
+    // updating a task's status to IN_PROGRESS should cascade into an update of the request status
+    String hostname = "hostname-1";
+    HostRoleCommand hostRoleCommand = new HostRoleCommand(hostname, Role.DATANODE,
+        serviceComponentHostEvent, RoleCommand.EXECUTE, hostDAO, executionCommandDAO, ecwFactory);
+    hostRoleCommand.setStatus(HostRoleStatus.IN_PROGRESS);
+    hostRoleCommand.setRequestId(1L);
+    hostRoleCommand.setStageId(0);
+    hostRoleCommand.setTaskId(1L);
+    listener.onTaskUpdateEvent(new TaskUpdateEvent(Collections.singletonList(hostRoleCommand)));
+    Assert.assertEquals(HostRoleStatus.IN_PROGRESS, listener.getActiveRequestMap().get(1L).getStatus());
+
+    // updating all tasks to COMPLETED and SKIPPED_FAILED states should cascade into a request status of COMPLETED
+    // and a request display status of SKIPPED_FAILED
+    hrcCounter = 1;
+    List<HostRoleCommand> finalHostRoleCommands = new ArrayList<HostRoleCommand>();
+    HostRoleStatus finalHostRoleStatus = HostRoleStatus.COMPLETED;
+    for (int stageCounter = 0; stageCounter < 2; stageCounter++) {
+      for (int i = 1; i <= hostRoleCommandSize; i++,hrcCounter++) {
+        String finalHostname = "hostname-" + hrcCounter;
+        HostRoleCommand finalHostRoleCommand = new HostRoleCommand(finalHostname, Role.DATANODE,
+            serviceComponentHostEvent, RoleCommand.EXECUTE, hostDAO, executionCommandDAO, ecwFactory);
+        finalHostRoleCommand.setStatus(finalHostRoleStatus);
+        finalHostRoleCommand.setRequestId(1L);
+        finalHostRoleCommand.setStageId(stageCounter);
+        finalHostRoleCommand.setTaskId(hrcCounter);
+        finalHostRoleCommands.add(finalHostRoleCommand);
+      }
+      finalHostRoleStatus = HostRoleStatus.SKIPPED_FAILED;
+    }
+
+    listener.onTaskUpdateEvent(new TaskUpdateEvent(finalHostRoleCommands));
+
+    // Once the request status and display status reach a completed state, the request is no longer tracked by TaskStatusListener
+    Assert.assertNull(listener.getActiveRequestMap().get(1L));
+
+    // verify request status = completed and display_status = skip_failed
+    verifyAll();
+  }
+
+}
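
The roll-up the new test asserts can be summarized with a minimal, self-contained sketch; the enum and helpers below are illustrative stand-ins in plain Java, not Ambari's HostRoleStatus or TaskStatusListener API. The idea: a request reads as COMPLETED once every task has reached a terminal state, while its display status surfaces SKIPPED_FAILED if any task was skipped after failing.

import java.util.Arrays;
import java.util.List;

public class StatusRollupSketch {

  // Hypothetical status enum, standing in for Ambari's HostRoleStatus.
  enum Status {
    PENDING, IN_PROGRESS, COMPLETED, SKIPPED_FAILED;

    boolean isTerminal() {
      return this == COMPLETED || this == SKIPPED_FAILED;
    }
  }

  // Overall request status: COMPLETED once every task has finished,
  // IN_PROGRESS if any task is running, otherwise PENDING.
  static Status requestStatus(List<Status> tasks) {
    if (tasks.stream().allMatch(Status::isTerminal)) {
      return Status.COMPLETED;
    }
    return tasks.stream().anyMatch(s -> s == Status.IN_PROGRESS)
        ? Status.IN_PROGRESS : Status.PENDING;
  }

  // Display status surfaces SKIPPED_FAILED even when the request itself completed.
  static Status displayStatus(List<Status> tasks) {
    if (tasks.stream().anyMatch(s -> s == Status.SKIPPED_FAILED)) {
      return Status.SKIPPED_FAILED;
    }
    return requestStatus(tasks);
  }

  public static void main(String[] args) {
    List<Status> tasks = Arrays.asList(Status.COMPLETED, Status.COMPLETED, Status.SKIPPED_FAILED);
    System.out.println(requestStatus(tasks)); // COMPLETED
    System.out.println(displayStatus(tasks)); // SKIPPED_FAILED
  }
}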

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
index b1c10f5..1709da8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
@@ -38,6 +38,7 @@ import javax.persistence.EntityManager;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.H2DatabaseCleaner;
+import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
 import org.apache.ambari.server.actionmanager.RequestFactory;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariCustomCommandExecutionHelper;
@@ -980,6 +981,7 @@ public class ConfigHelperTest {
           bind(Clusters.class).toInstance(createNiceMock(Clusters.class));
           bind(ClusterController.class).toInstance(clusterController);
           bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
+          bind(HostRoleCommandFactory.class).toInstance(createNiceMock(HostRoleCommandFactory.class));
           bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
         }
       });

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterEffectiveVersionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterEffectiveVersionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterEffectiveVersionTest.java
index 9d339e2..d3c8acf 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterEffectiveVersionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterEffectiveVersionTest.java
@@ -24,6 +24,7 @@ import java.util.List;
 import javax.persistence.EntityManager;
 
 import org.apache.ambari.server.actionmanager.ActionManager;
+import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
 import org.apache.ambari.server.actionmanager.RequestFactory;
 import org.apache.ambari.server.actionmanager.StageFactory;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
@@ -68,6 +69,7 @@ import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 import org.easymock.EasyMock;
 import org.easymock.EasyMockSupport;
 import org.eclipse.jetty.server.SessionManager;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -83,8 +85,6 @@ import com.google.inject.Injector;
 import com.google.inject.Module;
 import com.google.inject.assistedinject.FactoryModuleBuilder;
 
-import junit.framework.Assert;
-
 /**
  * Tests that cluster effective version is calculated correctly during upgrades.
  */
@@ -256,6 +256,7 @@ public class ClusterEffectiveVersionTest extends EasyMockSupport {
       binder.bind(DBAccessor.class).toInstance(EasyMock.createNiceMock(DBAccessor.class));
       binder.bind(EntityManager.class).toInstance(EasyMock.createNiceMock(EntityManager.class));
       binder.bind(ActionManager.class).toInstance(EasyMock.createNiceMock(ActionManager.class));
+      binder.bind(HostRoleCommandFactory.class).toInstance(EasyMock.createNiceMock(HostRoleCommandFactory.class));
       binder.bind(HostRoleCommandDAO.class).toInstance(EasyMock.createNiceMock(HostRoleCommandDAO.class));
       binder.bind(AmbariManagementController.class).toInstance(EasyMock.createNiceMock(AmbariManagementController.class));
       binder.bind(ClusterController.class).toInstance(EasyMock.createNiceMock(ClusterController.class));

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/test/java/org/apache/ambari/server/state/services/RetryUpgradeActionServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/services/RetryUpgradeActionServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/services/RetryUpgradeActionServiceTest.java
index ed95b0b..e699e49 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/services/RetryUpgradeActionServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/services/RetryUpgradeActionServiceTest.java
@@ -251,7 +251,6 @@ public class RetryUpgradeActionServiceTest {
     RequestEntity requestEntity = new RequestEntity();
     requestEntity.setRequestId(upgradeRequestId);
     requestEntity.setClusterId(cluster.getClusterId());
-    requestEntity.setStatus(HostRoleStatus.PENDING);
     requestDAO.create(requestEntity);
 
     // Create the stage and add it to the request

http://git-wip-us.apache.org/repos/asf/ambari/blob/0fc7a667/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
index d7979e8..ec001ec 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
@@ -31,15 +31,18 @@ public class UpgradeCatalog300Test {
   public void testExecuteDMLUpdates() throws Exception {
     Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
     Method showHcatDeletedUserMessage = UpgradeCatalog300.class.getDeclaredMethod("showHcatDeletedUserMessage");
+    Method setStatusOfStagesAndRequests = UpgradeCatalog300.class.getDeclaredMethod("setStatusOfStagesAndRequests");
 
    UpgradeCatalog300 upgradeCatalog300 = createMockBuilder(UpgradeCatalog300.class)
             .addMockedMethod(showHcatDeletedUserMessage)
             .addMockedMethod(addNewConfigurationsFromXml)
+            .addMockedMethod(setStatusOfStagesAndRequests)
             .createMock();
 
 
     upgradeCatalog300.addNewConfigurationsFromXml();
     upgradeCatalog300.showHcatDeletedUserMessage();
+    upgradeCatalog300.setStatusOfStagesAndRequests();
 
 
     replay(upgradeCatalog300);
@@ -49,4 +52,21 @@ public class UpgradeCatalog300Test {
     verify(upgradeCatalog300);
   }
 
+  @Test
+  public void testExecuteDDLUpdates() throws Exception {
+    Method updateStageTable = UpgradeCatalog300.class.getDeclaredMethod("updateStageTable");
+    UpgradeCatalog300 upgradeCatalog300 = createMockBuilder(UpgradeCatalog300.class)
+        .addMockedMethod(updateStageTable)
+        .createMock();
+
+    upgradeCatalog300.updateStageTable();
+
+    replay(upgradeCatalog300);
+
+    upgradeCatalog300.executeDDLUpdates();
+
+    verify(upgradeCatalog300);
+  }
+
+
 }


[41/50] [abbrv] ambari git commit: AMBARI-20024. Relax ranger config DB consistency check rules (aonishuk)

Posted by nc...@apache.org.
AMBARI-20024. Relax ranger config DB consistency check rules (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c8c134ee
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c8c134ee
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c8c134ee

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: c8c134ee7c51a18df4613539e3b424cf17138645
Parents: b501425
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Fri Feb 17 13:36:12 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Fri Feb 17 13:36:12 2017 +0200

----------------------------------------------------------------------
 .../checks/DatabaseConsistencyCheckHelper.java   | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c8c134ee/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
index 926ec65..e895fc5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
@@ -33,6 +33,8 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Scanner;
 import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import javax.annotation.Nullable;
 import javax.inject.Provider;
@@ -934,6 +936,12 @@ public class DatabaseConsistencyCheckHelper {
                 Collection<String> serviceConfigsFromDB = dbServiceConfigs.get(serviceName);
                 if (serviceConfigsFromDB != null && serviceConfigsFromStack != null) {
                   serviceConfigsFromStack.removeAll(serviceConfigsFromDB);
+
+                  // skip ranger-{service_name}-* from being checked, unless ranger is installed
+                  if(!dbServiceConfigs.containsKey("RANGER")) {
+                    removeStringsByRegexp(serviceConfigsFromStack, "^ranger-"+ serviceName.toLowerCase() + "-" + "*");
+                  }
+
                   if (!serviceConfigsFromStack.isEmpty()) {
                     error("Required config(s): {} is(are) not available for service {} with service config version {} in cluster {}",
                             StringUtils.join(serviceConfigsFromStack, ","), serviceName, Integer.toString(serviceVersion), clusterName);
@@ -1009,4 +1017,15 @@ public class DatabaseConsistencyCheckHelper {
     }
   }
 
+  private static void removeStringsByRegexp(Collection<String> stringItems, String regexp) {
+      Pattern pattern = Pattern.compile(regexp);
+
+      for (Iterator<String> iterator = stringItems.iterator(); iterator.hasNext();) {
+        String stringItem = iterator.next();
+        Matcher matcher = pattern.matcher(stringItem);
+        if (matcher.find()) {
+          iterator.remove();
+        }
+      }
+  }
 }
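
A small usage sketch of the helper added above; the set contents and the literal pattern are illustrative only, since the real check builds its pattern from serviceName.toLowerCase() at runtime. Entries matching the ranger-<service>- prefix drop out of the missing-config set, so clusters without Ranger installed are no longer flagged for those configs.

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.regex.Pattern;

public class RangerConfigFilterSketch {

  // Same idea as removeStringsByRegexp above: drop every entry the pattern matches.
  static void removeByRegexp(Set<String> items, String regexp) {
    Pattern pattern = Pattern.compile(regexp);
    for (Iterator<String> it = items.iterator(); it.hasNext(); ) {
      if (pattern.matcher(it.next()).find()) {
        it.remove();
      }
    }
  }

  public static void main(String[] args) {
    Set<String> missingConfigs = new HashSet<>();
    missingConfigs.add("hdfs-site");
    missingConfigs.add("ranger-hdfs-audit");
    missingConfigs.add("ranger-hdfs-security");

    // Illustrative pattern only; the real check derives it from the service name.
    removeByRegexp(missingConfigs, "^ranger-hdfs-");

    System.out.println(missingConfigs); // [hdfs-site] -- only non-Ranger configs remain to be reported
  }
}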


[29/50] [abbrv] ambari git commit: AMBARI-20014 Multiple unit tests accessing same table which causes lock issues (dsen)

Posted by nc...@apache.org.
AMBARI-20014 Multiple unit tests accessing same table which causes lock issues (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b12ae51d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b12ae51d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b12ae51d

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b12ae51dd4a0c745827eea7ceb75c362e00d6f75
Parents: ea82a59
Author: Dmytro Sen <ds...@apache.org>
Authored: Thu Feb 16 16:03:05 2017 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Thu Feb 16 16:03:05 2017 +0200

----------------------------------------------------------------------
 .../server/state/cluster/ClusterImpl.java       |   2 +
 .../AmbariManagementControllerTest.java         | 121 ++++++++++---------
 2 files changed, 64 insertions(+), 59 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b12ae51d/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 2a66795..db4aa21 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -2090,6 +2090,8 @@ public class ClusterImpl implements Cluster {
     try {
       refresh();
       deleteAllServices();
+
+      refresh(); // update one-to-many clusterServiceEntities
       removeEntities();
       allConfigs.clear();
     } finally {
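
The added refresh() is the substance of the ClusterImpl change. A hypothetical JPA sketch of the pattern, using generic EntityManager calls rather than Ambari's ClusterImpl internals: after the child rows have been deleted through their own DAOs, re-reading the parent keeps its cached one-to-many collections in line with the database before the parent itself is removed, which is the kind of stale state that can otherwise surface as the lock issues described in the ticket.

import javax.persistence.EntityManager;

public class DeleteClusterSketch {

  // "clusterEntity" is a hypothetical parent entity standing in for ClusterEntity.
  public void deleteCluster(EntityManager em, Object clusterEntity) {
    em.refresh(clusterEntity);   // start from the persisted state
    // ... delete child entities (services, configs, ...) through their DAOs ...
    em.refresh(clusterEntity);   // re-read so the one-to-many collections drop the deleted children
    em.remove(clusterEntity);    // the parent can now be removed without referencing stale rows
  }
}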

http://git-wip-us.apache.org/repos/asf/ambari/blob/b12ae51d/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index af67f05..89f9d94 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -151,6 +151,7 @@ import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartedEve
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStopEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStoppedEvent;
 import org.apache.ambari.server.topology.TopologyManager;
+import org.apache.ambari.server.utils.EventBusSynchronizer;
 import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.collections.CollectionUtils;
 import org.easymock.Capture;
@@ -199,7 +200,6 @@ public class AmbariManagementControllerTest {
   private static final String FAKE_SERVICE_NAME = "FAKENAGIOS";
   private static final int STACK_VERSIONS_CNT = 16;
   private static final int REPOS_CNT = 3;
-  private static final int STACKS_CNT = 3;
   private static final int STACK_PROPERTIES_CNT = 103;
   private static final int STACK_COMPONENTS_CNT = 4;
   private static final int OS_CNT = 2;
@@ -258,6 +258,8 @@ public class AmbariManagementControllerTest {
 
   @Before
   public void setup() throws Exception {
+    EventBusSynchronizer.synchronizeAmbariEventPublisher(injector);
+
     entityManager = injector.getProvider(EntityManager.class).get();
     actionDB = injector.getInstance(ActionDBAccessor.class);
     serviceFactory = injector.getInstance(ServiceFactory.class);
@@ -7235,10 +7237,14 @@ public class AmbariManagementControllerTest {
   @Test
   public void testGetStacks() throws Exception {
 
+    HashSet<String> availableStacks = new HashSet<>();
+    for (StackInfo stackInfo: ambariMetaInfo.getStacks()){
+      availableStacks.add(stackInfo.getName());
+    }
 
     StackRequest request = new StackRequest(null);
     Set<StackResponse> responses = controller.getStacks(Collections.singleton(request));
-    Assert.assertEquals(STACKS_CNT, responses.size());
+    Assert.assertEquals(availableStacks.size(), responses.size());
 
     StackRequest requestWithParams = new StackRequest(STACK_NAME);
     Set<StackResponse> responsesWithParams = controller.getStacks(Collections.singleton(requestWithParams));
@@ -9256,84 +9262,81 @@ public class AmbariManagementControllerTest {
     String HOST1 = getUniqueName();
     String HOST2 = getUniqueName();
 
-    try {
-      Clusters clusters = injector.getInstance(Clusters.class);
+    Clusters clusters = injector.getInstance(Clusters.class);
 
-      clusters.addHost(HOST1);
-      Host host = clusters.getHost(HOST1);
-      setOsFamily(host, "redhat", "6.3");
-      clusters.getHost(HOST1).setState(HostState.HEALTHY);
+    clusters.addHost(HOST1);
+    Host host = clusters.getHost(HOST1);
+    setOsFamily(host, "redhat", "6.3");
+    clusters.getHost(HOST1).setState(HostState.HEALTHY);
 
-      clusters.addHost(HOST2);
-      host = clusters.getHost(HOST2);
-      setOsFamily(host, "redhat", "6.3");
+    clusters.addHost(HOST2);
+    host = clusters.getHost(HOST2);
+    setOsFamily(host, "redhat", "6.3");
 
-      AmbariManagementController amc = injector.getInstance(AmbariManagementController.class);
+    AmbariManagementController amc = injector.getInstance(AmbariManagementController.class);
 
-      ClusterRequest cr = new ClusterRequest(null, CLUSTER_NAME, STACK_ID, null);
-      amc.createCluster(cr);
+    ClusterRequest cr = new ClusterRequest(null, CLUSTER_NAME, STACK_ID, null);
+    amc.createCluster(cr);
 
-      Long CLUSTER_ID = clusters.getCluster(CLUSTER_NAME).getClusterId();
+    Long CLUSTER_ID = clusters.getCluster(CLUSTER_NAME).getClusterId();
 
-      ConfigurationRequest configRequest = new ConfigurationRequest(CLUSTER_NAME, "global", "version1",
-          new HashMap<String, String>() {{ put("a", "b"); }}, null);
-      cr.setDesiredConfig(Collections.singletonList(configRequest));
-      cr.setClusterId(CLUSTER_ID);
-      amc.updateClusters(Collections.singleton(cr), new HashMap<String, String>());
+    ConfigurationRequest configRequest = new ConfigurationRequest(CLUSTER_NAME, "global", "version1",
+        new HashMap<String, String>() {{ put("a", "b"); }}, null);
+    cr.setDesiredConfig(Collections.singletonList(configRequest));
+    cr.setClusterId(CLUSTER_ID);
+    amc.updateClusters(Collections.singleton(cr), new HashMap<String, String>());
 
-      // add some hosts
-      Set<HostRequest> hrs = new HashSet<HostRequest>();
-      hrs.add(new HostRequest(HOST1, CLUSTER_NAME, null));
-      HostResourceProviderTest.createHosts(amc, hrs);
+    // add some hosts
+    Set<HostRequest> hrs = new HashSet<HostRequest>();
+    hrs.add(new HostRequest(HOST1, CLUSTER_NAME, null));
+    HostResourceProviderTest.createHosts(amc, hrs);
 
-      Set<ServiceRequest> serviceRequests = new HashSet<ServiceRequest>();
-      serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", null));
-      serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", null));
-      serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", null));
+    Set<ServiceRequest> serviceRequests = new HashSet<ServiceRequest>();
+    serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", null));
+    serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", null));
+    serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", null));
 
-      ServiceResourceProviderTest.createServices(amc, serviceRequests);
+    ServiceResourceProviderTest.createServices(amc, serviceRequests);
+
+    Set<ServiceComponentRequest> serviceComponentRequests = new HashSet<ServiceComponentRequest>();
+    serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "NAMENODE", null));
+    serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "SECONDARY_NAMENODE", null));
+    serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "DATANODE", null));
+    serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "MAPREDUCE2", "HISTORYSERVER", null));
+    serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "YARN", "RESOURCEMANAGER", null));
+    serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "YARN", "NODEMANAGER", null));
+    serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "HDFS_CLIENT", null));
 
-      Set<ServiceComponentRequest> serviceComponentRequests = new HashSet<ServiceComponentRequest>();
-      serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "NAMENODE", null));
-      serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "SECONDARY_NAMENODE", null));
-      serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "DATANODE", null));
-      serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "MAPREDUCE2", "HISTORYSERVER", null));
-      serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "YARN", "RESOURCEMANAGER", null));
-      serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "YARN", "NODEMANAGER", null));
-      serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "HDFS_CLIENT", null));
+    ComponentResourceProviderTest.createComponents(amc, serviceComponentRequests);
 
-      ComponentResourceProviderTest.createComponents(amc, serviceComponentRequests);
+    Set<ServiceComponentHostRequest> componentHostRequests = new HashSet<ServiceComponentHostRequest>();
+    componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "HDFS", "DATANODE", HOST1, null));
+    componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "HDFS", "NAMENODE", HOST1, null));
+    componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "HDFS", "SECONDARY_NAMENODE", HOST1, null));
+    componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "MAPREDUCE2", "HISTORYSERVER", HOST1, null));
+    componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "YARN", "RESOURCEMANAGER", HOST1, null));
+    componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "YARN", "NODEMANAGER", HOST1, null));
+    componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "HDFS", "HDFS_CLIENT", HOST1, null));
 
-      Set<ServiceComponentHostRequest> componentHostRequests = new HashSet<ServiceComponentHostRequest>();
-      componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "HDFS", "DATANODE", HOST1, null));
-      componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "HDFS", "NAMENODE", HOST1, null));
-      componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "HDFS", "SECONDARY_NAMENODE", HOST1, null));
-      componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "MAPREDUCE2", "HISTORYSERVER", HOST1, null));
-      componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "YARN", "RESOURCEMANAGER", HOST1, null));
-      componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "YARN", "NODEMANAGER", HOST1, null));
-      componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, "HDFS", "HDFS_CLIENT", HOST1, null));
+    amc.createHostComponents(componentHostRequests);
 
-      amc.createHostComponents(componentHostRequests);
+    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
+    ExecuteActionRequest ar = new ExecuteActionRequest(CLUSTER_NAME, Role.HDFS_SERVICE_CHECK.name(), null, false);
+    ar.getResourceFilters().add(resourceFilter);
+    amc.createAction(ar, null);
 
-      RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
-      ExecuteActionRequest ar = new ExecuteActionRequest(CLUSTER_NAME, Role.HDFS_SERVICE_CHECK.name(), null, false);
-      ar.getResourceFilters().add(resourceFilter);
-      amc.createAction(ar, null);
 
-      // change mind, delete the cluster
-      amc.deleteCluster(cr);
+    // change mind, delete the cluster
+    amc.deleteCluster(cr);
 
       assertNotNull(clusters.getHost(HOST1));
       assertNotNull(clusters.getHost(HOST2));
 
-      HostDAO dao = injector.getInstance(HostDAO.class);
+    HostDAO dao = injector.getInstance(HostDAO.class);
 
-      assertNotNull(dao.findByName(HOST1));
-      assertNotNull(dao.findByName(HOST2));
+    assertNotNull(dao.findByName(HOST1));
+    assertNotNull(dao.findByName(HOST2));
 
-    } finally {
-//      injector.getInstance(PersistService.class).stop();
-    }
   }
 
   @Test
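
The new setup() call to EventBusSynchronizer.synchronizeAmbariEventPublisher(injector) appears intended to make event delivery synchronous so the test can assert immediately after an action; that reading is an assumption based on the name. A generic Guava sketch of the idea, not Ambari's actual synchronizer:

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;

public class SynchronousBusSketch {

  static class CountingListener {
    int seen;

    @Subscribe
    public void onEvent(String event) {
      seen++;
    }
  }

  public static void main(String[] args) {
    // A plain EventBus dispatches on the posting thread, unlike AsyncEventBus,
    // so the listener has already run by the time post() returns.
    EventBus bus = new EventBus();
    CountingListener listener = new CountingListener();
    bus.register(listener);
    bus.post("cluster-deleted");
    System.out.println(listener.seen); // 1 -- deterministic, no sleeps or polling needed
  }
}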


[17/50] [abbrv] ambari git commit: Revert "AMBARI-20033. Typecasting to 'long' from earlier 'float', before setting them the following configs for recommendation : (1). 'llap_concurrency', (2). 'llap_concurrency' max value and (3). 'hive.llap.daemon.num.

Posted by nc...@apache.org.
Revert "AMBARI-20033. Typecasting to 'long' from earlier 'float', before setting them the following configs for recommendation : (1). 'llap_concurrency', (2). 'llap_concurrency' max value and (3). 'hive.llap.daemon.num.executors' max value."

This reverts commit d8c8b4ec4e1a3467279c2b523516175662c03afe.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/95d096de
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/95d096de
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/95d096de

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 95d096dea461466ba2e22028620b9c2081558bc6
Parents: d8c8b4e
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Wed Feb 15 13:36:56 2017 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Wed Feb 15 13:36:56 2017 -0800

----------------------------------------------------------------------
 .../resources/stacks/HDP/2.5/services/stack_advisor.py | 13 +++++--------
 1 file changed, 5 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/95d096de/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 52ada52..4de9a41 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -1031,15 +1031,12 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
                                  mem_per_thread_for_llap, normalized_tez_am_container_size))
       if llap_concurrency == 0:
         llap_concurrency = 1
-        Logger.info("DBG: Readjusted 'llap_concurrency' to : 1. Earlier calculated value : 0")
 
       if llap_concurrency * normalized_tez_am_container_size > hive_tez_am_cap_available:
-        llap_concurrency = long(math.floor(hive_tez_am_cap_available / normalized_tez_am_container_size))
-        Logger.info("DBG: Readjusted 'llap_concurrency' to : {0}, as llap_concurrency({1}) * normalized_tez_am_container_size({2}) > hive_tez_am_cap_available({3}))"
-                    .format(llap_concurrency, llap_concurrency, normalized_tez_am_container_size, hive_tez_am_cap_available))
+        llap_concurrency = math.floor(hive_tez_am_cap_available / normalized_tez_am_container_size)
 
         if llap_concurrency <= 0:
-          Logger.warning("DBG: Calculated 'LLAP Concurrent Queries' = {0}. Expected value >= 1.".format(llap_concurrency))
+          Logger.warning("Calculated 'LLAP Concurrent Queries' = {0}. Expected value >= 1.".format(llap_concurrency))
           self.recommendDefaultLlapConfiguration(configurations, services, hosts)
           return
         Logger.info("DBG: Adjusted 'llap_concurrency' : {0}, using following: hive_tez_am_cap_available : {1}, normalized_tez_am_container_size: "
@@ -1065,8 +1062,8 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
                   ": {2}, MIN_EXECUTOR_TO_AM_RATIO : {3}, MAX_CONCURRENT_QUERIES : {4}".format(max_llap_concurreny_limit, max_executors_per_node,
                                                                                                num_llap_nodes_requested, MIN_EXECUTOR_TO_AM_RATIO,
                                                                                                MAX_CONCURRENT_QUERIES))
-    max_llap_concurreny = long(min(max_llap_concurreny_limit, math.floor(llap_mem_for_tezAm_and_daemons / (MIN_EXECUTOR_TO_AM_RATIO *
-                                                                                                      mem_per_thread_for_llap + normalized_tez_am_container_size))))
+    max_llap_concurreny = min(max_llap_concurreny_limit, math.floor(llap_mem_for_tezAm_and_daemons / (MIN_EXECUTOR_TO_AM_RATIO *
+                                                                                                      mem_per_thread_for_llap + normalized_tez_am_container_size)))
     Logger.info("DBG: Calculated 'max_llap_concurreny' : {0}, using following : max_llap_concurreny_limit : {1}, llap_mem_for_tezAm_and_daemons : "
                   "{2}, MIN_EXECUTOR_TO_AM_RATIO : {3}, mem_per_thread_for_llap : {4}, normalized_tez_am_container_size : "
                   "{5}".format(max_llap_concurreny, max_llap_concurreny_limit, llap_mem_for_tezAm_and_daemons, MIN_EXECUTOR_TO_AM_RATIO,
@@ -1212,7 +1209,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     Logger.info("DBG: Putting num_executors_per_node as {0}".format(num_executors_per_node))
     putHiveInteractiveSiteProperty('hive.llap.daemon.num.executors', num_executors_per_node)
     putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "minimum", 1)
-    putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "maximum", long(num_executors_per_node_max))
+    putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "maximum", float(num_executors_per_node_max))
 
     # 'hive.llap.io.threadpool.size' config value is to be set same as value calculated for
     # 'hive.llap.daemon.num.executors' at all times.


[07/50] [abbrv] ambari git commit: AMBARI-19999 : Hive view 2.0 upload table : supporting endlines in input file, supporting char datatype in column type, handling errors during upload (nitirajrathore)

Posted by nc...@apache.org.
AMBARI-19999 : Hive view 2.0 upload table : supporting endlines in input file, supporting char datatype in column type, handling errors during upload (nitirajrathore)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/141e88dd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/141e88dd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/141e88dd

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 141e88dd4f3e2e48b2bd40d205e6c9716aec2669
Parents: 513b527
Author: Nitiraj Singh Rathore <ni...@gmail.com>
Authored: Wed Feb 15 18:35:33 2017 +0530
Committer: Nitiraj Singh Rathore <ni...@gmail.com>
Committed: Wed Feb 15 18:36:10 2017 +0530

----------------------------------------------------------------------
 .../resources/ui/app/components/radio-button.js |  1 -
 .../resources/ui/app/components/upload-table.js |  4 +-
 .../resources/ui/app/locales/en/translations.js | 14 +---
 .../src/main/resources/ui/app/models/column.js  |  7 +-
 .../databases/database/tables/upload-table.js   | 79 +++++---------------
 .../templates/components/csv-format-params.hbs  | 18 +++--
 .../templates/databases/database/tables/new.hbs |  2 +-
 7 files changed, 42 insertions(+), 83 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/141e88dd/contrib/views/hive20/src/main/resources/ui/app/components/radio-button.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/radio-button.js b/contrib/views/hive20/src/main/resources/ui/app/components/radio-button.js
index 066168c..c2e7e0d 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/radio-button.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/radio-button.js
@@ -28,7 +28,6 @@ export default Ember.Component.extend({
   }.property('value', 'checked'),
 
   change: function() {
-    console.log("value changed : ", this.get('value'));
     this.set('checked', this.get('value'));
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/141e88dd/contrib/views/hive20/src/main/resources/ui/app/components/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/upload-table.js b/contrib/views/hive20/src/main/resources/ui/app/components/upload-table.js
index 29e9891..8df03e5 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/upload-table.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/upload-table.js
@@ -23,6 +23,7 @@ export default Ember.Component.extend({
   fileFormatInfo: Ember.Object.create({
     csvParams: Ember.Object.create(),
     inputFileType: null,
+    containsEndlines: false,
   }),
   fileInfo: Ember.Object.create({
     files: Ember.A(),
@@ -33,9 +34,6 @@ export default Ember.Component.extend({
   actions: {
     onFileChanged: function () {
       console.log("inside files changed");
-      console.log("fileFormatInfo : ", this.get("fileFormatInfo"));
-      console.log("fileInfo : ", this.get("fileInfo"));
-      console.log("tableInfo : ", this.get("tableInfo"));
       this.send("preview");
     },
     preview: function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/141e88dd/contrib/views/hive20/src/main/resources/ui/app/locales/en/translations.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/locales/en/translations.js b/contrib/views/hive20/src/main/resources/ui/app/locales/en/translations.js
index b550dbe..50eba3a 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/locales/en/translations.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/locales/en/translations.js
@@ -35,29 +35,23 @@ export default {
         'escapeCharacterTooltip': "Escape character. Default is backslash (\).",
         'quoteCharacterTooltip': 'Quote character. Default is double quote (").',
         'quoteCharacterField': "Quote Character",
+        'isFirstRowHeader': "Is first row header?",
+        'fieldsTerminatedByTooltip': "Fields Terminated By character for Hive table.",
+        'isFirstRowHeaderTooltip': "Check if the first row of CSV is a header.",
+        'containsEndlines': "Contains endlines?",
       },
       "uploadTable": {
         'uploadProgress': "Upload Progress",
         'uploading': "Uploading..",
         'selectFromLocal': "Select from local",
         'hdfsPath': "HDFS Path",
-        'selectDatabase': "Select a Database",
         'tableName': "Table name",
         'tableNameErrorMessage': "Only alphanumeric and underscore characters are allowed in table name.",
         'tableNameTooltip': "Enter valid (alphanumeric + underscore) table name.",
-        'storedAs': "Stored as",
-        'isFirstRowHeader': "Is first row header ?",
-        'columnNameTooltip': "Enter valid (alphanumeric + underscore) column name.",
         'columnNameErrorMessage': "Only alphanumeric and underscore characters are allowed in column names.",
         'hdfsFieldTooltip': "Enter full HDFS path",
         'hdfsFieldPlaceholder': "Enter full HDFS path",
         'hdfsFieldErrorMessage': "Please enter complete path of hdfs file to upload.",
-        'containsEndlines': "Contains endlines?",
-        'fieldsTerminatedByField': "Fields Terminated By",
-        'escapedByField': "Escape By",
-        'escapedByTooltip': "Escaped By character for Hive table.",
-        'fieldsTerminatedByTooltip': "Fields Terminated By character for Hive table.",
-        'isFirstRowHeaderTooltip': "Check if the first row of CSV is a header.",
         'showPreview': "Preview"
       }
     },

http://git-wip-us.apache.org/repos/asf/ambari/blob/141e88dd/contrib/views/hive20/src/main/resources/ui/app/models/column.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/column.js b/contrib/views/hive20/src/main/resources/ui/app/models/column.js
index 73a9824..9480616 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/models/column.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/column.js
@@ -111,10 +111,10 @@ let Column = Ember.Object.extend(Ember.Copyable,{
   },
 
   copy: function(){
-    return Column.create({
+    let col = Column.create({
       name: this.get("name"),
-      type: this.get("type"),
-      precision: this.get("percision"),
+      type: datatypes.findBy("label", this.get("type.label")),
+      precision: this.get("precision"),
       scale: this.get("scale"),
       isPartitioned: this.get("isPartitioned"),
       isClustered: this.get("isClustered"),
@@ -123,6 +123,7 @@ let Column = Ember.Object.extend(Ember.Copyable,{
       errors: this.get("errors").copy(),
       editing: this.get("editing"),
     });
+    return col;
   }
 
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/141e88dd/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
index 0e61905..a9bf9ea 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/database/tables/upload-table.js
@@ -41,11 +41,6 @@ export default NewTable.extend({
   getCharOptionByCharCode: function(charCode){
     return Helpers.getAllTerminationCharacters().findBy("id", charCode + "");
   },
-  // onChangeSelectedFileType: function(){
-  //   if(this.get('selectedFileType') === this.get('fileTypes')[1] && this.get('containsEndlines') === true){
-  //     this.set('containsEndlines', false);
-  //   }
-  // }.observes("selectedFileType", "containsEndlines"),
   getUploader(){
     return this.get('store').adapterFor('upload-table');
   },
@@ -102,7 +97,6 @@ export default NewTable.extend({
     var self = this;
     var fetchJobPromise = this.get('jobService').getJob(jobId);
     fetchJobPromise.then(function (data) {
-      console.log("waitForJobStatus : data : ", data);
       var job = JSON.parse(JSON.stringify(data));
       var status = job.status;
       if (status == constants.statuses.succeeded ) {
@@ -262,27 +256,24 @@ export default NewTable.extend({
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.failedToCreateActualTable'));
     this.setError(error);
   },
+  copyTableMeta: function(tableMeta){
+    let colArray = Ember.copy(tableMeta.columns, true);
+    let tableMetaCopy = JSON.parse(JSON.stringify(tableMeta));
+    tableMetaCopy.columns = colArray;
+    return tableMetaCopy;
+  },
   createTempTable: function (tableData) {
-    let tableMeta = JSON.parse(JSON.stringify(tableData.get("tableMeta")));
-    // manually copy the columns as they are missing members when copying
-    let columns = tableData.get("tableMeta").columns.map(function(col){
-      return col.copy();
-    });
-    tableMeta.columns = columns;
-
-    console.log("tableMeta : ", tableMeta);
-
-    var self = this;
     console.log("createTempTable");
     this.pushUploadProgressInfos(this.formatMessage('hive.messages.startingToCreateTemporaryTable'));
+    let tableMeta = this.copyTableMeta(tableData.get("tableMeta")); // deep copy or otherwise it does make separate
     var tempTableName = this.generateTempTableName();
     tableMeta.name = tempTableName;
 
     var headers = tableMeta.columns.map(function(column){
       if(tableData.fileFormatInfo.containsEndlines){
-        column.type.label = "STRING";
-        delete column.scale;
-        delete column.precision;
+        column.set("type", datatypes.findBy("label","STRING"));
+        column.set("scale");
+        column.set("precision");
       }
       return column;
     });
@@ -714,54 +705,21 @@ export default NewTable.extend({
       this.set("showMoreOrLess", "Show Less");
     }
   },
+  validateInputs: function(tableData){
+    let tableMeta = tableData.get("tableMeta");
+    let containsEndlines = tableData.get("fileFormatInfo.containsEndlines");
+    if(containsEndlines == true && tableMeta.settings && tableMeta.settings.fileFormat
+      && tableMeta.settings.fileFormat.type && tableMeta.settings.fileFormat.type === "TEXTFILE"){
+      throw new Error(`Cannot support endlines in fields when the  File Format is TEXTFILE. Please uncheck '${this.translate('hive.ui.csvFormatParams.containsEndlines')}'`);
+    }
+  },
 
-  displayOption: "display:none",
   actions: {
-  toggleCSVFormat: function() {
-    console.log("inside toggleCSVFormat");
-    this.toggleProperty('showCSVFormatInput');
-  },
-  hideInputParamModal : function(){
-      Ember.$("#inputParamsModal").modal("hide");
-    },
-    showInputParamModal : function(){
-      if(this.get('inputFileTypeCSV')){
-        Ember.$("#inputParamsModal").modal("show");
-      }
-    },
-    hideRowFormatModal : function(){
-      Ember.$("#rowFormatModal").modal("hide");
-    },
-    showRowFormatModal : function(){
-      if(this.get('storedAsTextFile')) {
-        Ember.$("#rowFormatModal").modal("show");
-      }
-    },
-    toggleErrors: function () {
-      this.toggleProperty('showErrors');
-    },
-    // filesUploaded: function (files) {
-    //   console.log("upload-table.js : uploaded new files : ", files);
-    //   this.clearFields();
-    //
-    //   this.set('files', files);
-    //   var name = files[0].name;
-    //   var i = name.indexOf(".");
-    //   var tableName = name.substr(0, i);
-    //   this.set('tableName', tableName);
-    //   var self = this;
-    //   return this.generatePreview(sourceObject)
-    // },
     preview: function (previewObject) {
       console.log("upload-table.js : uploaded new files : ", previewObject);
       this.clearFields();
 
       this.set('previewObject', previewObject);
-      // var name = previewObject.get("fileInfo").get("files")[0].name;
-      // var i = name.indexOf(".");
-      // var tableName = name.substr(0, i);
-      // this.set('tableName', tableName);
-      // var self = this;
       return this.generatePreview(previewObject)
     },
     previewFromHdfs: function () {
@@ -770,6 +728,7 @@ export default NewTable.extend({
     uploadTable: function (tableData) {
       console.log("tableData", tableData);
       try {
+        this.validateInputs(tableData);
         this.createTableAndUploadFile(tableData);
       } catch (e) {
         console.log("exception occured : ", e);

http://git-wip-us.apache.org/repos/asf/ambari/blob/141e88dd/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
index c63f502..df80260 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/csv-format-params.hbs
@@ -113,16 +113,24 @@
     <div class="row">
       <div class="col-md-6 form-horizontal">
         <div class="form-group">
-          <label class="col-md-2 control-label">Is First Row Header</label>
-          <div class="col-md-4">
-            <label>
-              {{input type="checkbox" checked=fileFormatInfo.csvParams.isFirstRowHeader}}
-            </label>
+          <label class="col-md-4 control-label">{{t 'hive.ui.csvFormatParams.isFirstRowHeader'}}</label>
+          <div class="col-md-8">
+            {{input type="checkbox" checked=fileFormatInfo.csvParams.isFirstRowHeader}}
           </div>
         </div>
       </div>
     </div>
     {{/if}}
+    <div class="row">
+      <div class="col-md-6 form-horizontal">
+        <div class="form-group">
+          <label class="col-md-4 control-label">{{t 'hive.ui.csvFormatParams.containsEndlines'}}</label>
+          <div class="col-md-8">
+            {{input type="checkbox" checked=fileFormatInfo.containsEndlines}}
+          </div>
+        </div>
+      </div>
+    </div>
   </div>
   {{/if}}
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/141e88dd/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
index 4f3b98a..288c00c 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/databases/database/tables/new.hbs
@@ -34,7 +34,7 @@
   <div class="table-header row">
     <p class="text-uppercase">table<strong>&nbsp;&nbsp;>&nbsp;&nbsp;create table</strong>
     <div class="pull-right">
-    {{#link-to "databases.database.tables.upload-table" }}<p class="text-uppercase">{{fa-icon "upload"}}&nbsp;upload table</p>{{/link-to}}
+      {{#link-to "databases.database.tables.upload-table" }}<button  class="btn btn-success"><p class="text-uppercase">{{fa-icon "upload"}}&nbsp;upload table</p></button>{{/link-to}}
     </div>
   </p>
   </div>


[18/50] [abbrv] ambari git commit: AMBARI-20033. Typecasting to 'long' from earlier 'float', before setting them the following configs for recommendation : (1). 'llap_concurrency', (2). 'llap_concurrency' max value and (3). 'hive.llap.daemon.num.executor

Posted by nc...@apache.org.
AMBARI-20033. Typecast to 'long' (from the earlier 'float') before setting the following configs for recommendation: (1) 'llap_concurrency', (2) 'llap_concurrency' max value, and (3) 'hive.llap.daemon.num.executors' max value.
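
The cast matters because these stack-advisor scripts run on Python 2 (the long() calls in the diff imply as much), where math.floor() returns a float. A minimal sketch of the effect, using made-up numbers (8192 and 1024 are illustrative, not taken from the commit):

    import math

    hive_tez_am_cap_available = 8192          # illustrative value only
    normalized_tez_am_container_size = 1024   # illustrative value only

    raw = math.floor(hive_tez_am_cap_available / normalized_tez_am_container_size)
    print type(raw), raw                # <type 'float'> 8.0 -- floor() yields a float
    print type(long(raw)), long(raw)    # <type 'long'> 8   -- integral, as the recommended config value should be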


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/90235286
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/90235286
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/90235286

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 90235286c8a4aebb1c5159be89509f6679a6e1da
Parents: 95d096d
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Wed Feb 15 13:37:54 2017 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Wed Feb 15 13:37:54 2017 -0800

----------------------------------------------------------------------
 .../common-services/YARN/3.0.0.3.0/service_advisor.py | 14 +++++++++-----
 .../stacks/HDP/2.5/services/stack_advisor.py          | 13 ++++++++-----
 2 files changed, 17 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/90235286/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
index aecf1e3..6e4e55f 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/service_advisor.py
@@ -23,6 +23,7 @@ import os
 import traceback
 import inspect
 import socket
+import math
 from math import floor, ceil
 
 # Local imports
@@ -699,12 +700,15 @@ class YARNRecommender(service_advisor.ServiceAdvisor):
                                mem_per_thread_for_llap, normalized_tez_am_container_size))
       if llap_concurrency == 0:
         llap_concurrency = 1
+        Logger.info("DBG: Readjusted 'llap_concurrency' to : 1. Earlier calculated value : 0")
 
       if llap_concurrency * normalized_tez_am_container_size > hive_tez_am_cap_available:
-        llap_concurrency = floor(hive_tez_am_cap_available / normalized_tez_am_container_size)
+        llap_concurrency = long(math.floor(hive_tez_am_cap_available / normalized_tez_am_container_size))
+        Logger.info("DBG: Readjusted 'llap_concurrency' to : {0}, as llap_concurrency({1}) * normalized_tez_am_container_size({2}) > hive_tez_am_cap_available({3}))"
+                    .format(llap_concurrency, llap_concurrency, normalized_tez_am_container_size, hive_tez_am_cap_available))
 
         if llap_concurrency <= 0:
-          Logger.warning("Calculated 'LLAP Concurrent Queries' = {0}. Expected value >= 1.".format(llap_concurrency))
+          Logger.warning("DBG: Calculated 'LLAP Concurrent Queries' = {0}. Expected value >= 1.".format(llap_concurrency))
           self.recommendDefaultLlapConfiguration(configurations, services, hosts)
           return
         Logger.info("DBG: Adjusted 'llap_concurrency' : {0}, using following: hive_tez_am_cap_available : {1}, normalized_tez_am_container_size: "
@@ -730,8 +734,8 @@ class YARNRecommender(service_advisor.ServiceAdvisor):
                 ": {2}, MIN_EXECUTOR_TO_AM_RATIO : {3}, MAX_CONCURRENT_QUERIES : {4}".format(max_llap_concurreny_limit, max_executors_per_node,
                                                                                              num_llap_nodes_requested, MIN_EXECUTOR_TO_AM_RATIO,
                                                                                              MAX_CONCURRENT_QUERIES))
-    max_llap_concurreny = min(max_llap_concurreny_limit, floor(llap_mem_for_tezAm_and_daemons / (MIN_EXECUTOR_TO_AM_RATIO *
-                                                                                                      mem_per_thread_for_llap + normalized_tez_am_container_size)))
+    max_llap_concurreny = long(min(max_llap_concurreny_limit, floor(llap_mem_for_tezAm_and_daemons / (MIN_EXECUTOR_TO_AM_RATIO *
+                                                                                                      mem_per_thread_for_llap + normalized_tez_am_container_size))))
     Logger.info("DBG: Calculated 'max_llap_concurreny' : {0}, using following : max_llap_concurreny_limit : {1}, llap_mem_for_tezAm_and_daemons : "
                 "{2}, MIN_EXECUTOR_TO_AM_RATIO : {3}, mem_per_thread_for_llap : {4}, normalized_tez_am_container_size : "
                 "{5}".format(max_llap_concurreny, max_llap_concurreny_limit, llap_mem_for_tezAm_and_daemons, MIN_EXECUTOR_TO_AM_RATIO,
@@ -873,7 +877,7 @@ class YARNRecommender(service_advisor.ServiceAdvisor):
     num_executors_per_node = long(num_executors_per_node)
     putHiveInteractiveSiteProperty('hive.llap.daemon.num.executors', num_executors_per_node)
     putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "minimum", 1)
-    putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "maximum", float(num_executors_per_node_max))
+    putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "maximum", long(num_executors_per_node_max))
 
     # 'hive.llap.io.threadpool.size' config value is to be set same as value calculated for
     # 'hive.llap.daemon.num.executors' at all times.

http://git-wip-us.apache.org/repos/asf/ambari/blob/90235286/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 4de9a41..52ada52 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -1031,12 +1031,15 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
                                  mem_per_thread_for_llap, normalized_tez_am_container_size))
       if llap_concurrency == 0:
         llap_concurrency = 1
+        Logger.info("DBG: Readjusted 'llap_concurrency' to : 1. Earlier calculated value : 0")
 
       if llap_concurrency * normalized_tez_am_container_size > hive_tez_am_cap_available:
-        llap_concurrency = math.floor(hive_tez_am_cap_available / normalized_tez_am_container_size)
+        llap_concurrency = long(math.floor(hive_tez_am_cap_available / normalized_tez_am_container_size))
+        Logger.info("DBG: Readjusted 'llap_concurrency' to : {0}, as llap_concurrency({1}) * normalized_tez_am_container_size({2}) > hive_tez_am_cap_available({3}))"
+                    .format(llap_concurrency, llap_concurrency, normalized_tez_am_container_size, hive_tez_am_cap_available))
 
         if llap_concurrency <= 0:
-          Logger.warning("Calculated 'LLAP Concurrent Queries' = {0}. Expected value >= 1.".format(llap_concurrency))
+          Logger.warning("DBG: Calculated 'LLAP Concurrent Queries' = {0}. Expected value >= 1.".format(llap_concurrency))
           self.recommendDefaultLlapConfiguration(configurations, services, hosts)
           return
         Logger.info("DBG: Adjusted 'llap_concurrency' : {0}, using following: hive_tez_am_cap_available : {1}, normalized_tez_am_container_size: "
@@ -1062,8 +1065,8 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
                   ": {2}, MIN_EXECUTOR_TO_AM_RATIO : {3}, MAX_CONCURRENT_QUERIES : {4}".format(max_llap_concurreny_limit, max_executors_per_node,
                                                                                                num_llap_nodes_requested, MIN_EXECUTOR_TO_AM_RATIO,
                                                                                                MAX_CONCURRENT_QUERIES))
-    max_llap_concurreny = min(max_llap_concurreny_limit, math.floor(llap_mem_for_tezAm_and_daemons / (MIN_EXECUTOR_TO_AM_RATIO *
-                                                                                                      mem_per_thread_for_llap + normalized_tez_am_container_size)))
+    max_llap_concurreny = long(min(max_llap_concurreny_limit, math.floor(llap_mem_for_tezAm_and_daemons / (MIN_EXECUTOR_TO_AM_RATIO *
+                                                                                                      mem_per_thread_for_llap + normalized_tez_am_container_size))))
     Logger.info("DBG: Calculated 'max_llap_concurreny' : {0}, using following : max_llap_concurreny_limit : {1}, llap_mem_for_tezAm_and_daemons : "
                   "{2}, MIN_EXECUTOR_TO_AM_RATIO : {3}, mem_per_thread_for_llap : {4}, normalized_tez_am_container_size : "
                   "{5}".format(max_llap_concurreny, max_llap_concurreny_limit, llap_mem_for_tezAm_and_daemons, MIN_EXECUTOR_TO_AM_RATIO,
@@ -1209,7 +1212,7 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     Logger.info("DBG: Putting num_executors_per_node as {0}".format(num_executors_per_node))
     putHiveInteractiveSiteProperty('hive.llap.daemon.num.executors', num_executors_per_node)
     putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "minimum", 1)
-    putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "maximum", float(num_executors_per_node_max))
+    putHiveInteractiveSitePropertyAttribute('hive.llap.daemon.num.executors', "maximum", long(num_executors_per_node_max))
 
     # 'hive.llap.io.threadpool.size' config value is to be set same as value calculated for
     # 'hive.llap.daemon.num.executors' at all times.


[02/50] [abbrv] ambari git commit: AMBARI-20007. Args value is not retained for ssh node.(Padma Priya N via gauravn7)

Posted by nc...@apache.org.
AMBARI-20007. Args value is not retained for ssh node.(Padma Priya N via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a8c10104
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a8c10104
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a8c10104

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: a8c101041ae50234ca72003d32bbd47001d8cbe8
Parents: 45ef011
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Wed Feb 15 17:07:36 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Wed Feb 15 17:07:36 2017 +0530

----------------------------------------------------------------------
 .../resources/ui/app/components/java-action.js  | 21 +++++-------
 .../resources/ui/app/components/ssh-action.js   | 29 ++++++----------
 .../app/templates/components/shell-action.hbs   | 36 ++++++--------------
 .../ui/app/templates/components/ssh-action.hbs  | 12 +++----
 4 files changed, 35 insertions(+), 63 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a8c10104/contrib/views/wfmanager/src/main/resources/ui/app/components/java-action.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/java-action.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/java-action.js
index 9d43fe7..18a3702 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/java-action.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/java-action.js
@@ -24,29 +24,24 @@ const Validations = buildValidations({
   }),
   'actionModel.jobTracker': validator('presence', {
     presence : true
-  })  
+  })
 });
 
 export default Ember.Component.extend(Validations, {
   fileBrowser : Ember.inject.service('file-browser'),
-  javaOptsObserver : Ember.observer('isSingle',function(){
-    if(this.get('isSingle')){
-      this.set("actionModel.javaOpt", undefined);
-    }else{
-      this.set("actionModel.javaOpts", undefined);
-    }
-  }),
+  isSingle : false,
   setUp : function(){
     if(this.get('actionModel.args') === undefined){
       this.set("actionModel.args", Ember.A([]));
     }
-    if(this.get('actionModel.javaOpt') === undefined && !this.get('actionModel.javaOpts')){
+    if(this.get('actionModel.javaOpt') === undefined){
       this.set("actionModel.javaOpt", Ember.A([]));
+    }else if(this.get('actionModel.javaOpt').length > 0){
       this.set('isSingle', false);
-    }else if(this.get('actionModel.javaOpt') === undefined && this.get('actionModel.javaOpts')){
-      this.set('isSingle', true);
-    }else{
+    }else if(Ember.isBlank(this.get('actionModel.javaOpts')) && this.get('actionModel.javaOpt').length == 0){
       this.set('isSingle', false);
+    }else{
+      this.set('isSingle', true);
     }
     if(this.get('actionModel.files') === undefined){
       this.set("actionModel.files", Ember.A([]));
@@ -90,8 +85,10 @@ export default Ember.Component.extend(Validations, {
     onJavaOptChange(value){
       if(value === "single"){
         this.set('isSingle',true);
+        this.set("actionModel.javaOpt", Ember.A([]));
       }else{
         this.set('isSingle',false);
+        this.set("actionModel.javaOpts", undefined);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8c10104/contrib/views/wfmanager/src/main/resources/ui/app/components/ssh-action.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/ssh-action.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/ssh-action.js
index d879a0c..8da7eef 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/ssh-action.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/ssh-action.js
@@ -28,27 +28,17 @@ const Validations = buildValidations({
 });
 export default Ember.Component.extend(Validations, {
   fileBrowser : Ember.inject.service('file-browser'),
-  javaOptsObserver : Ember.observer('isSingle',function(){
-    if(this.get('isSingle')){
-      this.set("actionModel.arg", Ember.A([]));
-    }else{
-      this.set("actionModel.args", Ember.A([]));
-    }
-  }),
+  useArg : false,
   setUp : function(){
     if(this.get('actionModel.args') === undefined){
       this.set("actionModel.args", Ember.A([]));
+    }else if(this.get('actionModel.args').length > 0){
+      this.set('useArg', false);
     }
     if(this.get('actionModel.arg') === undefined){
       this.set("actionModel.arg", Ember.A([]));
-    }
-    if(this.get('actionModel.arg') === undefined && !this.get('actionModel.args')){
-      this.set("actionModel.arg", Ember.A([]));
-      this.set('isSingle', false);
-    }else if(this.get('actionModel.arg') === undefined && this.get('actionModel.args')){
-      this.set('isSingle', true);
-    }else{
-      this.set('isSingle', false);
+    }else if(this.get('actionModel.arg').length > 0){
+      this.set('useArg', true);
     }
   }.on('init'),
   initialize : function(){
@@ -73,11 +63,12 @@ export default Ember.Component.extend(Validations, {
     register (name, context){
       this.sendAction('register',name , context);
     },
-    onJavaOptChange(value){
-      if(value === "single"){
-        this.set('isSingle',true);
+    argTypeChanged(useArg){
+      this.set('useArg', useArg);
+      if(useArg){
+        this.set("actionModel.args", Ember.A([]));
       }else{
-        this.set('isSingle',false);
+        this.set("actionModel.arg", Ember.A([]));
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8c10104/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/shell-action.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/shell-action.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/shell-action.hbs
index 2cd8207..a2921b0 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/shell-action.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/shell-action.hbs
@@ -24,23 +24,24 @@
       <div class="input-group">
         {{input type="text" class="form-control" name="exec" title="Command or the path of the Shell command to execute" value=actionModel.exec placeholder="Script path or shell command"}}
         <span class="input-group-btn">
-          <button class="btn btn-secondary" type="button" {{action "openFileBrowser" "actionModel.exec"}}>Browse</button>
+          <button class="btn btn-primary" type="button" {{action "openFileBrowser" "actionModel.exec"}}>Browse</button>
         </span>
       </div>
       {{field-error model=this field='actionModel.exec' showErrorMessage=showErrorMessage}}
     </div>
   </div>
-
-
+  {{#arg-config args=actionModel.args register="register" title="Argument"}}{{/arg-config}}
   {{#jobxml-config jobXml=actionModel.jobXml openFileBrowser="openFileBrowser" register="register"}}{{/jobxml-config}}
-
-
   {{#arg-config args=actionModel.envVar register="register" title="Environment Variable"}}{{/arg-config}}
-
-
-
-</div>
-
+  <div class="form-group">
+    <label for="inputPassword" class="control-label col-xs-2">Capture output</label>
+      <div class="col-xs-10">
+        <div class="checkbox">
+          <label title="capture output of the action">{{input type="checkbox" name="capture-output" checked=actionModel.captureOutput}}</label>
+        </div>
+      </div>
+    </div>
+  </div>
 <div class="panel panel-default">
   <div class="panel-heading">Transition</div>
   <div class="panel-body handlerPanel">
@@ -48,7 +49,6 @@
   </div>
 </div>
 {{#action-credential-config credentials=credentials actionCredentials=actionModel.credentials}}{{/action-credential-config}}
-
 <div class="panel panel-default">
   <div class="panel-heading" id="accordion" data-toggle="collapse" data-parent="#accordion" data-target="#collapseOne">
       Advanced Properties
@@ -71,14 +71,9 @@
               {{input type="text" class="form-control"  name="name-node" value=actionModel.nameNode validations="required,noWhitespace" placeholder="Name node"}}
             </div>
           </div>
-
           {{#file-config files=actionModel.files openFileBrowser="openFileBrowser" register="register" title="File"}}{{/file-config}}
-
           {{#file-config files=actionModel.archives openFileBrowser="openFileBrowser" register="register" title="Archive"}}{{/file-config}}
-
           {{#prepare-config prepare=actionModel.prepare openFileBrowser="openFileBrowser" register="register"}}{{/prepare-config}}
-
-          {{#arg-config args=actionModel.args register="register" title="arg"}}{{/arg-config}}
         </div>
       </div>
       <div class=" panel panel-default">
@@ -87,15 +82,6 @@
           {{#name-value-config configuration=actionModel.configuration register="register"}}{{/name-value-config}}
         </div>
       </div>
-
-
-      <div class="form-group">
-        <div class="col-xs-10">
-          <div class="checkbox">
-            <label title="capture output of the action">{{input type="checkbox" name="capture-output" checked=actionModel.captureOutput}}capture output</label>
-          </div>
-        </div>
-      </div>
     </div>
   </div>
   {{#sla-info slaInfo=actionModel.slaInfo register="register" slaEnabled=actionModel.slaEnabled register="register"}}{{/sla-info}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/a8c10104/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/ssh-action.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/ssh-action.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/ssh-action.hbs
index 64f3906..a29a2ec 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/ssh-action.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/ssh-action.hbs
@@ -36,19 +36,17 @@
     <div class="form-group">
       <label class="control-label col-xs-2 visibility-hidden">Args</label>
       <div class="col-xs-2">
-        <input type="radio" name="Args" checked={{if (eq isSingle false) 'checked'}}  {{action "onJavaOptChange" "multiple" preventDefault=false on="change"}}> Args
+        <input type="radio" name="args" checked={{if (eq useArg false) 'checked'}}  {{action "argTypeChanged" false on="change"}}> Args
       </div>
       <div class="col-xs-2">
-        <input type="radio" name="Arg" checked={{if (eq isSingle true) 'checked'}}  {{action "onJavaOptChange" "single" preventDefault=false on="change"}}> Arg
+        <input type="radio" name="arg" checked={{if (eq useArg true) 'checked'}}  {{action "argTypeChanged" true on="change"}}> Arg
       </div>
     </div>
-
-    {{#if isSingle}}
-    {{#arg-config args=actionModel.arg register="register" title="Arg"}}{{/arg-config}}
+    {{#if useArg}}
+      {{#arg-config args=actionModel.arg register="register" title="Arg"}}{{/arg-config}}
     {{else}}
-    {{#arg-config args=actionModel.args register="register" title="Args"}}{{/arg-config}}
+      {{#arg-config args=actionModel.args register="register" title="Args"}}{{/arg-config}}
     {{/if}}
-
     <div class="form-group">
       <label class="control-label col-xs-2">Capture output</label>
       <div class="col-xs-8">


[30/50] [abbrv] ambari git commit: AMBARI-20035. Duration in BG operation window should display durations in proper time units (alexantonenko)

Posted by nc...@apache.org.
AMBARI-20035. Duration in BG operation window should display durations in proper time units (alexantonenko)
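
For example, per the updated date_test.js expectations below, an elapsed time of 35,000,000 ms is now rendered as '9h 43m 20s' instead of '9.72 hours': 35,000,000 ms contains 9 full hours (32,400,000 ms), and the remaining 2,600,000 ms is 43 minutes (2,580,000 ms) plus 20 seconds.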


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ab53946f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ab53946f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ab53946f

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: ab53946fa63ad209158300ca0907e5a67097e717
Parents: b12ae51
Author: Alex Antonenko <hi...@gmail.com>
Authored: Thu Feb 16 05:31:58 2017 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Thu Feb 16 17:14:02 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/utils/date/date.js               | 26 ++++++++---------
 .../dashboard/widgets/uptime_text_widget.js     |  9 ++++--
 ambari-web/test/mappers/service_mapper_test.js  |  2 +-
 ambari-web/test/utils/date/date_test.js         | 30 ++++++++++----------
 .../stack_upgrade/upgrade_history_view_test.js  |  8 +++---
 .../widgets/hbase_master_uptime_test.js         |  4 +--
 .../dashboard/widgets/namenode_uptime_test.js   |  4 +--
 .../widgets/resource_manager_uptime_test.js     |  4 +--
 .../widgets/uptime_text_widget_test.js          |  4 +--
 9 files changed, 47 insertions(+), 44 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ab53946f/ambari-web/app/utils/date/date.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/date/date.js b/ambari-web/app/utils/date/date.js
index d461d21..986815e 100644
--- a/ambari-web/app/utils/date/date.js
+++ b/ambari-web/app/utils/date/date.js
@@ -153,13 +153,13 @@ module.exports = {
    * 30 ms = 30 ms
    * 300 ms = 300 ms
    * 999 ms = 999 ms
-   * 1000 ms = 1.00 secs
-   * 3000 ms = 3.00 secs
-   * 35000 ms = 35.00 secs
-   * 350000 ms = 350.00 secs
-   * 999999 ms = 999.99 secs
-   * 1000000 ms = 16.66 mins
-   * 3500000 secs = 58.33 mins
+   * 1000 ms = 1 secs
+   * 3000 ms = 3 secs
+   * 35000 ms = 35 secs
+   * 350000 ms = 350 secs
+   * 999999 ms = 999 secs
+   * 1000000 ms = 17 mins
+   * 3500000 secs = 58 mins
    *
    * @param {number} time
    * @param {bool} [zeroValid] for the case to show 0 when time is 0, not null
@@ -175,6 +175,7 @@ module.exports = {
       return null;
     }
     var timeStr = intTime.toString();
+    var date = new Date(intTime);
     var lengthOfNumber = timeStr.length;
     var oneMinMs = 60000;
     var oneHourMs = 3600000;
@@ -184,19 +185,18 @@ module.exports = {
       return time + ' ms';
     }
     if (lengthOfNumber < 7) {
-      time = (time / 1000).toFixed(2);
+      time = (time / 1000).toFixed(0);
       return time + ' secs';
     }
     if (time < oneHourMs) {
-      time = (time / oneMinMs).toFixed(2);
+      time = (time / oneMinMs).toFixed(0);
       return time + ' mins';
     }
     if (time < oneDayMs) {
-      time = (time / oneHourMs).toFixed(2);
-      return time + ' hours';
+      return date.getUTCHours() + 'h '+ date.getUTCMinutes() + 'm ' + date.getUTCSeconds() +'s';
     }
-    time = (time / oneDayMs).toFixed(2);
-    return time + ' days';
+
+    return ((date.getUTCFullYear() - 1970) * 365 + date.getUTCMonth() * 31 + date.getUTCDate()-1) + 'd ' + date.getUTCHours() + 'h ' + date.getUTCMinutes() + 'm';
   },
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/ab53946f/ambari-web/app/views/main/dashboard/widgets/uptime_text_widget.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets/uptime_text_widget.js b/ambari-web/app/views/main/dashboard/widgets/uptime_text_widget.js
index 74d59f1..e1b9c42 100644
--- a/ambari-web/app/views/main/dashboard/widgets/uptime_text_widget.js
+++ b/ambari-web/app/views/main/dashboard/widgets/uptime_text_widget.js
@@ -76,20 +76,23 @@ App.UptimeTextDashboardWidgetView = App.TextDashboardWidgetView.extend({
   uptimeProcessing: function (uptime) {
     var uptimeString = this.timeConverter(uptime);
     var diff = App.dateTimeWithTimeZone() - uptime;
+    var valueType = "";
     if (diff < 0) {
       diff = 0;
     }
     var formatted = date.timingFormat(diff); //17.67 days
     var timeUnit = null;
     if (formatted) {
-      switch (formatted.split(" ")[1]) {
+      valueType = formatted.split(" ")[0];
+      switch (valueType[valueType.length-1]) {
         case 'secs':
           timeUnit = 's';
           break;
-        case 'hours':
+        case 'h':
           timeUnit = 'hr';
+
           break;
-        case 'days':
+        case 'd':
           timeUnit = 'd';
           break;
         case 'mins':

http://git-wip-us.apache.org/repos/asf/ambari/blob/ab53946f/ambari-web/test/mappers/service_mapper_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mappers/service_mapper_test.js b/ambari-web/test/mappers/service_mapper_test.js
index 4a8d49d..c5a0b10 100644
--- a/ambari-web/test/mappers/service_mapper_test.js
+++ b/ambari-web/test/mappers/service_mapper_test.js
@@ -220,7 +220,7 @@ describe('App.serviceMetricsMapper', function () {
         message: 'Storm mapper, stack version 2.1',
         expectedValues: {
           total_executors: 2,
-          nimbus_uptime: "3.96 hours",
+          nimbus_uptime: "3h 57m 30s",
           free_slots: 2,
           used_slots: 0,
           total_slots: 2,

http://git-wip-us.apache.org/repos/asf/ambari/blob/ab53946f/ambari-web/test/utils/date/date_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/date/date_test.js b/ambari-web/test/utils/date/date_test.js
index 5289fdd..c005222 100644
--- a/ambari-web/test/utils/date/date_test.js
+++ b/ambari-web/test/utils/date/date_test.js
@@ -90,23 +90,23 @@ describe('date', function () {
       {i: '30', e:'30 ms'},
       {i: '300', e:'300 ms'},
       {i: '999', e:'999 ms'},
-      {i: '1000', e:'1.00 secs'},
-      {i: '3000', e:'3.00 secs'},
-      {i: '35000', e:'35.00 secs'},
-      {i: '350000', e:'350.00 secs'},
-      {i: '999999', e:'1000.00 secs'},
-      {i: '1000000', e:'16.67 mins'},
-      {i: '3500000', e:'58.33 mins'},
-      {i: '35000000', e:'9.72 hours'},
-      {i: '350000000', e:'4.05 days'},
-      {i: '3500000000', e:'40.51 days'},
-      {i: '35000000000', e:'405.09 days'}
+      {i: '1000', e:'1 secs'},
+      {i: '3000', e:'3 secs'},
+      {i: '35000', e:'35 secs'},
+      {i: '350000', e:'350 secs'},
+      {i: '999999', e:'1000 secs'},
+      {i: '1000000', e:'17 mins'},
+      {i: '3500000', e:'58 mins'},
+      {i: '35000000', e:'9h 43m 20s'},
+      {i: '350000000', e:'4d 1h 13m'},
+      {i: '3500000000', e:'40d 12h 13m'},
+      {i: '35000000000', e:'405d 2h 13m'}
     ]);
 
     describe('Correct data', function(){
       tests.forEach(function(test) {
         it(test.i, function() {
-          expect(date.timingFormat(test.i)).to.equal(test.e);
+          expect(date.timingFormat(test.i)).to.be.equal(test.e);
         });
       });
     });
@@ -148,12 +148,12 @@ describe('date', function () {
       {
         startTimestamp: 1349752195000,
         endTimestamp: 1349752199000,
-        e: '4.00 secs'
+        e: '4 secs'
       },
       {
         startTimestamp: 1349752195000,
         endTimestamp: 1367752195000,
-        e: '208.33 days'
+        e: '213d 8h 0m'
       },
       {
         startTimestamp: -10000000,
@@ -170,7 +170,7 @@ describe('date', function () {
         startTimestamp: 100000000,
         endTimestamp: -1,
         stubbed: true,
-        e: '19.00 secs'
+        e: '19 secs'
       }
     ];
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ab53946f/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js
index 0ca7080..13da96e 100644
--- a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js
+++ b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_history_view_test.js
@@ -135,13 +135,13 @@ describe('App.MainAdminStackUpgradeHistoryView', function () {
       event = {
         context: Em.Object.create({
           isSelected: false,
-          value: 'ALL',
+          value: 'ALL'
         })
       };
       view.set('categories', [
         Em.Object.create({
           isSelected: true,
-          value: 'UPGRADE_COMPLETED',
+          value: 'UPGRADE_COMPLETED'
         }),
         event.context
       ]);
@@ -192,12 +192,12 @@ describe('App.MainAdminStackUpgradeHistoryView', function () {
       Em.Object.create({
         directionLabel: Em.I18n.t('common.upgrade'),
         upgradeTypeLabel: Em.I18n.t('common.rolling'),
-        duration: '1.00 hours'
+        duration: '1h 0m 0s'
       }),
       Em.Object.create({
         directionLabel: Em.I18n.t('common.downgrade'),
         upgradeTypeLabel: Em.I18n.t('common.hostOrdered'),
-        duration: '2.00 hours'
+        duration: '2h 0m 0s'
       })
     ];
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ab53946f/ambari-web/test/views/main/dashboard/widgets/hbase_master_uptime_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/hbase_master_uptime_test.js b/ambari-web/test/views/main/dashboard/widgets/hbase_master_uptime_test.js
index 4f19a35..15b1ce3 100644
--- a/ambari-web/test/views/main/dashboard/widgets/hbase_master_uptime_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets/hbase_master_uptime_test.js
@@ -33,8 +33,8 @@ describe('App.HBaseMasterUptimeView', function () {
       e: {
         isGreen: true,
         isNA: false,
-        content: '192.1 d',
-        data: 192.1
+        content: '197.0 d',
+        data: 197
       }
     },
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/ab53946f/ambari-web/test/views/main/dashboard/widgets/namenode_uptime_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/namenode_uptime_test.js b/ambari-web/test/views/main/dashboard/widgets/namenode_uptime_test.js
index bfd101c..50a39c7 100644
--- a/ambari-web/test/views/main/dashboard/widgets/namenode_uptime_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets/namenode_uptime_test.js
@@ -35,8 +35,8 @@ describe('App.NameNodeUptimeView', function() {
         isOrange: false,
         isGreen: true,
         isNA: false,
-        content: '192.1 d',
-        data: 192.1
+        content: '197.0 d',
+        data: 197
       }
     },
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/ab53946f/ambari-web/test/views/main/dashboard/widgets/resource_manager_uptime_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/resource_manager_uptime_test.js b/ambari-web/test/views/main/dashboard/widgets/resource_manager_uptime_test.js
index d4a9b34..828133e 100644
--- a/ambari-web/test/views/main/dashboard/widgets/resource_manager_uptime_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets/resource_manager_uptime_test.js
@@ -33,8 +33,8 @@ describe('App.ResourceManagerUptimeView', function() {
       e: {
         isGreen: true,
         isNA: false,
-        content: '192.1 d',
-        data: 192.1
+        content: '197.0 d',
+        data: 197
       }
     },
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/ab53946f/ambari-web/test/views/main/dashboard/widgets/uptime_text_widget_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets/uptime_text_widget_test.js b/ambari-web/test/views/main/dashboard/widgets/uptime_text_widget_test.js
index fa20593..00d224c 100644
--- a/ambari-web/test/views/main/dashboard/widgets/uptime_text_widget_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets/uptime_text_widget_test.js
@@ -60,7 +60,7 @@ describe('App.UptimeTextDashboardWidgetView', function() {
       {
         diff: 10*1000,
         e: {
-          timeUnit: 's'
+          timeUnit: 'secs'
         }
       },
       {
@@ -78,7 +78,7 @@ describe('App.UptimeTextDashboardWidgetView', function() {
       {
         diff: 1800*1000,
         e: {
-          timeUnit: 'min'
+          timeUnit: 'mins'
         }
       }
     ];


[21/50] [abbrv] ambari git commit: AMBARI-19823. If migration is hampered for any reason, no message is shown in the Hue-Ambari Migration view UI. (Ishan Bhatt via gauravn7)

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-udf.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-udf.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-udf.hbs
index 74b1cca..b2112c8 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-udf.hbs
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-udf.hbs
@@ -14,96 +14,105 @@ See the License for the specific language governing permissions and
 limitations under the License.
 }}
 <div class="panel panel-default">
-    <div class="panel-heading">
-        <h3>Pig UDFs Migration
-        </h3>
+  <div class="panel-heading">
+    <h3>Pig UDFs Migration
+    </h3>
+  </div>
+  <div class="panel-body">
+    <div class="row">
+      <div class="col-sm-3">
+        User Name
+        <font size="3" color="red"> *
+        </font>
+      </div>
+      <div class="col-sm-3">
+        {{ember-selectize content=model.usersdetail  selection=model.selections optionValuePath="content.username" optionLabelPath="content.username" multiple=true add-value="addSelection" remove-value="removeSelection" placeholder="Select an userName" }}
+      </div>
     </div>
-    <div class="panel-body">
-        <div class="row">
-            <div class="col-sm-3">
-                User Name
-                <font size="3" color="red"> *
-                </font>
-            </div>
-            <div class="col-sm-3">
-                {{ember-selectize content=model.usersdetail  selection=model.selections optionValuePath="content.username" optionLabelPath="content.username" multiple=true add-value="addSelection" remove-value="removeSelection" placeholder="Select an userName" }}
-            </div>
-        </div>
-        <br>
-        <div class="row">
-            <div class="col-sm-3">
-                Instance Name
-                <font size="3" color="red"> *
-                </font>
-            </div>
-            <div class="col-sm-3">
-                {{ember-selectize content=model.piginstancedetail  optionValuePath="content.instanceName" optionLabelPath="content.instanceName" value=instancename  placeholder="Select an Instance name" }}
-            </div>
-        </div>
-        <br>
-        <div class="row">
-            <div class="col-sm-3">
-                <button class="btn btn-success" {{action 'submitResult'}} disabled={{jobstatus}}>Submit
-                </button>
-            </div>
-            <div class="col-sm-3">
-                {{#if jobstatus}}
-                    <h5>
-                        <font color="green">Job has been Submitted.
-                        </font>
-                    </h5>
-                {{/if}}
-            </div>
-        </div>
-        <br>
-        <div class="row">
-            <div class="col-sm-9">
-                {{#if jobstatus}}
-                    <br>
-                    <div class="progress" id="progressbar" style="">
-                        <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="50" aria-valuemin="0" aria-valuemax="50"  style="width:{{progressBar}}%">
-                        </div>
-                    </div>
-                {{/if}}
-            </div>
-        </div>
-        <div class="row">
-            <div class="col-sm-9">
-                {{#if completionStatus}}
-                    <h2>Migration Report
-                    </h2>
-                    <table class="table table-hover">
-                        <thead>
-                        <tr>
-                            <th>Parameters</th>
-                            <th>Status</th>
-                        </tr>
-                        </thead>
-                        <tbody>
-                        <tr>
-                            <td>Number of UDFs Transferred</td>
-                            <td>{{numberOfQueryTransfered}}</td>
-                        </tr>
-                        <tr>
-                            <td>Total Number of UDFs</td>
-                            <td>{{totalNoQuery}}</td>
-                        </tr>
-                        <tr>
-                            <td>Total Time Taken</td>
-                            <td>{{totalTimeTaken}}ms</td>
-                        </tr>
-                        <tr>
-                            <td>Hue Users</td>
-                            <td>{{Username}}</td>
-                        </tr>
-                        <tr>
-                            <td>Ambari Instance Name(Target)</td>
-                            <td>{{instanceName}}</td>
-                        </tr>
-                        </tbody>
-                    </table>
-                {{/if}}
+    <br>
+    <div class="row">
+      <div class="col-sm-3">
+        Instance Name
+        <font size="3" color="red"> *
+        </font>
+      </div>
+      <div class="col-sm-3">
+        {{ember-selectize content=model.piginstancedetail  optionValuePath="content.instanceName" optionLabelPath="content.instanceName" value=instancename  placeholder="Select an Instance name" }}
+      </div>
+    </div>
+    <br>
+    <div class="row">
+      <div class="col-sm-3">
+        <button class="btn btn-success" {{action 'submitResult'}} disabled={{jobstatus}}>Submit
+        </button>
+      </div>
+      <div class="col-sm-3">
+        {{#if jobstatus}}
+          <h5>
+            <font color="green">Job has been Submitted.
+            </font>
+          </h5>
+        {{/if}}
+      </div>
+    </div>
+    <br>
+    <div class="row">
+      <div class="col-sm-9">
+        {{#if jobstatus}}
+          <br>
+          <div class="progress" id="progressbar" style="">
+            <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="50"
+                 aria-valuemin="0" aria-valuemax="50" style="width:{{progressBar}}%">
             </div>
-        </div>
+          </div>
+        {{/if}}
+      </div>
+    </div>
+    <div class="row">
+      <div class="col-sm-9">
+        {{#if error}}
+          <h3>Error Occured during migration</h3>
+          <p><span class="alert-danger">{{error}}</span></p>
+        {{/if}}
+      </div>
+    </div>
+    <div class="row">
+      <div class="col-sm-9">
+        {{#if completionStatus}}
+          <h2>Migration Report
+          </h2>
+          <table class="table table-hover">
+            <thead>
+            <tr>
+              <th>Parameters</th>
+              <th>Status</th>
+            </tr>
+            </thead>
+            <tbody>
+            <tr>
+              <td>Number of UDFs Transferred</td>
+              <td>{{numberOfQueryTransfered}}</td>
+            </tr>
+            <tr>
+              <td>Total Number of UDFs</td>
+              <td>{{totalNoQuery}}</td>
+            </tr>
+            <tr>
+              <td>Total Time Taken</td>
+              <td>{{totalTimeTaken}}ms</td>
+            </tr>
+            <tr>
+              <td>Hue Users</td>
+              <td>{{Username}}</td>
+            </tr>
+            <tr>
+              <td>Ambari Instance Name(Target)</td>
+              <td>{{instanceName}}</td>
+            </tr>
+            </tbody>
+          </table>
+        {{/if}}
+      </div>
     </div>
+  </div>
 </div>


[33/50] [abbrv] ambari git commit: AMBARI-20008. Add "Manage alert notifications" authorization (Eugene Chekanskiy via rlevas)

Posted by nc...@apache.org.
AMBARI-20008. Add "Manage alert notifications" authorization (Eugene Chekanskiy via rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bfaaba2f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bfaaba2f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bfaaba2f

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: bfaaba2fa2ce0c271c5fa3de3c4b1134715407df
Parents: 1f1bfb8
Author: Eugene Chekanskiy <ec...@hortonworks.com>
Authored: Thu Feb 16 13:44:43 2017 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Thu Feb 16 13:44:43 2017 -0500

----------------------------------------------------------------------
 .../internal/AlertTargetResourceProvider.java       | 11 +----------
 .../security/authorization/RoleAuthorization.java   |  1 +
 .../ambari/server/upgrade/UpgradeCatalog250.java    | 16 ++++++++++++++++
 .../src/main/resources/Ambari-DDL-Derby-CREATE.sql  |  3 +++
 .../src/main/resources/Ambari-DDL-MySQL-CREATE.sql  |  3 +++
 .../src/main/resources/Ambari-DDL-Oracle-CREATE.sql |  3 +++
 .../main/resources/Ambari-DDL-Postgres-CREATE.sql   |  3 +++
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql     |  3 +++
 .../main/resources/Ambari-DDL-SQLServer-CREATE.sql  |  3 +++
 .../server/upgrade/UpgradeCatalog250Test.java       |  5 +++++
 10 files changed, 41 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
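
The net effect of this change is that creating, updating, and deleting alert targets is gated on a dedicated CLUSTER.MANAGE_ALERT_NOTIFICATIONS authorization, granted to both AMBARI.ADMINISTRATOR and CLUSTER.ADMINISTRATOR during upgrade, instead of the broader CLUSTER.MANAGE_ALERTS authorization at the AMBARI resource scope. A minimal standalone sketch of that gate (not Ambari code; the enum and method below are local stand-ins for illustration only):

    import java.util.EnumSet;

    public class AlertTargetAuthSketch {
      // Local stand-in for Ambari's RoleAuthorization enum, limited to the two values involved.
      enum RoleAuthorization { CLUSTER_MANAGE_ALERTS, CLUSTER_MANAGE_ALERT_NOTIFICATIONS }

      // After the patch, only the dedicated notifications authorization opens the gate;
      // holding the broader alerts authorization is no longer sufficient on its own.
      static boolean mayManageAlertTargets(EnumSet<RoleAuthorization> granted) {
        return granted.contains(RoleAuthorization.CLUSTER_MANAGE_ALERT_NOTIFICATIONS);
      }

      public static void main(String[] args) {
        System.out.println(mayManageAlertTargets(
            EnumSet.of(RoleAuthorization.CLUSTER_MANAGE_ALERTS)));              // false
        System.out.println(mayManageAlertTargets(
            EnumSet.of(RoleAuthorization.CLUSTER_MANAGE_ALERT_NOTIFICATIONS))); // true
      }
    }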


http://git-wip-us.apache.org/repos/asf/ambari/blob/bfaaba2f/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProvider.java
index 4010528..0b7f1db 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProvider.java
@@ -48,7 +48,6 @@ import org.apache.ambari.server.notifications.TargetConfigurationResult;
 import org.apache.ambari.server.orm.dao.AlertDispatchDAO;
 import org.apache.ambari.server.orm.entities.AlertGroupEntity;
 import org.apache.ambari.server.orm.entities.AlertTargetEntity;
-import org.apache.ambari.server.security.authorization.ResourceType;
 import org.apache.ambari.server.security.authorization.RoleAuthorization;
 import org.apache.ambari.server.state.AlertState;
 import org.apache.ambari.server.state.alert.AlertGroup;
@@ -128,10 +127,7 @@ public class AlertTargetResourceProvider extends
   AlertTargetResourceProvider() {
     super(PROPERTY_IDS, KEY_PROPERTY_IDS);
 
-    // For now only allow an Ambari administrator to create, update, and manage Alert Targets.
-    // If an alert target can associated with a particular cluster, than a cluster administrator
-    // should be able to do this as well.
-    EnumSet<RoleAuthorization> requiredAuthorizations = EnumSet.of(RoleAuthorization.CLUSTER_MANAGE_ALERTS);
+    EnumSet<RoleAuthorization> requiredAuthorizations = EnumSet.of(RoleAuthorization.CLUSTER_MANAGE_ALERT_NOTIFICATIONS);
     setRequiredCreateAuthorizations(requiredAuthorizations);
     setRequiredUpdateAuthorizations(requiredAuthorizations);
     setRequiredDeleteAuthorizations(requiredAuthorizations);
@@ -252,11 +248,6 @@ public class AlertTargetResourceProvider extends
     return PK_PROPERTY_IDS;
   }
 
-  @Override
-  protected ResourceType getResourceType(Request request, Predicate predicate) {
-    return ResourceType.AMBARI;
-  }
-
   /**
    * Create and persist {@link AlertTargetEntity} from the map of properties.
    *

http://git-wip-us.apache.org/repos/asf/ambari/blob/bfaaba2f/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
index 969772f..cd35c2c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/RoleAuthorization.java
@@ -54,6 +54,7 @@ public enum RoleAuthorization {
   CLUSTER_VIEW_STATUS_INFO("CLUSTER.VIEW_STATUS_INFO"),
   CLUSTER_RUN_CUSTOM_COMMAND("CLUSTER.RUN_CUSTOM_COMMAND"),
   CLUSTER_MANAGE_AUTO_START("CLUSTER.MANAGE_AUTO_START"),
+  CLUSTER_MANAGE_ALERT_NOTIFICATIONS("CLUSTER.MANAGE_ALERT_NOTIFICATIONS"),
   HOST_ADD_DELETE_COMPONENTS("HOST.ADD_DELETE_COMPONENTS"),
   HOST_ADD_DELETE_HOSTS("HOST.ADD_DELETE_HOSTS"),
   HOST_TOGGLE_MAINTENANCE("HOST.TOGGLE_MAINTENANCE"),

http://git-wip-us.apache.org/repos/asf/ambari/blob/bfaaba2f/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
index 5e929e3..bfab0fe 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
@@ -178,6 +178,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
     updateYarnSite();
     updateRangerUrlConfigs();
     addManageServiceAutoStartPermissions();
+    addManageAlertNotificationsPermissions();
   }
 
   /**
@@ -1011,6 +1012,21 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
   }
 
   /**
+   * Add permissions for managing alert notifications configuration.
+   * <p>
+   * <ul>
+   * <li>CLUSTER.MANAGE_ALERT_NOTIFICATIONS permissions for AMBARI.ADMINISTRATOR, CLUSTER.ADMINISTRATOR</li>
+   * </ul>
+   */
+  protected void addManageAlertNotificationsPermissions() throws SQLException {
+    Collection<String> roles;
+    roles = Arrays.asList(
+        "AMBARI.ADMINISTRATOR:AMBARI",
+        "CLUSTER.ADMINISTRATOR:CLUSTER");
+    addRoleAuthorization("CLUSTER.MANAGE_ALERT_NOTIFICATIONS", "Manage alert notifications configuration", roles);
+  }
+
+  /**
    * Updates Ranger admin url for Ranger plugin supported configs.
    *
    * @throws AmbariException

http://git-wip-us.apache.org/repos/asf/ambari/blob/bfaaba2f/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
index c7d7990..f007b53 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
@@ -1258,6 +1258,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
   SELECT 'CLUSTER.MANAGE_USER_PERSISTED_DATA', 'Manage cluster-level user persisted data' FROM SYSIBM.SYSDUMMY1 UNION ALL
   SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' FROM SYSIBM.SYSDUMMY1 UNION ALL
   SELECT 'CLUSTER.MANAGE_AUTO_START', 'Manage service auto-start configuration' FROM SYSIBM.SYSDUMMY1 UNION ALL
+  SELECT 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS', 'Manage alert notifications configuration' FROM SYSIBM.SYSDUMMY1 UNION ALL
   SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' FROM SYSIBM.SYSDUMMY1 UNION ALL
   SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' FROM SYSIBM.SYSDUMMY1 UNION ALL
   SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' FROM SYSIBM.SYSDUMMY1 UNION ALL
@@ -1413,6 +1414,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
   SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR'  UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+  SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
 
 -- Set authorizations for Administrator role
@@ -1456,6 +1458,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
   SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR'  UNION ALL
   SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR'  UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR'  UNION ALL
+  SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR'  UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR'  UNION ALL
   SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR'  UNION ALL

http://git-wip-us.apache.org/repos/asf/ambari/blob/bfaaba2f/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index de79328..f6cb896 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -1206,6 +1206,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
   SELECT 'CLUSTER.UPGRADE_DOWNGRADE_STACK', 'Upgrade/downgrade stack' UNION ALL
   SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' UNION ALL
   SELECT 'CLUSTER.MANAGE_AUTO_START', 'Manage service auto-start configuration' UNION ALL
+  SELECT 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS', 'Manage alert notifications configuration' UNION ALL
   SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
   SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
   SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage administrative settings' UNION ALL
@@ -1363,6 +1364,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
   SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+  SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
 
 -- Set authorizations for Administrator role
@@ -1408,6 +1410,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
   SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+  SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL

http://git-wip-us.apache.org/repos/asf/ambari/blob/bfaaba2f/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 16c5864..19253e8 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -1204,6 +1204,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
   SELECT 'CLUSTER.UPGRADE_DOWNGRADE_STACK', 'Upgrade/downgrade stack' FROM dual UNION ALL
   SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' FROM dual UNION ALL
   SELECT 'CLUSTER.MANAGE_AUTO_START', 'Manage service auto-start configuration' FROM dual UNION ALL
+  SELECT 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS', 'Manage alert notifications configuration' FROM dual UNION ALL
   SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' FROM dual UNION ALL
   SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' FROM dual UNION ALL
   SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' FROM dual UNION ALL
@@ -1361,6 +1362,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
   SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+  SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
 
 -- Set authorizations for Administrator role
@@ -1406,6 +1408,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
   SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+  SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL

http://git-wip-us.apache.org/repos/asf/ambari/blob/bfaaba2f/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 91610bb..b13a9e3 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -1186,6 +1186,7 @@ INSERT INTO roleauthorization(authorization_id, authorization_name)
   SELECT 'CLUSTER.UPGRADE_DOWNGRADE_STACK', 'Upgrade/downgrade stack' UNION ALL
   SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' UNION ALL
   SELECT 'CLUSTER.MANAGE_AUTO_START', 'Manage service auto-start configuration' UNION ALL
+  SELECT 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS', 'Manage alert notifications configuration' UNION ALL
   SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
   SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
   SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage administrative settings' UNION ALL
@@ -1343,6 +1344,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
   SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+  SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
 
 -- Set authorizations for Administrator role
@@ -1388,6 +1390,7 @@ INSERT INTO permission_roleauthorization(permission_id, authorization_id)
   SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+  SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
   SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL

http://git-wip-us.apache.org/repos/asf/ambari/blob/bfaaba2f/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index aebbcb0..cf2954a 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -1201,6 +1201,7 @@ insert into adminpermission(permission_id, permission_name, resource_type_id, pe
     SELECT 'CLUSTER.UPGRADE_DOWNGRADE_STACK', 'Upgrade/downgrade stack' UNION ALL
     SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' UNION ALL
     SELECT 'CLUSTER.MANAGE_AUTO_START', 'Manage service auto-start configuration' UNION ALL
+    SELECT 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS', 'Manage alert notifications configuration' UNION ALL
     SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
     SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
     SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' UNION ALL
@@ -1358,6 +1359,7 @@ insert into adminpermission(permission_id, permission_name, resource_type_id, pe
     SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+    SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
 
   -- Set authorizations for Administrator role
@@ -1403,6 +1405,7 @@ insert into adminpermission(permission_id, permission_name, resource_type_id, pe
     SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+    SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.RUN_SERVICE_CHECK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL

http://git-wip-us.apache.org/repos/asf/ambari/blob/bfaaba2f/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index d3eaa6c..16c269a 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -1215,6 +1215,7 @@ BEGIN TRANSACTION
     SELECT 'CLUSTER.UPGRADE_DOWNGRADE_STACK', 'Upgrade/downgrade stack' UNION ALL
     SELECT 'CLUSTER.RUN_CUSTOM_COMMAND', 'Perform custom cluster-level actions' UNION ALL
     SELECT 'CLUSTER.MANAGE_AUTO_START', 'Manage service auto-start configuration' UNION ALL
+    SELECT 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS', 'Manage alert notifications configuration' UNION ALL
     SELECT 'AMBARI.ADD_DELETE_CLUSTERS', 'Create new clusters' UNION ALL
     SELECT 'AMBARI.RENAME_CLUSTER', 'Rename clusters' UNION ALL
     SELECT 'AMBARI.MANAGE_SETTINGS', 'Manage settings' UNION ALL
@@ -1372,6 +1373,7 @@ BEGIN TRANSACTION
     SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
+    SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='CLUSTER.ADMINISTRATOR';
 
   -- Set authorizations for Administrator role
@@ -1417,6 +1419,7 @@ BEGIN TRANSACTION
     SELECT permission_id, 'CLUSTER.UPGRADE_DOWNGRADE_STACK' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.MANAGE_USER_PERSISTED_DATA' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.MANAGE_AUTO_START' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
+    SELECT permission_id, 'CLUSTER.MANAGE_ALERT_NOTIFICATIONS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'CLUSTER.RUN_CUSTOM_COMMAND' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'AMBARI.ADD_DELETE_CLUSTERS' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL
     SELECT permission_id, 'AMBARI.RENAME_CLUSTER' FROM adminpermission WHERE permission_name='AMBARI.ADMINISTRATOR' UNION ALL

http://git-wip-us.apache.org/repos/asf/ambari/blob/bfaaba2f/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
index f4212d6..a08b38b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
@@ -356,6 +356,7 @@ public class UpgradeCatalog250Test {
     Method updateHiveLlapConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateHiveLlapConfigs");
     Method updateHIVEInteractiveConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateHIVEInteractiveConfigs");
     Method addManageServiceAutoStartPermissions = UpgradeCatalog250.class.getDeclaredMethod("addManageServiceAutoStartPermissions");
+    Method addManageAlertNotificationsPermissions = UpgradeCatalog250.class.getDeclaredMethod("addManageAlertNotificationsPermissions");
     Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
     Method updateTablesForZeppelinViewRemoval = UpgradeCatalog250.class.getDeclaredMethod("updateTablesForZeppelinViewRemoval");
     Method updateZeppelinConfigs = UpgradeCatalog250.class.getDeclaredMethod("updateZeppelinConfigs");
@@ -374,6 +375,7 @@ public class UpgradeCatalog250Test {
         .addMockedMethod(updateHiveLlapConfigs)
         .addMockedMethod(addNewConfigurationsFromXml)
         .addMockedMethod(addManageServiceAutoStartPermissions)
+        .addMockedMethod(addManageAlertNotificationsPermissions)
         .addMockedMethod(updateHIVEInteractiveConfigs)
         .addMockedMethod(updateTablesForZeppelinViewRemoval)
         .addMockedMethod(updateZeppelinConfigs)
@@ -425,6 +427,9 @@ public class UpgradeCatalog250Test {
     upgradeCatalog250.addManageServiceAutoStartPermissions();
     expectLastCall().once();
 
+    upgradeCatalog250.addManageAlertNotificationsPermissions();
+    expectLastCall().once();
+
     upgradeCatalog250.updateYarnSite();
     expectLastCall().once();
 


[35/50] [abbrv] ambari git commit: AMBARI-20017. PixieDust - Decrease Service Check running time under 3 mins (dlysnichenko)

Posted by nc...@apache.org.
AMBARI-20017. PixieDust - Decrease Service Check running time under 3 mins (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/18fc2586
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/18fc2586
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/18fc2586

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 18fc2586fa7410823b3d7a9354cf5b10be0dc2e0
Parents: 347ba2a
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Thu Feb 16 23:17:09 2017 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Thu Feb 16 23:17:09 2017 +0200

----------------------------------------------------------------------
 .../controller/AmbariCustomCommandExecutionHelper.java   |  8 ++++++++
 .../org/apache/ambari/server/state/ConfigHelper.java     |  2 ++
 .../stacks/HDP/2.0.6/configuration/cluster-env.xml       | 11 +++++++++++
 3 files changed, 21 insertions(+)
----------------------------------------------------------------------
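
The core of the change is a new timeout rule: when the cluster-env property service_check_type is set to "minimal", the service-check command timeout is halved but never allowed to drop below MIN_STRICT_SERVICE_CHECK_TIMEOUT (120 seconds); "full" (the default) keeps the configured value. A standalone sketch of that arithmetic (not Ambari code; the class and method names are made up for illustration):

    public class ServiceCheckTimeoutSketch {
      // Mirrors the constant the patch adds to AmbariCustomCommandExecutionHelper.
      static final int MIN_STRICT_SERVICE_CHECK_TIMEOUT = 120;

      // "minimal" checks run with half the configured timeout, floored at 120 seconds;
      // any other check type keeps the configured value unchanged.
      static int effectiveTimeout(int configuredSeconds, String serviceCheckType) {
        if (!"minimal".equals(serviceCheckType)) {
          return configuredSeconds;
        }
        int halved = configuredSeconds / 2;
        return Math.max(halved, MIN_STRICT_SERVICE_CHECK_TIMEOUT);
      }

      public static void main(String[] args) {
        System.out.println(effectiveTimeout(600, "minimal")); // 300
        System.out.println(effectiveTimeout(200, "minimal")); // 120 (floored)
        System.out.println(effectiveTimeout(600, "full"));    // 600
      }
    }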


http://git-wip-us.apache.org/repos/asf/ambari/blob/18fc2586/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index b601893..eeb2de5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -142,6 +142,8 @@ public class AmbariCustomCommandExecutionHelper {
 
   private final static String ALIGN_MAINTENANCE_STATE = "align_maintenance_state";
 
+  public final static int MIN_STRICT_SERVICE_CHECK_TIMEOUT = 120;
+  
   @Inject
   private ActionMetadata actionMetadata;
 
@@ -766,6 +768,12 @@ public class AmbariCustomCommandExecutionHelper {
     }
 
     commandParams.put(COMMAND_TIMEOUT, commandTimeout);
+    String checkType = configHelper.getValueFromDesiredConfigurations(cluster, ConfigHelper.CLUSTER_ENV, ConfigHelper.SERVICE_CHECK_TYPE);
+    if (ConfigHelper.SERVICE_CHECK_MINIMAL.equals(checkType)) {
+      int actualTimeout = Integer.parseInt(commandParams.get(COMMAND_TIMEOUT)) / 2;
+      actualTimeout = actualTimeout < MIN_STRICT_SERVICE_CHECK_TIMEOUT ? MIN_STRICT_SERVICE_CHECK_TIMEOUT : actualTimeout;
+      commandParams.put(COMMAND_TIMEOUT, Integer.toString(actualTimeout));
+    }
     commandParams.put(SERVICE_PACKAGE_FOLDER, serviceInfo.getServicePackageFolder());
     commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/18fc2586/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index 0e5b434..6572bbb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -86,6 +86,7 @@ public class ConfigHelper {
   public static final String CLUSTER_ENV = "cluster-env";
   public static final String CLUSTER_ENV_ALERT_REPEAT_TOLERANCE = "alerts_repeat_tolerance";
   public static final String CLUSTER_ENV_RETRY_ENABLED = "command_retry_enabled";
+  public static final String SERVICE_CHECK_TYPE = "service_check_type";
   public static final String CLUSTER_ENV_RETRY_COMMANDS = "commands_to_retry";
   public static final String CLUSTER_ENV_RETRY_MAX_TIME_IN_SEC = "command_retry_max_time_in_sec";
   public static final String COMMAND_RETRY_MAX_TIME_IN_SEC_DEFAULT = "600";
@@ -94,6 +95,7 @@ public class ConfigHelper {
 
   public static final String HTTP_ONLY = "HTTP_ONLY";
   public static final String HTTPS_ONLY = "HTTPS_ONLY";
+  public static final String SERVICE_CHECK_MINIMAL = "minimal";
 
   /**
    * The tag given to newly created versions.

http://git-wip-us.apache.org/repos/asf/ambari/blob/18fc2586/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
index 3af8f08..3f74aa7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
@@ -296,4 +296,15 @@ gpgcheck=0</value>
     <description>Flag to turn on when external setup of External Ranger is done.</description>
     <on-ambari-upgrade add="false"/>
   </property>
+  <property>
+    <name>service_check_type</name>
+    <display-name>Service Check Type</display-name>
+    <value>full</value>
+    <description>Indicates the complexity of the service check.  Valid values are 'minimal' or 'full'.</description>
+    <on-ambari-upgrade add="true"/>
+    <value-attributes>
+      <visible>true</visible>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+  </property>
 </configuration>


[31/50] [abbrv] ambari git commit: AMBARI-20049. One way SSL fallback logic can cause some agents to be connected with 2-way SSL (aonishuk)

Posted by nc...@apache.org.
AMBARI-20049. One way SSL fallback logic can cause some agents to be connected with 2-way SSL (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4eaec8ea
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4eaec8ea
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4eaec8ea

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 4eaec8eab5175303d8ba39439ac60f3deec25f19
Parents: ab53946
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Feb 16 17:36:27 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Feb 16 17:36:27 2017 +0200

----------------------------------------------------------------------
 .../src/main/python/ambari_agent/security.py       | 17 +++++------------
 .../src/test/python/ambari_agent/TestSecurity.py   |  2 ++
 2 files changed, 7 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4eaec8ea/ambari-agent/src/main/python/ambari_agent/security.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/security.py b/ambari-agent/src/main/python/ambari_agent/security.py
index 72ae954..45de7bb 100644
--- a/ambari-agent/src/main/python/ambari_agent/security.py
+++ b/ambari-agent/src/main/python/ambari_agent/security.py
@@ -55,18 +55,11 @@ class VerifiedHTTPSConnection(httplib.HTTPSConnection):
         'Server require two-way SSL authentication. Use it instead of one-way...')
 
     if not self.two_way_ssl_required:
-      try:
-        sock = self.create_connection()
-        self.sock = ssl.wrap_socket(sock, cert_reqs=ssl.CERT_NONE)
-        logger.info('SSL connection established. Two-way SSL authentication is '
-                    'turned off on the server.')
-      except (ssl.SSLError, AttributeError):
-        self.two_way_ssl_required = True
-        logger.info(
-          'Insecure connection to https://' + self.host + ':' + self.port +
-          '/ failed. Reconnecting using two-way SSL authentication..')
-
-    if self.two_way_ssl_required:
+      sock = self.create_connection()
+      self.sock = ssl.wrap_socket(sock, cert_reqs=ssl.CERT_NONE)
+      logger.info('SSL connection established. Two-way SSL authentication is '
+                  'turned off on the server.')
+    else:
       self.certMan = CertificateManager(self.config, self.host)
       self.certMan.initSecurity()
       agent_key = self.certMan.getAgentKeyName()

http://git-wip-us.apache.org/repos/asf/ambari/blob/4eaec8ea/ambari-agent/src/test/python/ambari_agent/TestSecurity.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestSecurity.py b/ambari-agent/src/test/python/ambari_agent/TestSecurity.py
index 9e28ae7..c9a7fbe 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestSecurity.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestSecurity.py
@@ -102,6 +102,8 @@ class TestSecurity(unittest.TestCase):
     wrap_socket_mock.side_effect=ssl.SSLError()
     connection = security.VerifiedHTTPSConnection("example.com",
       self.config.get('server', 'secured_url_port'), self.config)
+    self.config.isTwoWaySSLConnection = MagicMock(return_value=True)
+
     connection._tunnel_host = False
     connection.sock = None
     try:


[49/50] [abbrv] ambari git commit: AMBARI-20066. 'Install' button is shown in Admin > Stack and Versions > Version page for the original stack version installed via Install Wizard (ncole)

Posted by nc...@apache.org.
AMBARI-20066. 'Install' button is shown in Admin > Stack and Versions > Version page for the original stack version installed via Install Wizard (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f2cb1b6e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f2cb1b6e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f2cb1b6e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: f2cb1b6ef495d0708024749dfe5b702d270952a2
Parents: a5dc2d2
Author: Nate Cole <nc...@hortonworks.com>
Authored: Fri Feb 17 14:01:06 2017 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Feb 17 15:52:15 2017 -0500

----------------------------------------------------------------------
 .../server/state/cluster/ClusterImpl.java       | 43 +++++++++--
 .../server/state/cluster/ClusterTest.java       | 78 +++++++++++++-------
 2 files changed, 88 insertions(+), 33 deletions(-)
----------------------------------------------------------------------
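
The heart of the fix is how getEffectiveState() now treats NOT_REQUIRED host versions: when the host states are exactly NOT_REQUIRED plus one other state, the NOT_REQUIRED entries are filtered out and the remaining state wins. Per the updated test, a cluster whose hosts are a mix of NOT_REQUIRED and CURRENT now reports CURRENT, which is presumably what keeps the "Install" button from reappearing for the already-installed version. A standalone sketch of just that rule (not Ambari code; the enum is a local stand-in and the remaining priority branches are omitted):

    import java.util.Arrays;
    import java.util.EnumMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class EffectiveStateSketch {
      // Local stand-in for Ambari's RepositoryVersionState, limited to the values used here.
      enum State { NOT_REQUIRED, INSTALLING, INSTALLED, INSTALL_FAILED, CURRENT, OUT_OF_SYNC }

      static State effectiveState(Map<State, Set<String>> stateToHosts) {
        // New rule: exactly NOT_REQUIRED plus one other state means the other state wins.
        if (stateToHosts.size() == 2 && stateToHosts.containsKey(State.NOT_REQUIRED)) {
          for (State s : stateToHosts.keySet()) {
            if (s != State.NOT_REQUIRED) {
              return s;
            }
          }
        }
        // ... the remaining priority checks (INSTALLING, INSTALL_FAILED, ...) are omitted here.
        return State.OUT_OF_SYNC;
      }

      public static void main(String[] args) {
        Map<State, Set<String>> hosts = new EnumMap<>(State.class);
        hosts.put(State.NOT_REQUIRED, new HashSet<>(Arrays.asList("h1")));
        hosts.put(State.CURRENT, new HashSet<>(Arrays.asList("h2", "h3")));
        System.out.println(effectiveState(hosts)); // CURRENT
      }
    }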


http://git-wip-us.apache.org/repos/asf/ambari/blob/f2cb1b6e/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index db4aa21..2d94f1a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -1313,7 +1313,7 @@ public class ClusterImpl implements Cluster {
    * @return Return the effective Cluster Version State
    */
   private RepositoryVersionState getEffectiveState(Map<RepositoryVersionState, Set<String>> stateToHosts) {
-    if (stateToHosts == null || stateToHosts.keySet().size() < 1) {
+    if (stateToHosts == null || stateToHosts.size() < 1) {
       return null;
     }
 
@@ -1350,18 +1350,51 @@ public class ClusterImpl implements Cluster {
     }
 
     if (totalNotRequired > 0) {
-      if (totalInstalled + totalNotRequired == totalHosts) {
-        return RepositoryVersionState.INSTALLED;
+
+      // !!! if all we have is NOT_REQUIRED and something else, the return should be the something else
+      if (2 == stateToHosts.size()) {
+
+        Map<RepositoryVersionState, Set<String>> map = Maps.filterKeys(stateToHosts,
+            new com.google.common.base.Predicate<RepositoryVersionState>() {
+              @Override
+              public boolean apply(RepositoryVersionState repoState) {
+                return repoState != RepositoryVersionState.NOT_REQUIRED;
+              }
+            });
+
+        // !!! better be true
+        if (1 == map.size()) {
+          return map.keySet().iterator().next();
+        } else {
+          LOG.warn("The mix of NON_REQUIRED hosts is unexpected: {}", stateToHosts);
+          return RepositoryVersionState.OUT_OF_SYNC;
+        }
       }
 
-      if (totalInstalling + totalInstalled + totalNotRequired == totalHosts) {
+      // if any hosts are still installing, then cluster is INSTALLING
+      if (totalInstalling > 0) {
         return RepositoryVersionState.INSTALLING;
       }
 
+      // if any hosts are install_failed, then cluster is INSTALL_FAILED
+      if (totalInstallFailed > 0) {
+        return RepositoryVersionState.INSTALL_FAILED;
+      }
+
+      // should be covered by the 2-state check above
+      if (totalInstalled > 0) {
+        return RepositoryVersionState.INSTALLED;
+      }
+
+      // rare
+      if (totalNotRequired == totalHosts) {
+        return RepositoryVersionState.NOT_REQUIRED;
+      }
+
     }
 
     // Also returns when have a mix of CURRENT and INSTALLING|INSTALLED
-    LOG.warn("have a mix of CURRENT and INSTALLING|INSTALLED host versions, " +
+    LOG.warn("Have a mix of CURRENT and INSTALLING|INSTALLED host versions, " +
       "returning OUT_OF_SYNC as cluster version. Host version states: {}", stateToHosts);
     return RepositoryVersionState.OUT_OF_SYNC;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f2cb1b6e/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
index 6cdfbad..396680a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/cluster/ClusterTest.java
@@ -221,6 +221,10 @@ public class ClusterTest {
   }
 
   private void createDefaultCluster() throws Exception {
+    createDefaultCluster(Sets.newHashSet("h1", "h2"));
+  }
+
+  private void createDefaultCluster(Set<String> hostNames) throws Exception {
     // TODO, use common function
     StackId stackId = new StackId("HDP", "0.1");
     StackEntity stackEntity = stackDAO.find(stackId.getStackName(), stackId.getStackVersion());
@@ -230,11 +234,10 @@ public class ClusterTest {
 
     clusters.addCluster(clusterName, stackId);
 
-    Map<String, String> hostAttributes = new HashMap<String, String>();
+    Map<String, String> hostAttributes = new HashMap<>();
     hostAttributes.put("os_family", "redhat");
     hostAttributes.put("os_release_version", "5.9");
 
-    Set<String> hostNames = new HashSet<String>() {{ add("h1"); add("h2"); }};
     for (String hostName : hostNames) {
       clusters.addHost(hostName);
 
@@ -279,7 +282,7 @@ public class ClusterTest {
     host2.setIpv4("192.168.0.2");
     host3.setIpv4("192.168.0.3");
 
-    List<HostEntity> hostEntities = new ArrayList<HostEntity>();
+    List<HostEntity> hostEntities = new ArrayList<>();
     hostEntities.add(host1);
     hostEntities.add(host2);
 
@@ -309,7 +312,7 @@ public class ClusterTest {
     when(stateEntity.getDesiredStack()).thenReturn(stackEntity);
 
     clusterServiceEntity.setServiceDesiredStateEntity(stateEntity);
-    List<ClusterServiceEntity> clusterServiceEntities = new ArrayList<ClusterServiceEntity>();
+    List<ClusterServiceEntity> clusterServiceEntities = new ArrayList<>();
     clusterServiceEntities.add(clusterServiceEntity);
     clusterEntity.setClusterServiceEntities(clusterServiceEntities);
     return clusterEntity;
@@ -444,8 +447,8 @@ public class ClusterTest {
     For some reason this still uses the metainfo.xml files for these services
     from HDP-2.0.5 stack instead of the provided Stack Id
     */
-    HashMap<String, Set<String>> componentsThatAdvertiseVersion = new HashMap<String, Set<String>>();
-    HashMap<String, Set<String>> componentsThatDontAdvertiseVersion = new HashMap<String, Set<String>>();
+    HashMap<String, Set<String>> componentsThatAdvertiseVersion = new HashMap<>();
+    HashMap<String, Set<String>> componentsThatDontAdvertiseVersion = new HashMap<>();
 
     Set<String> hdfsComponents = new HashSet<String>() {{ add("NAMENODE"); add("DATANODE"); add("HDFS_CLIENT"); }};
     Set<String> zkComponents = new HashSet<String>() {{ add("ZOOKEEPER_SERVER"); add("ZOOKEEPER_CLIENT"); }};
@@ -567,7 +570,7 @@ public class ClusterTest {
     hostInfo.setMemoryTotal(10);
     hostInfo.setMemorySize(100);
     hostInfo.setProcessorCount(10);
-    List<DiskInfo> mounts = new ArrayList<DiskInfo>();
+    List<DiskInfo> mounts = new ArrayList<>();
     mounts.add(new DiskInfo("/dev/sda", "/mnt/disk1",
         "5000000", "4000000", "10%", "size", "fstype"));
     hostInfo.setMounts(mounts);
@@ -839,7 +842,7 @@ public class ClusterTest {
     Assert.assertEquals(1, componentHostMap.get("JOBTRACKER").size());
     Assert.assertTrue(componentHostMap.get("JOBTRACKER").contains("h1"));
 
-    componentHostMap = c1.getServiceComponentHostMap(null, new HashSet<String>(Arrays.asList("HDFS", "MAPREDUCE")));
+    componentHostMap = c1.getServiceComponentHostMap(null, new HashSet<>(Arrays.asList("HDFS", "MAPREDUCE")));
     Assert.assertEquals(3, componentHostMap.size());
     Assert.assertEquals(1, componentHostMap.get("NAMENODE").size());
     Assert.assertTrue(componentHostMap.get("NAMENODE").contains("h1"));
@@ -896,7 +899,7 @@ public class ClusterTest {
     Assert.assertEquals(1, componentHostMap.get("DATANODE").size());
     Assert.assertTrue(componentHostMap.get("DATANODE").contains("h2"));
 
-    componentHostMap = c1.getServiceComponentHostMap(new HashSet<String>(Arrays.asList("h1", "h2", "h3")), null);
+    componentHostMap = c1.getServiceComponentHostMap(new HashSet<>(Arrays.asList("h1", "h2", "h3")), null);
     Assert.assertEquals(3, componentHostMap.size());
     Assert.assertEquals(1, componentHostMap.get("NAMENODE").size());
     Assert.assertTrue(componentHostMap.get("NAMENODE").contains("h1"));
@@ -959,10 +962,10 @@ public class ClusterTest {
   public void testGetAndSetConfigs() throws Exception {
     createDefaultCluster();
 
-    Map<String, Map<String, String>> c1PropAttributes = new HashMap<String, Map<String,String>>();
+    Map<String, Map<String, String>> c1PropAttributes = new HashMap<>();
     c1PropAttributes.put("final", new HashMap<String, String>());
     c1PropAttributes.get("final").put("a", "true");
-    Map<String, Map<String, String>> c2PropAttributes = new HashMap<String, Map<String,String>>();
+    Map<String, Map<String, String>> c2PropAttributes = new HashMap<>();
     c2PropAttributes.put("final", new HashMap<String, String>());
     c2PropAttributes.get("final").put("x", "true");
     Config config1 = configFactory.createNew(c1, "global", "version1",
@@ -1068,7 +1071,7 @@ public class ClusterTest {
     host.setIPv4("ipv4");
     host.setIPv6("ipv6");
 
-    Map<String, String> hostAttributes = new HashMap<String, String>();
+    Map<String, String> hostAttributes = new HashMap<>();
     hostAttributes.put("os_family", "redhat");
     hostAttributes.put("os_release_version", "5.9");
     host.setHostAttributes(hostAttributes);
@@ -1129,7 +1132,7 @@ public class ClusterTest {
     Config config2 = configFactory.createNew(c1, "core-site", "version2",
       new HashMap<String, String>() {{ put("x", "y"); }}, new HashMap<String, Map<String,String>>());
 
-    Set<Config> configs = new HashSet<Config>();
+    Set<Config> configs = new HashSet<>();
     configs.add(config1);
     configs.add(config2);
 
@@ -1190,7 +1193,7 @@ public class ClusterTest {
     Host host1 = clusters.getHost("h1");
     HostEntity hostEntity1 = hostDAO.findByName("h1");
 
-    Map<String, Map<String, String>> propAttributes = new HashMap<String, Map<String,String>>();
+    Map<String, Map<String, String>> propAttributes = new HashMap<>();
     propAttributes.put("final", new HashMap<String, String>());
     propAttributes.get("final").put("test", "true");
     Config config = configFactory.createNew(c1, "hdfs-site", "1", new HashMap<String, String>(){{
@@ -1204,7 +1207,7 @@ public class ClusterTest {
     assertTrue(configs.containsKey(hostEntity1.getHostId()));
     assertEquals(1, configs.get(hostEntity1.getHostId()).size());
 
-    List<Long> hostIds = new ArrayList<Long>();
+    List<Long> hostIds = new ArrayList<>();
     hostIds.add(hostEntity1.getHostId());
 
     configs = c1.getHostsDesiredConfigs(hostIds);
@@ -1294,7 +1297,7 @@ public class ClusterTest {
     Config config2 = configFactory.createNew(c1, "core-site", "version2",
       new HashMap<String, String>() {{ put("x", "y"); }}, new HashMap<String, Map<String,String>>());
 
-    Set<Config> configs = new HashSet<Config>();
+    Set<Config> configs = new HashSet<>();
     configs.add(config1);
     configs.add(config2);
 
@@ -1853,7 +1856,7 @@ public class ClusterTest {
 
   @Test
   public void testRecalculateClusterVersionStateWithNotRequired() throws Exception {
-    createDefaultCluster();
+    createDefaultCluster(Sets.newHashSet("h1", "h2", "h3"));
 
     Host h1 = clusters.getHost("h1");
     h1.setState(HostState.HEALTHY);
@@ -1861,6 +1864,9 @@ public class ClusterTest {
     Host h2 = clusters.getHost("h2");
     h2.setState(HostState.HEALTHY);
 
+    Host h3 = clusters.getHost("h3");
+    h3.setState(HostState.HEALTHY);
+
     // Phase 1: Install bits during distribution
     StackId stackId = new StackId("HDP-0.1");
     final String stackVersion = "0.1-1000";
@@ -1872,13 +1878,29 @@ public class ClusterTest {
         RepositoryVersionState.INSTALLING);
     c1.setCurrentStackVersion(stackId);
 
-    HostVersionEntity hv1 = helper.createHostVersion("h1", repositoryVersionEntity, RepositoryVersionState.INSTALLED);
-    HostVersionEntity hv2 = helper.createHostVersion("h2", repositoryVersionEntity, RepositoryVersionState.NOT_REQUIRED);
+    HostVersionEntity hv1 = helper.createHostVersion("h1", repositoryVersionEntity, RepositoryVersionState.NOT_REQUIRED);
+    HostVersionEntity hv2 = helper.createHostVersion("h2", repositoryVersionEntity, RepositoryVersionState.INSTALLING);
+    HostVersionEntity hv3 = helper.createHostVersion("h3", repositoryVersionEntity, RepositoryVersionState.INSTALLED);
 
     c1.recalculateClusterVersionState(repositoryVersionEntity);
-    //Should remain in its current state
-    checkStackVersionState(stackId, stackVersion,
-        RepositoryVersionState.INSTALLED);
+    ClusterVersionEntity cv = clusterVersionDAO.findByClusterAndStackAndVersion(c1.getClusterName(), stackId, stackVersion);
+    assertEquals(RepositoryVersionState.INSTALLING, cv.getState());
+
+    // 1 in NOT_REQUIRED, 1 in INSTALLED, 1 in CURRENT so should be INSTALLED
+    hv2.setState(RepositoryVersionState.CURRENT);
+    hostVersionDAO.merge(hv2);
+
+    c1.recalculateClusterVersionState(repositoryVersionEntity);
+    cv = clusterVersionDAO.findByClusterAndStackAndVersion(c1.getClusterName(), stackId, stackVersion);
+    assertEquals(RepositoryVersionState.INSTALLED, cv.getState());
+
+    // 1 in NOT_REQUIRED, and 2 in CURRENT, so cluster version should be CURRENT
+    hv3.setState(RepositoryVersionState.CURRENT);
+    hostVersionDAO.merge(hv3);
+
+    c1.recalculateClusterVersionState(repositoryVersionEntity);
+    cv = clusterVersionDAO.findByClusterAndStackAndVersion(c1.getClusterName(), stackId, stackVersion);
+    assertEquals(RepositoryVersionState.CURRENT, cv.getState());
   }
 
 
@@ -1949,7 +1971,7 @@ public class ClusterTest {
 
     RepositoryVersionEntity rv1 = helper.getOrCreateRepositoryVersion(stackId, v1);
 
-    Map<String, String> hostAttributes = new HashMap<String, String>();
+    Map<String, String> hostAttributes = new HashMap<>();
     hostAttributes.put("os_family", "redhat");
     hostAttributes.put("os_release_version", "6.4");
 
@@ -2106,7 +2128,7 @@ public class ClusterTest {
 
     RepositoryVersionEntity rv1 = helper.getOrCreateRepositoryVersion(stackId, v1);
 
-    Map<String, String> hostAttributes = new HashMap<String, String>();
+    Map<String, String> hostAttributes = new HashMap<>();
     hostAttributes.put("os_family", "redhat");
     hostAttributes.put("os_release_version", "6.4");
 
@@ -2180,7 +2202,7 @@ public class ClusterTest {
       h.setIPv4("ipv4");
       h.setIPv6("ipv6");
 
-      Map<String, String> hostAttributes = new HashMap<String, String>();
+      Map<String, String> hostAttributes = new HashMap<>();
       hostAttributes.put("os_family", "redhat");
       hostAttributes.put("os_release_version", "5.9");
       h.setHostAttributes(hostAttributes);
@@ -2249,7 +2271,7 @@ public class ClusterTest {
       h.setIPv4("ipv4");
       h.setIPv6("ipv6");
 
-      Map<String, String> hostAttributes = new HashMap<String, String>();
+      Map<String, String> hostAttributes = new HashMap<>();
       hostAttributes.put("os_family", "redhat");
       hostAttributes.put("os_release_version", "5.9");
       h.setHostAttributes(hostAttributes);
@@ -2582,8 +2604,8 @@ public class ClusterTest {
     // make sure the stacks are different
     Assert.assertFalse(stackId.equals(newStackId));
 
-    Map<String, String> properties = new HashMap<String, String>();
-    Map<String, Map<String, String>> propertiesAttributes = new HashMap<String, Map<String, String>>();
+    Map<String, String> properties = new HashMap<>();
+    Map<String, Map<String, String>> propertiesAttributes = new HashMap<>();
 
     // foo-type for v1 on current stack
     properties.put("foo-property-1", "foo-value-1");


[48/50] [abbrv] ambari git commit: AMBARI-20065 - Livy server start failed after downgrade with missing livy.server.kerberos.keytab error (jonathanhurley)

Posted by nc...@apache.org.
AMBARI-20065 - Livy server start failed after downgrade with missing livy.server.kerberos.keytab error (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a5dc2d2c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a5dc2d2c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a5dc2d2c

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: a5dc2d2cc98eb5b2f59cc7bc13b947e84c15479b
Parents: 8dabd55
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Feb 17 10:40:04 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Feb 17 15:18:27 2017 -0500

----------------------------------------------------------------------
 .../stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml      | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a5dc2d2c/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index 4fb68ed..bb9ec1b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -61,10 +61,6 @@
           <function>prepare</function>
         </task>
       </execute-stage>
-
-      <execute-stage service="SPARK" component="LIVY_SERVER" title="Apply config changes for Livy Server">
-        <task xsi:type="configure" id="hdp_2_5_0_0_rename_spark_livy_configs"/>
-      </execute-stage>
     </group>
 
     <group xsi:type="stop" name="STOP_HIGH_LEVEL_SERVICE_COMPONENTS" title="Stop Components for High-Level Services">
@@ -497,6 +493,11 @@
           <summary>Updating the Hive Log4J2 properties to include parameterizations</summary>
         </task>
       </execute-stage>
+      
+      <!-- SPARK -->
+      <execute-stage service="SPARK" component="LIVY_SERVER" title="Apply config changes for Livy Server">
+        <task xsi:type="configure" id="hdp_2_5_0_0_rename_spark_livy_configs"/>
+      </execute-stage>      
     </group>
 
     <!--


[37/50] [abbrv] ambari git commit: AMBARI-20059 Storm config change results in Consistency check failed (dbuzhor)

Posted by nc...@apache.org.
AMBARI-20059 Storm config change results in Consistency check failed (dbuzhor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/984b35e9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/984b35e9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/984b35e9

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 984b35e9c1f3121f6688dc797e93b710db69d6b5
Parents: ce404d6
Author: Denys Buzhor <bd...@hortonworks.com>
Authored: Fri Feb 17 00:48:26 2017 +0200
Committer: Denys Buzhor <bd...@hortonworks.com>
Committed: Fri Feb 17 10:20:25 2017 +0200

----------------------------------------------------------------------
 .../controllers/main/service/info/configs.js    | 35 +++++++++++++--
 .../app/mixins/common/track_request_mixin.js    |  5 +++
 .../main/service/info/config_test.js            | 45 ++++++++++++++++++++
 3 files changed, 81 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/984b35e9/ambari-web/app/controllers/main/service/info/configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/configs.js b/ambari-web/app/controllers/main/service/info/configs.js
index d95a2d2..786c6f9 100644
--- a/ambari-web/app/controllers/main/service/info/configs.js
+++ b/ambari-web/app/controllers/main/service/info/configs.js
@@ -278,6 +278,36 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.AddSecurityConfi
   isInit: true,
 
   /**
+   * Returns dependencies at all levels for a service, including the dependencies of its children,
+   * their children's dependencies, and so on.
+   *
+   * @param  {String} serviceName name of the service to get dependencies for
+   * @returns {String[]}
+   */
+  getServicesDependencies: function(serviceName) {
+    var dependencies = Em.getWithDefault(App.StackService.find(serviceName), 'dependentServiceNames', []);
+    var loop = function(dependentServices, allDependencies) {
+      return dependentServices.reduce(function(all, name) {
+        var service = App.StackService.find(name);
+        if (!service) {
+          return all;
+        }
+        var serviceDependencies = service.get('dependentServiceNames');
+        if (!serviceDependencies.length) {
+          return all.concat(name);
+        }
+        var missed = _.intersection(_.difference(serviceDependencies, all), serviceDependencies);
+        if (missed.length) {
+          return loop(missed, all.concat(missed));
+        }
+        return all;
+      }, allDependencies || dependentServices);
+    };
+
+    return loop(dependencies).uniq().without(serviceName).toArray();
+  },
+
+  /**
    * On load function
    * @method loadStep
    */
@@ -285,10 +315,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.AddSecurityConfi
     var serviceName = this.get('content.serviceName'), self = this;
     App.router.get('mainController').stopPolling();
     this.clearStep();
-    this.set('dependentServiceNames', (App.StackService.find(serviceName).get('dependentServiceNames') || []).reduce(function(acc, i) {
-      acc.push(i);
-      return Array.prototype.concat.apply(acc, App.StackService.find(i).get('dependentServiceNames').toArray()).without(serviceName).uniq();
-    }, []));
+    this.set('dependentServiceNames', this.getServicesDependencies(serviceName));
     this.trackRequestChain(this.loadConfigTheme(serviceName).always(function () {
       if (self.get('preSelectedConfigVersion')) {
         self.loadPreSelectedConfigVersion();
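
A minimal standalone sketch of the getServicesDependencies() idea above, for readers skimming the diff: it walks the
stack-service dependency graph transitively and stops at services it has already visited, which is what keeps cyclic
dependencies (e.g. HDFS and ZOOKEEPER depending on each other in the tests below) from looping forever. The stub
registry and the iterative traversal here are illustrative only; the committed code uses App.StackService and a
reduce-based recursion instead.

  // hypothetical stand-in for App.StackService.find(...).get('dependentServiceNames')
  var STACK = {
    STORM:     ['RANGER', 'ATLAS', 'ZOOKEEPER'],
    RANGER:    ['HIVE', 'HDFS'],
    HIVE:      ['YARN'],
    ZOOKEEPER: ['HDFS'],
    ATLAS:     [],
    HDFS:      ['ZOOKEEPER'],
    YARN:      ['HIVE']
  };

  function getServicesDependencies(serviceName) {
    var all = [];                                    // accumulated transitive dependencies
    var queue = (STACK[serviceName] || []).slice();  // direct dependencies first
    while (queue.length) {
      var name = queue.shift();
      if (name === serviceName || all.indexOf(name) !== -1) {
        continue;                                    // skip self and already-visited services (breaks cycles)
      }
      all.push(name);
      queue = queue.concat(STACK[name] || []);       // enqueue the dependency's own dependencies
    }
    return all;
  }

  console.log(getServicesDependencies('STORM'));
  // -> ['RANGER', 'ATLAS', 'ZOOKEEPER', 'HIVE', 'HDFS', 'YARN']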

http://git-wip-us.apache.org/repos/asf/ambari/blob/984b35e9/ambari-web/app/mixins/common/track_request_mixin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/track_request_mixin.js b/ambari-web/app/mixins/common/track_request_mixin.js
index 4efcecb..c665253 100644
--- a/ambari-web/app/mixins/common/track_request_mixin.js
+++ b/ambari-web/app/mixins/common/track_request_mixin.js
@@ -22,6 +22,11 @@ App.TrackRequestMixin = Em.Mixin.create({
 
   requestsInProgress: [],
 
+  init: function() {
+    this.set('requestsInProgress', []);
+    this._super([].slice.call(arguments));
+  },
+
   /**
    * register a request with the view to track its progress
    * @param {$.ajax} request
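
The init() added to App.TrackRequestMixin above deserves a note: a property initialized to an array literal on an
Ember mixin lives on the prototype, so every object that includes the mixin shares the same requestsInProgress array
unless each instance resets it. A small illustration of the pitfall, with hypothetical objects (it assumes an
Ember 1.x environment like ambari-web's and is not Ambari code):

  var Shared = Em.Mixin.create({
    requests: []                           // one array object, shared by every instance
  });
  var a = Em.Object.extend(Shared).create();
  var b = Em.Object.extend(Shared).create();
  a.get('requests').push('xhr-1');
  console.log(b.get('requests').length);   // 1: the request leaked into the other instance

  var Fixed = Em.Mixin.create({
    requests: [],
    init: function () {
      this.set('requests', []);            // fresh array per instance, as the mixin now does
      this._super.apply(this, arguments);
    }
  });
  var c = Em.Object.extend(Fixed).create();
  var d = Em.Object.extend(Fixed).create();
  c.get('requests').push('xhr-1');
  console.log(d.get('requests').length);   // 0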

http://git-wip-us.apache.org/repos/asf/ambari/blob/984b35e9/ambari-web/test/controllers/main/service/info/config_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/service/info/config_test.js b/ambari-web/test/controllers/main/service/info/config_test.js
index 4cc2ef8..9d925be 100644
--- a/ambari-web/test/controllers/main/service/info/config_test.js
+++ b/ambari-web/test/controllers/main/service/info/config_test.js
@@ -1052,4 +1052,49 @@ describe("App.MainServiceInfoConfigsController", function () {
 
   });
 
+  describe('#getServicesDependencies', function() {
+    var createService = function(serviceName, dependencies) {
+      return Em.Object.create({
+        serviceName: serviceName,
+        dependentServiceNames: dependencies || []
+      });
+    };
+    var stackServices = [
+      createService('STORM', ['RANGER', 'ATLAS', 'ZOOKEEPER']),
+      createService('RANGER', ['HIVE', 'HDFS']),
+      createService('HIVE', ['YARN']),
+      createService('ZOOKEEPER', ['HDFS']),
+      createService('ATLAS'),
+      createService('HDFS', ['ZOOKEEPER']),
+      createService('YARN', ['HIVE'])
+    ];
+    beforeEach(function() {
+      sinon.stub(App.StackService, 'find', function(serviceName) {
+        return stackServices.findProperty('serviceName', serviceName);
+      });
+    });
+    afterEach(function() {
+      App.StackService.find.restore();
+    });
+
+    it('should return all service dependencies for STORM service', function() {
+      var result = mainServiceInfoConfigsController.getServicesDependencies('STORM');
+      expect(result).to.be.eql(['RANGER', 'ATLAS', 'ZOOKEEPER', 'HIVE', 'HDFS', 'YARN']);
+    });
+
+    it('should return all service dependencies for ATLAS', function() {
+      var result = mainServiceInfoConfigsController.getServicesDependencies('ATLAS');
+      expect(result).to.be.eql([]);
+    });
+
+    it('should return all service dependencies for RANGER', function() {
+      var result = mainServiceInfoConfigsController.getServicesDependencies('RANGER');
+      expect(result).to.be.eql(['HIVE', 'HDFS', 'YARN', 'ZOOKEEPER']);
+    });
+
+    it('should return all service dependencies for YARN', function() {
+      var result = mainServiceInfoConfigsController.getServicesDependencies('YARN');
+      expect(result).to.be.eql(['HIVE']);
+    });
+  });
 });


[08/50] [abbrv] ambari git commit: AMBARI-20016 Hosts page moving through pages gets progressively slower to the point of being unusable. (atkach)

Posted by nc...@apache.org.
AMBARI-20016 Hosts page moving through pages gets progressively slower to the point of being unusable. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a1f23ad4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a1f23ad4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a1f23ad4

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: a1f23ad4284cff716b890392c4c22a16229e16ad
Parents: 141e88d
Author: Andrii Tkach <at...@apache.org>
Authored: Wed Feb 15 16:35:38 2017 +0200
Committer: Andrii Tkach <at...@apache.org>
Committed: Wed Feb 15 16:35:38 2017 +0200

----------------------------------------------------------------------
 ambari-web/app/controllers/main.js              |  5 +++
 .../controllers/main/service/add_controller.js  |  2 +-
 .../service/manage_config_groups_controller.js  | 14 +++----
 .../app/controllers/wizard/step7_controller.js  |  6 +--
 .../mappers/alert_definition_summary_mapper.js  |  4 --
 .../app/mappers/alert_definitions_mapper.js     | 15 ++++----
 ambari-web/app/mappers/alert_groups_mapper.js   |  3 +-
 .../app/mappers/alert_instances_mapper.js       |  2 +-
 .../app/mappers/alert_notification_mapper.js    |  2 +-
 ambari-web/app/mappers/cluster_mapper.js        |  2 +-
 .../app/mappers/components_state_mapper.js      |  6 +--
 .../app/mappers/configs/config_groups_mapper.js |  4 +-
 .../configs/service_config_version_mapper.js    |  3 +-
 ambari-web/app/mappers/configs/themes_mapper.js | 25 ++++--------
 ambari-web/app/mappers/hosts_mapper.js          | 10 ++---
 ambari-web/app/mappers/quicklinks_mapper.js     |  3 +-
 .../app/mappers/repository_version_mapper.js    |  9 ++---
 ambari-web/app/mappers/root_service_mapper.js   |  5 +--
 ambari-web/app/mappers/server_data_mapper.js    | 40 +++++++++++++++++++-
 ambari-web/app/mappers/service_mapper.js        |  3 +-
 .../app/mappers/service_metrics_mapper.js       | 20 +++++-----
 ambari-web/app/mappers/stack_mapper.js          |  9 ++---
 ambari-web/app/mappers/stack_service_mapper.js  |  6 +--
 .../app/mappers/stack_upgrade_history_mapper.js |  3 +-
 ambari-web/app/mappers/stack_version_mapper.js  |  3 +-
 ambari-web/app/mappers/target_cluster_mapper.js |  2 +-
 ambari-web/app/mappers/users_mapper.js          |  2 +-
 ambari-web/app/mappers/widget_mapper.js         |  3 +-
 .../main/service/configs/config_overridable.js  |  8 ++--
 .../app/mixins/main/service/groups_mapping.js   |  1 -
 ambari-web/app/models/user.js                   |  2 +-
 ambari-web/app/utils/http_client.js             |  5 ---
 ambari-web/test/controllers/main/admin_test.js  |  8 ++--
 .../test/controllers/main/host/details_test.js  |  4 +-
 .../main/service/add_controller_test.js         |  4 +-
 .../test/controllers/main/service/item_test.js  |  2 +-
 .../test/mappers/alert_groups_mapper_test.js    |  4 +-
 .../service/configs/config_overridable_test.js  | 12 +++---
 ambari-web/test/models/host_component_test.js   |  2 +-
 .../test/models/host_stack_version_test.js      | 12 +++---
 ambari-web/test/models/rack_test.js             |  4 +-
 ambari-web/test/models/stack_service_test.js    |  2 +-
 ambari-web/test/utils/http_client_test.js       |  9 -----
 43 files changed, 142 insertions(+), 148 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/controllers/main.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main.js b/ambari-web/app/controllers/main.js
index 87a483e..798115f 100644
--- a/ambari-web/app/controllers/main.js
+++ b/ambari-web/app/controllers/main.js
@@ -46,6 +46,11 @@ App.MainController = Em.Controller.extend({
    * run all processes and cluster's data loading
    */
   initialize: function(){
+    // Since we use only the defaultTransaction, we can stub the <code>removeCleanRecords</code> method,
+    // because it would just remove records from and add them back to the same (default) transaction
+    App.store.defaultTransaction.reopen({
+      removeCleanRecords: Em.K
+    });
     App.router.get('clusterController').loadClusterData();
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/controllers/main/service/add_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/add_controller.js b/ambari-web/app/controllers/main/service/add_controller.js
index f312a5a..0989f76 100644
--- a/ambari-web/app/controllers/main/service/add_controller.js
+++ b/ambari-web/app/controllers/main/service/add_controller.js
@@ -198,7 +198,7 @@ App.AddServiceController = App.WizardController.extend(App.AddSecurityConfigs, {
       }, this);
       this.setDBProperty('services', services);
     }
-    App.store.commit();
+    App.store.fastCommit();
     this.set('serviceToInstall', null);
     this.set('content.services', stackServices);
     var self = this;

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/controllers/main/service/manage_config_groups_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/manage_config_groups_controller.js b/ambari-web/app/controllers/main/service/manage_config_groups_controller.js
index 4cf22d4..0df5dc5 100644
--- a/ambari-web/app/controllers/main/service/manage_config_groups_controller.js
+++ b/ambari-web/app/controllers/main/service/manage_config_groups_controller.js
@@ -679,7 +679,7 @@ App.ManageConfigGroupsController = Em.Controller.extend(App.ConfigOverridable, {
           name: this.get('configGroupName'),
           description: this.get('configGroupDesc')
         });
-        App.store.commit();
+        App.store.fastCommit();
         this.hide();
       }
     });
@@ -745,7 +745,7 @@ App.ManageConfigGroupsController = Em.Controller.extend(App.ConfigOverridable, {
           });
         }
 
-        App.store.load(App.ServiceConfigGroup, {
+        App.store.safeLoad(App.ServiceConfigGroup, {
           id: newGroupId,
           name: groupName,
           description: this.get('configGroupDesc'),
@@ -758,11 +758,11 @@ App.ManageConfigGroupsController = Em.Controller.extend(App.ConfigOverridable, {
           properties: duplicated ? properties : [],
           is_temporary: true
         });
-        App.store.commit();
+        App.store.fastCommit();
         var childConfigGroups = defaultConfigGroup.get('childConfigGroups').mapProperty('id');
         childConfigGroups.push(newGroupId);
-        App.store.load(App.ServiceConfigGroup, App.configGroupsMapper.generateDefaultGroup(self.get('serviceName'), defaultConfigGroup.get('hosts'), childConfigGroups));
-        App.store.commit();
+        App.store.safeLoad(App.ServiceConfigGroup, App.configGroupsMapper.generateDefaultGroup(self.get('serviceName'), defaultConfigGroup.get('hosts'), childConfigGroups));
+        App.store.fastCommit();
         self.get('configGroups').pushObject(App.ServiceConfigGroup.find(newGroupId));
         this.hide();
       }
@@ -853,9 +853,7 @@ App.ManageConfigGroupsController = Em.Controller.extend(App.ConfigOverridable, {
       resetGroupChanges: function (originalGroups) {
         if (this.get('subViewController.isHostsModified')) {
           App.ServiceConfigGroup.find().clear();
-          App.store.commit();
-          App.store.loadMany(App.ServiceConfigGroup, originalGroups);
-          App.store.commit();
+          App.store.safeLoadMany(App.ServiceConfigGroup, originalGroups);
         }
       },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/controllers/wizard/step7_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step7_controller.js b/ambari-web/app/controllers/wizard/step7_controller.js
index 1d24ee3..0207e6b 100644
--- a/ambari-web/app/controllers/wizard/step7_controller.js
+++ b/ambari-web/app/controllers/wizard/step7_controller.js
@@ -670,7 +670,7 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
         console.time('loadConfigGroups execution time: ');
         this.loadConfigGroups(this.get('allSelectedServiceNames')).done(this.loadOverrides.bind(this));
       } else {
-        App.store.commit();
+        App.store.fastCommit();
         App.configGroupsMapper.map(null, false, this.get('allSelectedServiceNames'));
         this.onLoadOverrides();
       }
@@ -1132,9 +1132,7 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
       if (service.get('serviceName') === 'MISC') return;
       var serviceRawGroups = serviceConfigGroups.filterProperty('service_name', service.serviceName);
       if (serviceRawGroups.length) {
-        App.store.commit();
-        App.store.loadMany(App.ServiceConfigGroup, serviceRawGroups);
-        App.store.commit();
+        App.store.safeLoadMany(App.ServiceConfigGroup, serviceRawGroups);
         serviceRawGroups.forEach(function(item){
           var modelGroup = App.ServiceConfigGroup.find(item.id);
           modelGroup.set('properties', []);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/alert_definition_summary_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/alert_definition_summary_mapper.js b/ambari-web/app/mappers/alert_definition_summary_mapper.js
index 8b59c7c..29f286e 100644
--- a/ambari-web/app/mappers/alert_definition_summary_mapper.js
+++ b/ambari-web/app/mappers/alert_definition_summary_mapper.js
@@ -114,10 +114,6 @@ App.alertDefinitionSummaryMapper = App.QuickDataMapper.create({
         });
       }
     });
-    if (!$.mocho) {
-      //for some reasons this causing error in unit test
-      App.store.commit();
-    }
     console.timeEnd('App.alertDefinitionSummaryMapper execution time');
 
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/alert_definitions_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/alert_definitions_mapper.js b/ambari-web/app/mappers/alert_definitions_mapper.js
index c400b89..ca0ada6 100644
--- a/ambari-web/app/mappers/alert_definitions_mapper.js
+++ b/ambari-web/app/mappers/alert_definitions_mapper.js
@@ -228,16 +228,15 @@ App.alertDefinitionsMapper = App.QuickDataMapper.create({
       });
 
       // load all mapped data to model
-      App.store.loadMany(this.get('reportModel'), alertReportDefinitions);
-      App.store.loadMany(this.get('parameterModel'), parameters);
-      App.store.loadMany(this.get('metricsSourceModel'), alertMetricsSourceDefinitions);
+      App.store.safeLoadMany(this.get('reportModel'), alertReportDefinitions);
+      App.store.safeLoadMany(this.get('parameterModel'), parameters);
+      App.store.safeLoadMany(this.get('metricsSourceModel'), alertMetricsSourceDefinitions);
       this.setMetricsSourcePropertyLists(this.get('metricsSourceModel'), alertMetricsSourceDefinitions);
-      App.store.loadMany(this.get('metricsUriModel'), alertMetricsUriDefinitions);
-      App.store.loadMany(this.get('metricsAmsModel'), alertMetricsAmsDefinitions);
-      // this loadMany takes too much time
-      App.store.loadMany(this.get('model'), alertDefinitions);
+      App.store.safeLoadMany(this.get('metricsUriModel'), alertMetricsUriDefinitions);
+      App.store.safeLoadMany(this.get('metricsAmsModel'), alertMetricsAmsDefinitions);
+      // this safeLoadMany takes too much time
+      App.store.safeLoadMany(this.get('model'), alertDefinitions);
       this.setAlertDefinitionsRawSourceData(rawSourceData);
-      App.store.commit();
     }
     console.timeEnd('App.alertDefinitionsMapper execution time');
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/alert_groups_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/alert_groups_mapper.js b/ambari-web/app/mappers/alert_groups_mapper.js
index c079c21..1fd38f5 100644
--- a/ambari-web/app/mappers/alert_groups_mapper.js
+++ b/ambari-web/app/mappers/alert_groups_mapper.js
@@ -109,8 +109,7 @@ App.alertGroupsMapper = App.QuickDataMapper.create({
       App.cache['previousAlertGroupsFullMap'] = groupsMap;
       App.cache['alertNotificationsGroupsMap'] = alertNotificationsGroupsMap;
       // initial load takes much more time than the others, but that's OK (all data should be saved the first time)
-      App.store.loadMany(this.get('model'), alertGroups);
-      App.store.commit();
+      App.store.safeLoadMany(this.get('model'), alertGroups);
       console.timeEnd('App.alertGroupsMapper execution time');
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/alert_instances_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/alert_instances_mapper.js b/ambari-web/app/mappers/alert_instances_mapper.js
index 269941b..f547642 100644
--- a/ambari-web/app/mappers/alert_instances_mapper.js
+++ b/ambari-web/app/mappers/alert_instances_mapper.js
@@ -71,7 +71,7 @@ App.alertInstanceMapper = App.QuickDataMapper.create({
         model.find().clear();
       }
 
-      App.store.loadMany(model, alertInstances);
+      App.store.safeLoadMany(model, alertInstances);
       console.timeEnd('App.alertInstanceMapper execution time');
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/alert_notification_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/alert_notification_mapper.js b/ambari-web/app/mappers/alert_notification_mapper.js
index 9567c77..8c7f50f 100644
--- a/ambari-web/app/mappers/alert_notification_mapper.js
+++ b/ambari-web/app/mappers/alert_notification_mapper.js
@@ -61,7 +61,7 @@ App.alertNotificationMapper = App.QuickDataMapper.create({
         notificationsAlertStates[item.AlertTarget.id] = item.AlertTarget.alert_states;
       }, this);
 
-      App.store.loadMany(this.get('model'), result);
+      App.store.safeLoadMany(this.get('model'), result);
       App.cache['previousAlertNotificationsFullMap'] = notifications;
       this._setPropertiesToEachModel('properties', notificationsProperties);
       this._setPropertiesToEachModel('alertStates', notificationsAlertStates);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/cluster_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/cluster_mapper.js b/ambari-web/app/mappers/cluster_mapper.js
index 5689867..d1aa63b 100644
--- a/ambari-web/app/mappers/cluster_mapper.js
+++ b/ambari-web/app/mappers/cluster_mapper.js
@@ -24,7 +24,7 @@ App.clusterMapper = App.QuickDataMapper.create({
       if(json){
         var result = json;
         result = this.parseIt(result, this.config);
-        App.store.load(this.get('model'), result);
+        App.store.safeLoad(this.get('model'), result);
         var cluster = App.Cluster.find(result.id);
         var clusterDesiredConfigs = [];
         // Create desired_configs_array

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/components_state_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/components_state_mapper.js b/ambari-web/app/mappers/components_state_mapper.js
index 89d5af2..a7e6d60 100644
--- a/ambari-web/app/mappers/components_state_mapper.js
+++ b/ambari-web/app/mappers/components_state_mapper.js
@@ -214,9 +214,9 @@ App.componentsStateMapper = App.QuickDataMapper.create({
         }
       }, this);
     }
-    App.store.loadMany(this.clientModel, clients);
-    App.store.loadMany(this.slaveModel, slaves);
-    App.store.loadMany(this.masterModel, masters);
+    App.store.safeLoadMany(this.clientModel, clients);
+    App.store.safeLoadMany(this.slaveModel, slaves);
+    App.store.safeLoadMany(this.masterModel, masters);
 
     if (hasNewComponents) {
       App.get('router.clusterController').triggerQuickLinksUpdate();

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/configs/config_groups_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/configs/config_groups_mapper.js b/ambari-web/app/mappers/configs/config_groups_mapper.js
index fe3d7a1..e735d84 100644
--- a/ambari-web/app/mappers/configs/config_groups_mapper.js
+++ b/ambari-web/app/mappers/configs/config_groups_mapper.js
@@ -108,9 +108,7 @@ App.configGroupsMapper = App.QuickDataMapper.create({
       configGroups.sort(function (configGroupA, configGroupB) {
         return configGroupA.is_default || (configGroupA.name > configGroupB.name);
       });
-      App.store.commit();
-      App.store.loadMany(this.get('model'), configGroups);
-      App.store.commit();
+      App.store.safeLoadMany(this.get('model'), configGroups);
     }
     console.timeEnd('App.configGroupsMapper');
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/configs/service_config_version_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/configs/service_config_version_mapper.js b/ambari-web/app/mappers/configs/service_config_version_mapper.js
index 68bf235..e95c925 100644
--- a/ambari-web/app/mappers/configs/service_config_version_mapper.js
+++ b/ambari-web/app/mappers/configs/service_config_version_mapper.js
@@ -98,8 +98,7 @@ App.serviceConfigVersionsMapper = App.QuickDataMapper.create({
       if (App.router.get('currentState.name') === 'configHistory') {
         this.get('model').find().clear();
       }
-      App.store.commit();
-      App.store.loadMany(this.get('model'), result);
+      App.store.safeLoadMany(this.get('model'), result);
       console.timeEnd('App.serviceConfigVersionsMapper');
     }
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/configs/themes_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/configs/themes_mapper.js b/ambari-web/app/mappers/configs/themes_mapper.js
index 8ad51bd..1e98b34 100644
--- a/ambari-web/app/mappers/configs/themes_mapper.js
+++ b/ambari-web/app/mappers/configs/themes_mapper.js
@@ -85,8 +85,7 @@ App.themesMapper = App.QuickDataMapper.create({
       this.mapThemeWidgets(item);
     }, this);
 
-    App.store.commit();
-    App.store.loadMany(this.get("tabModel"), tabs);
+    App.store.safeLoadMany(this.get("tabModel"), tabs);
     this.generateAdvancedTabs(serviceNames);
     console.timeEnd('App.themesMapper execution time');
   },
@@ -136,9 +135,7 @@ App.themesMapper = App.QuickDataMapper.create({
                       var type = 'subsectionTab';
                       this.mapThemeConditions(subSectionTabConditions, type);
                     }
-                    App.store.commit();
-                    App.store.loadMany(this.get("subSectionTabModel"), subSectionTabs);
-                    App.store.commit();
+                    App.store.safeLoadMany(this.get("subSectionTabModel"), subSectionTabs);
                     parsedSubSection.sub_section_tabs = subSectionTabs.mapProperty("id");
                   }
                   if (parsedSubSection['depends_on']) {
@@ -150,18 +147,14 @@ App.themesMapper = App.QuickDataMapper.create({
                   var type = 'subsection';
                   this.mapThemeConditions(subSectionConditions, type);
                 }
-                App.store.commit();
-                App.store.loadMany(this.get("subSectionModel"), subSections);
-                App.store.commit();
+                App.store.safeLoadMany(this.get("subSectionModel"), subSections);
                 parsedSection.sub_sections = subSections.mapProperty("id");
               }
 
               sections.push(parsedSection);
             }, this);
 
-            App.store.commit();
-            App.store.loadMany(this.get("sectionModel"), sections);
-            App.store.commit();
+            App.store.safeLoadMany(this.get("sectionModel"), sections);
             parsedTab.sections = sections.mapProperty("id");
           }
 
@@ -283,8 +276,7 @@ App.themesMapper = App.QuickDataMapper.create({
       configConditionsCopy.pushObject(configCondition);
     }, this);
 
-    App.store.loadMany(this.get("themeConditionModel"), configConditionsCopy);
-    App.store.commit();
+    App.store.safeLoadMany(this.get("themeConditionModel"), configConditionsCopy);
   },
 
   /**
@@ -314,8 +306,7 @@ App.themesMapper = App.QuickDataMapper.create({
         subSectionConditionsCopy.pushObject(subSectionCondition);
       }, this);
     }, this);
-    App.store.loadMany(this.get("themeConditionModel"), subSectionConditionsCopy);
-    App.store.commit();
+    App.store.safeLoadMany(this.get("themeConditionModel"), subSectionConditionsCopy);
   },
 
   /**
@@ -374,8 +365,6 @@ App.themesMapper = App.QuickDataMapper.create({
         service_name: serviceName
       });
     });
-    App.store.commit();
-    App.store.loadMany(this.get("tabModel"), advancedTabs);
-    App.store.commit();
+    App.store.safeLoadMany(this.get("tabModel"), advancedTabs);
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/hosts_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/hosts_mapper.js b/ambari-web/app/mappers/hosts_mapper.js
index 3c7dd70..b64bd8d 100644
--- a/ambari-web/app/mappers/hosts_mapper.js
+++ b/ambari-web/app/mappers/hosts_mapper.js
@@ -213,15 +213,15 @@ App.hostsMapper = App.QuickDataMapper.create({
         if (componentsIdMap[key]) componentsIdMap[key].display_name_advanced = App.HostComponent.find(key).get('displayNameAdvanced');
       }
 
-      App.store.commit();
-      App.store.loadMany(App.HostStackVersion, stackVersions);
-      App.store.loadMany(App.HostComponentLog, hostComponentLogs);
-      App.store.loadMany(App.HostComponent, components);
       //"itemTotal" present only for Hosts page request
       if (!Em.isNone(json.itemTotal)) {
         App.Host.find().clear();
+        App.HostComponent.find().clear();
       }
-      App.store.loadMany(App.Host, hostsWithFullInfo);
+      App.store.safeLoadMany(App.HostStackVersion, stackVersions);
+      App.store.safeLoadMany(App.HostComponentLog, hostComponentLogs);
+      App.store.safeLoadMany(App.HostComponent, components);
+      App.store.safeLoadMany(App.Host, hostsWithFullInfo);
       var itemTotal = parseInt(json.itemTotal);
       if (!isNaN(itemTotal)) {
         App.router.set('mainHostController.filteredCount', itemTotal);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/quicklinks_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/quicklinks_mapper.js b/ambari-web/app/mappers/quicklinks_mapper.js
index 6cb1fe0..6de9b34 100644
--- a/ambari-web/app/mappers/quicklinks_mapper.js
+++ b/ambari-web/app/mappers/quicklinks_mapper.js
@@ -43,8 +43,7 @@ App.quicklinksMapper = App.QuickDataMapper.create({
       result.push(parseResult);
     }, this);
 
-    App.store.loadMany(this.get('model'), result);
-    App.store.commit();
+    App.store.safeLoadMany(this.get('model'), result);
     console.timeEnd('App.quicklinksMapper execution time');
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/repository_version_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/repository_version_mapper.js b/ambari-web/app/mappers/repository_version_mapper.js
index 48e460b..8ef50db 100644
--- a/ambari-web/app/mappers/repository_version_mapper.js
+++ b/ambari-web/app/mappers/repository_version_mapper.js
@@ -152,11 +152,10 @@ App.repoVersionMapper = App.QuickDataMapper.create({
         }
       }, this);
     }
-    App.store.commit();
-    App.store.loadMany(modelRepositories, resultRepo);
-    App.store.loadMany(modelOperatingSystems, resultOS);
-    App.store.loadMany(modelServices, resultService);
-    App.store.loadMany(modelRepoVersions, resultRepoVersion);
+    App.store.safeLoadMany(modelRepositories, resultRepo);
+    App.store.safeLoadMany(modelOperatingSystems, resultOS);
+    App.store.safeLoadMany(modelServices, resultService);
+    App.store.safeLoadMany(modelRepoVersions, resultRepoVersion);
   },
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/root_service_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/root_service_mapper.js b/ambari-web/app/mappers/root_service_mapper.js
index 1e0e070..66a4849 100644
--- a/ambari-web/app/mappers/root_service_mapper.js
+++ b/ambari-web/app/mappers/root_service_mapper.js
@@ -51,8 +51,7 @@ App.rootServiceMapper = App.QuickDataMapper.create({
       rootServiceComponents.push(this.parseIt(item.RootServiceComponents, this.configRootServiceComponents));
     }, this);
     rootService.components =  rootServiceComponents;
-    App.store.commit();
-    App.store.loadMany(rootServiceComponentModel, rootServiceComponents);
-    App.store.load(rootServiceModel, this.parseIt(rootService, this.configRootService));
+    App.store.safeLoadMany(rootServiceComponentModel, rootServiceComponents);
+    App.store.safeLoad(rootServiceModel, this.parseIt(rootService, this.configRootService));
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/server_data_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/server_data_mapper.js b/ambari-web/app/mappers/server_data_mapper.js
index a99d8ae..8a32eb0 100644
--- a/ambari-web/app/mappers/server_data_mapper.js
+++ b/ambari-web/app/mappers/server_data_mapper.js
@@ -43,6 +43,42 @@ App.cache.clear = function () {
   App.cache.clear = clear;
 };
 
+App.store.reopen({
+  safeLoadMany: function(model, records) {
+    try {
+      this.loadMany(model, records);
+    } catch (e) {
+      console.debug('Resolve uncommitted records before load');
+      this.fastCommit();
+      this.loadMany(model, records);
+    }
+  },
+
+  safeLoad: function(model, record) {
+    try {
+      this.load(model, record);
+    } catch (e) {
+      console.debug('Resolve uncommitted record before load');
+      this.fastCommit();
+      this.load(model, record);
+    }
+  },
+
+  /**
+   * App.store.commit() creates a new transaction
+   * and then moves all records from the old transaction to the new one, which is expensive.
+   *
+   * We should use only the defaultTransaction,
+   * and then we can stub the <code>removeCleanRecords</code> method,
+   * because it would just remove records from and add them back to the same (default) transaction
+   */
+  fastCommit: function() {
+    console.time('store commit');
+    App.store.defaultTransaction.commit();
+    console.timeEnd('store commit');
+  }
+});
+
 App.ServerDataMapper = Em.Object.extend({
   jsonKey: false,
   map: function (json) {
@@ -77,7 +113,7 @@ App.QuickDataMapper = App.ServerDataMapper.extend({
         result.push(this.parseIt(item, this.config));
       }, this);
 
-      App.store.loadMany(this.get('model'), result);
+      App.store.safeLoadMany(this.get('model'), result);
     }
   },
 
@@ -144,7 +180,7 @@ App.QuickDataMapper = App.ServerDataMapper.extend({
    */
   deleteRecord: function (item) {
     item.deleteRecord();
-    App.store.commit();
+    App.store.fastCommit();
     item.get('stateManager').transitionTo('loading');
   },
   /**
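
The App.store.reopen() block above is the heart of this commit: safeLoad()/safeLoadMany() try a plain load first and
only fall back to committing, via fastCommit(), which commits the existing defaultTransaction instead of creating a
new one, when uncommitted records get in the way. Mappers therefore no longer need to bracket every load with a full
App.store.commit(). A short usage sketch with hypothetical records, assuming the reopen() above has already run:

  // hypothetical host records; any model registered in ambari-web would do
  App.store.safeLoadMany(App.Host, [
    {id: 'host1', host_name: 'host1'},
    {id: 'host2', host_name: 'host2'}
  ]);
  App.store.safeLoad(App.Host, {id: 'host3', host_name: 'host3'});

  // only needed when records must be flushed explicitly, e.g. before deleting one
  App.store.fastCommit();                  // commits the defaultTransaction in place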

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/service_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/service_mapper.js b/ambari-web/app/mappers/service_mapper.js
index 9a330f9..3523379 100644
--- a/ambari-web/app/mappers/service_mapper.js
+++ b/ambari-web/app/mappers/service_mapper.js
@@ -57,8 +57,7 @@ App.serviceMapper = App.QuickDataMapper.create({
         return self.parseIt(item, self.get('config'));
       });
       parsedCacheServices = misc.sortByOrder(App.StackService.find().mapProperty('serviceName'), parsedCacheServices);
-      App.store.loadMany(this.get('model'), parsedCacheServices);
-      App.store.commit();
+      App.store.safeLoadMany(this.get('model'), parsedCacheServices);
       this.set('initialAppLoad', true);
     }
     this.servicesLoading().done(function setMaintenanceState() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/service_metrics_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/service_metrics_mapper.js b/ambari-web/app/mappers/service_metrics_mapper.js
index c4f37df..87b6149 100644
--- a/ambari-web/app/mappers/service_metrics_mapper.js
+++ b/ambari-web/app/mappers/service_metrics_mapper.js
@@ -231,7 +231,7 @@ App.serviceMetricsMapper = App.QuickDataMapper.create({
       }, this);
       previousMasterComponentIds = hostComponents.mapProperty('id');
 
-      App.store.loadMany(this.get('model3'), hostComponents);
+      App.store.safeLoadMany(this.get('model3'), hostComponents);
 
       //parse service metrics from components
       services.forEach(function (item) {
@@ -253,7 +253,7 @@ App.serviceMetricsMapper = App.QuickDataMapper.create({
       result = misc.sortByOrder(stackServices, result);
 
       //load services to model
-      App.store.loadMany(this.get('model'), result);
+      App.store.safeLoadMany(this.get('model'), result);
 
       // check for new components
       if (lastKnownStatusesLength > 0) {
@@ -290,33 +290,33 @@ App.serviceMetricsMapper = App.QuickDataMapper.create({
     if (item && item.ServiceInfo && item.ServiceInfo.service_name == "HDFS") {
       finalJson = this.hdfsMapper(item);
       finalJson.rand = Math.random();
-      App.store.load(App.HDFSService, finalJson);
+      App.store.safeLoad(App.HDFSService, finalJson);
     } else if (item && item.ServiceInfo && item.ServiceInfo.service_name == "HBASE") {
       finalJson = this.hbaseMapper(item);
       finalJson.rand = Math.random();
-      App.store.load(App.HBaseService, finalJson);
+      App.store.safeLoad(App.HBaseService, finalJson);
     } else if (item && item.ServiceInfo && item.ServiceInfo.service_name == "FLUME") {
       finalJson = this.flumeMapper(item);
       finalJson.rand = Math.random();
-      App.store.loadMany(App.FlumeAgent, finalJson.agentJsons);
-      App.store.load(App.FlumeService, finalJson);
+      App.store.safeLoadMany(App.FlumeAgent, finalJson.agentJsons);
+      App.store.safeLoad(App.FlumeService, finalJson);
     } else if (item && item.ServiceInfo && item.ServiceInfo.service_name == "YARN") {
       finalJson = this.yarnMapper(item);
       finalJson.rand = Math.random();
-      App.store.load(App.YARNService, finalJson);
+      App.store.safeLoad(App.YARNService, finalJson);
     } else if (item && item.ServiceInfo && item.ServiceInfo.service_name == "MAPREDUCE2") {
       finalJson = this.mapreduce2Mapper(item);
       finalJson.rand = Math.random();
-      App.store.load(App.MapReduce2Service, finalJson);
+      App.store.safeLoad(App.MapReduce2Service, finalJson);
     } else if (item && item.ServiceInfo && item.ServiceInfo.service_name == "STORM") {
       finalJson = this.stormMapper(item);
       finalJson.rand = Math.random();
       this.mapQuickLinks(finalJson, item);
-      App.store.load(App.StormService, finalJson);
+      App.store.safeLoad(App.StormService, finalJson);
     } else if (item && item.ServiceInfo && item.ServiceInfo.service_name == "RANGER") {
       finalJson = this.rangerMapper(item);
       finalJson.rand = Math.random();
-      App.store.load(App.RangerService, finalJson);
+      App.store.safeLoad(App.RangerService, finalJson);
     } else {
       finalJson = this.parseIt(item, this.config);
       finalJson.rand = Math.random();

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/stack_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/stack_mapper.js b/ambari-web/app/mappers/stack_mapper.js
index b1a5faf..4b6a6f8 100644
--- a/ambari-web/app/mappers/stack_mapper.js
+++ b/ambari-web/app/mappers/stack_mapper.js
@@ -141,10 +141,9 @@ App.stackMapper = App.QuickDataMapper.create({
     stack.stack_services = servicesArray;
     stack.operating_systems = operatingSystemsArray;
 
-    App.store.commit();
-    App.store.loadMany(modelRepo, resultRepo);
-    App.store.loadMany(modelOS, resultOS);
-    App.store.loadMany(modelServices, resultServices);
-    App.store.load(modelStack, this.parseIt(stack, this.get('configStack')));
+    App.store.safeLoadMany(modelRepo, resultRepo);
+    App.store.safeLoadMany(modelOS, resultOS);
+    App.store.safeLoadMany(modelServices, resultServices);
+    App.store.safeLoad(modelStack, this.parseIt(stack, this.get('configStack')));
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/stack_service_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/stack_service_mapper.js b/ambari-web/app/mappers/stack_service_mapper.js
index 21c4db9..4bda89d 100644
--- a/ambari-web/app/mappers/stack_service_mapper.js
+++ b/ambari-web/app/mappers/stack_service_mapper.js
@@ -122,8 +122,8 @@ App.stackServiceMapper = App.QuickDataMapper.create({
       }
       result.push(this.parseIt(stackService, this.get('config')));
     }, this);
-    App.store.loadMany(this.get('component_model'), stackServiceComponents);
-    App.store.loadMany(model, result);
+    App.store.safeLoadMany(this.get('component_model'), stackServiceComponents);
+    App.store.safeLoadMany(model, result);
   },
 
   /**
@@ -136,7 +136,7 @@ App.stackServiceMapper = App.QuickDataMapper.create({
       records.forEach(function (rec) {
         Ember.run(this, function () {
           rec.deleteRecord();
-          App.store.commit();
+          App.store.fastCommit();
         });
       }, this);
     }, this);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/stack_upgrade_history_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/stack_upgrade_history_mapper.js b/ambari-web/app/mappers/stack_upgrade_history_mapper.js
index 823ae80..25e9d06 100644
--- a/ambari-web/app/mappers/stack_upgrade_history_mapper.js
+++ b/ambari-web/app/mappers/stack_upgrade_history_mapper.js
@@ -47,8 +47,7 @@ App.stackUpgradeHistoryMapper = App.QuickDataMapper.create({
       result.push(parseResult);
     }, this);
 
-    App.store.loadMany(this.get('model'), result);
-    App.store.commit();
+    App.store.safeLoadMany(this.get('model'), result);
     App.set('isStackUpgradeHistoryLoaded',true);
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/stack_version_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/stack_version_mapper.js b/ambari-web/app/mappers/stack_version_mapper.js
index bd37288..62187b0 100644
--- a/ambari-web/app/mappers/stack_version_mapper.js
+++ b/ambari-web/app/mappers/stack_version_mapper.js
@@ -82,7 +82,6 @@ App.stackVersionMapper = App.QuickDataMapper.create({
         resultStack.push(this.parseIt(stack, this.get('modelStack')));
       }, this);
     }
-    App.store.commit();
-    App.store.loadMany(modelStackVerion, resultStack);
+    App.store.safeLoadMany(modelStackVerion, resultStack);
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/target_cluster_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/target_cluster_mapper.js b/ambari-web/app/mappers/target_cluster_mapper.js
index f774213..29cb508 100644
--- a/ambari-web/app/mappers/target_cluster_mapper.js
+++ b/ambari-web/app/mappers/target_cluster_mapper.js
@@ -47,7 +47,7 @@ App.targetClusterMapper = App.QuickDataMapper.create({
       clustersToDelete.forEach(function (name) {
         this.deleteRecord(model.find().findProperty('name', name));
       }, this);
-      App.store.loadMany(model, result);
+      App.store.safeLoadMany(model, result);
     }
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/users_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/users_mapper.js b/ambari-web/app/mappers/users_mapper.js
index 80d7feb..896c043 100644
--- a/ambari-web/app/mappers/users_mapper.js
+++ b/ambari-web/app/mappers/users_mapper.js
@@ -43,7 +43,7 @@ App.usersMapper = App.QuickDataMapper.create({
         item.Users.operator = self.isOperator(item.permissions);
         item.Users.cluster_user = self.isClusterUser(item.permissions);
         result.push(self.parseIt(item, self.config));
-        App.store.loadMany(self.get('model'), result);
+        App.store.safeLoadMany(self.get('model'), result);
       }
     });
   },

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mappers/widget_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/widget_mapper.js b/ambari-web/app/mappers/widget_mapper.js
index 0991d4f..84cb757 100644
--- a/ambari-web/app/mappers/widget_mapper.js
+++ b/ambari-web/app/mappers/widget_mapper.js
@@ -51,8 +51,7 @@ App.widgetMapper = App.QuickDataMapper.create({
         result.push(this.parseIt(item.WidgetInfo, this.config));
       }, this);
 
-      App.store.commit();
-      App.store.loadMany(this.get('model'), result);
+      App.store.safeLoadMany(this.get('model'), result);
     }
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mixins/main/service/configs/config_overridable.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/main/service/configs/config_overridable.js b/ambari-web/app/mixins/main/service/configs/config_overridable.js
index 93dea5f..c1d96dc 100644
--- a/ambari-web/app/mixins/main/service/configs/config_overridable.js
+++ b/ambari-web/app/mixins/main/service/configs/config_overridable.js
@@ -153,8 +153,8 @@ App.ConfigOverridable = Em.Mixin.create({
             });
           } else {
             newConfigGroup.is_temporary = true;
-            App.store.load(App.ServiceConfigGroup, newConfigGroup);
-            App.store.commit();
+            App.store.safeLoad(App.ServiceConfigGroup, newConfigGroup);
+            App.store.fastCommit();
             newConfigGroup = App.ServiceConfigGroup.find(newConfigGroup.id);
             configGroups.pushObject(newConfigGroup);
             self.persistConfigGroups();
@@ -281,8 +281,8 @@ App.ConfigOverridable = Em.Mixin.create({
   postNewConfigurationGroupSuccess: function (response, opt, params) {
     var modelData = params.modelData;
     modelData.id = response.resources[0].ConfigGroup.id;
-    App.store.load(App.ServiceConfigGroup, modelData);
-    App.store.commit();
+    App.store.safeLoad(App.ServiceConfigGroup, modelData);
+    App.store.fastCommit();
     App.ServiceConfigGroup.deleteTemporaryRecords();
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/mixins/main/service/groups_mapping.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/main/service/groups_mapping.js b/ambari-web/app/mixins/main/service/groups_mapping.js
index 0781aa3..6c166c8 100644
--- a/ambari-web/app/mixins/main/service/groups_mapping.js
+++ b/ambari-web/app/mixins/main/service/groups_mapping.js
@@ -58,7 +58,6 @@ App.GroupsMappingMixin = Em.Mixin.create(App.TrackRequestMixin, {
    * @method saveConfigGroupsToModel
    */
   saveConfigGroupsToModel: function (data, opt, params) {
-    App.store.commit();
     App.configGroupsMapper.map(data, false, params.serviceNames.split(','));
     this.set('configGroupsAreLoaded', true);
     params.dfd.resolve();

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/models/user.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/user.js b/ambari-web/app/models/user.js
index 4dde8ec..5c14724 100644
--- a/ambari-web/app/models/user.js
+++ b/ambari-web/app/models/user.js
@@ -106,7 +106,7 @@ App.CreateUserForm = App.Form.extend({
     });
 
     if (this.get('className')) {
-      App.store.load(this.get('className'), App.dateTime(), formValues);
+      App.store.safeLoad(this.get('className'), App.dateTime(), formValues);
     }
 
     this.set('result', 1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/app/utils/http_client.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/http_client.js b/ambari-web/app/utils/http_client.js
index 9b8346c..0113679 100644
--- a/ambari-web/app/utils/http_client.js
+++ b/ambari-web/app/utils/http_client.js
@@ -85,11 +85,6 @@ App.HttpClient = Em.Object.create({
     var timeout = setTimeout(function () {
       if (xhr.readyState == 4) {
         if (xhr.status == 200) {
-          try {
-            App.store.commit();
-          } catch (err) {
-            console.warn('App.store.commit error:', err);
-          }
           var response = $.parseJSON(xhr.responseText);
           if (tmp_val.beforeMap) {
             tmp_val.beforeMap.call(self, response);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/test/controllers/main/admin_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin_test.js b/ambari-web/test/controllers/main/admin_test.js
index b2828d0..d2b3e2a 100644
--- a/ambari-web/test/controllers/main/admin_test.js
+++ b/ambari-web/test/controllers/main/admin_test.js
@@ -35,7 +35,7 @@ describe('MainAdminController', function () {
 
     it('Services do not match dependencies', function () {
       App.Service.find().clear();
-      App.store.load(App.Service, {
+      App.store.safeLoad(App.Service, {
         id: 'HDFS',
         service_name: 'HDFS'
       });
@@ -47,21 +47,21 @@ describe('MainAdminController', function () {
       expect(controller.get("isAccessAvailable")).to.be.false;
     });
     it('Only one YARN service installed', function () {
-      App.store.load(App.Service, {
+      App.store.safeLoad(App.Service, {
         id: 'YARN',
         service_name: 'YARN'
       });
       expect(controller.get("isAccessAvailable")).to.be.false;
     });
     it('TEZ and YARN services installed', function () {
-      App.store.load(App.Service, {
+      App.store.safeLoad(App.Service, {
         id: 'TEZ',
         service_name: 'TEZ'
       });
       expect(controller.get("isAccessAvailable")).to.be.false;
     });
     it('TEZ and YARN services, APP_TIMELINE_SERVER component installed', function () {
-      App.store.load(App.HostComponent, {
+      App.store.safeLoad(App.HostComponent, {
         id: 'APP_TIMELINE_SERVER_host1',
         component_name: 'APP_TIMELINE_SERVER'
       });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/test/controllers/main/host/details_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/host/details_test.js b/ambari-web/test/controllers/main/host/details_test.js
index 17b1eec..c7e1808 100644
--- a/ambari-web/test/controllers/main/host/details_test.js
+++ b/ambari-web/test/controllers/main/host/details_test.js
@@ -850,7 +850,7 @@ describe('App.MainHostDetailsController', function () {
   describe('#constructConfigUrlParams()', function () {
 
     function loadService(serviceName) {
-      App.store.load(App.Service, {
+      App.store.safeLoad(App.Service, {
         id: serviceName,
         service_name: serviceName
       });
@@ -905,7 +905,7 @@ describe('App.MainHostDetailsController', function () {
       App.HostComponent.find().clear();
       App.propertyDidChange('isHaEnabled');
       expect(controller.constructConfigUrlParams(data)).to.eql(['(type=core-site&tag=1)']);
-      App.store.load(App.HostComponent, {
+      App.store.safeLoad(App.HostComponent, {
         id: 'SECONDARY_NAMENODE_host1',
         component_name: 'SECONDARY_NAMENODE'
       });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/test/controllers/main/service/add_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/service/add_controller_test.js b/ambari-web/test/controllers/main/service/add_controller_test.js
index 715f46a..ffde1f7 100644
--- a/ambari-web/test/controllers/main/service/add_controller_test.js
+++ b/ambari-web/test/controllers/main/service/add_controller_test.js
@@ -317,7 +317,7 @@ describe('App.AddServiceController', function() {
         mock.db = value;
       });
       sinon.stub(this.controller, 'hasDependentSlaveComponent');
-      sinon.stub(App.store, 'commit', Em.K);
+      sinon.stub(App.store, 'fastCommit', Em.K);
       this.mockStackService = sinon.stub(App.StackService, 'find');
       this.mockService = sinon.stub(App.Service, 'find');
     });
@@ -328,7 +328,7 @@ describe('App.AddServiceController', function() {
       this.controller.hasDependentSlaveComponent.restore();
       this.mockStackService.restore();
       this.mockService.restore();
-      App.store.commit.restore();
+      App.store.fastCommit.restore();
     });
 
     var tests = [

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/test/controllers/main/service/item_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/service/item_test.js b/ambari-web/test/controllers/main/service/item_test.js
index 6430ab6..fff321c 100644
--- a/ambari-web/test/controllers/main/service/item_test.js
+++ b/ambari-web/test/controllers/main/service/item_test.js
@@ -347,7 +347,7 @@ describe('App.MainServiceItemController', function () {
       });
       it(test.m, function () {
         if (!test.default) {
-          App.store.load(App.Service, test.content);
+          App.store.safeLoad(App.Service, test.content);
         }
         mainServiceItemController.runSmokeTest({}).onPrimary();
         expect(mainServiceItemController.runSmokeTestPrimary.calledOnce).to.equal(test.startSmoke);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/test/mappers/alert_groups_mapper_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mappers/alert_groups_mapper_test.js b/ambari-web/test/mappers/alert_groups_mapper_test.js
index 9480e3d..60fee6f 100644
--- a/ambari-web/test/mappers/alert_groups_mapper_test.js
+++ b/ambari-web/test/mappers/alert_groups_mapper_test.js
@@ -87,7 +87,7 @@ describe('App.alertGroupsMapper', function () {
 
     beforeEach(function () {
 
-      sinon.stub(App.store, 'commit', Em.K);
+      sinon.stub(App.store, 'fastCommit', Em.K);
       sinon.stub(App.store, 'loadMany', function (type, content) {
         type.content = content;
       });
@@ -99,7 +99,7 @@ describe('App.alertGroupsMapper', function () {
 
     afterEach(function () {
 
-      App.store.commit.restore();
+      App.store.fastCommit.restore();
       App.store.loadMany.restore();
       App.alertGroupsMapper.set('model', App.AlertGroup);
       App.cache.previousAlertGroupsMap = {};

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/test/mixins/main/service/configs/config_overridable_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mixins/main/service/configs/config_overridable_test.js b/ambari-web/test/mixins/main/service/configs/config_overridable_test.js
index b1eb099..b562ffb 100644
--- a/ambari-web/test/mixins/main/service/configs/config_overridable_test.js
+++ b/ambari-web/test/mixins/main/service/configs/config_overridable_test.js
@@ -195,8 +195,8 @@ describe('App.ConfigOverridable', function () {
   describe("#postNewConfigurationGroupSuccess()", function () {
 
     beforeEach(function() {
-      sinon.stub(App.store, 'load');
-      sinon.stub(App.store, 'commit');
+      sinon.stub(App.store, 'safeLoad');
+      sinon.stub(App.store, 'fastCommit');
       sinon.stub(App.ServiceConfigGroup, 'deleteTemporaryRecords');
       configOverridable.postNewConfigurationGroupSuccess({
         resources: [
@@ -211,16 +211,16 @@ describe('App.ConfigOverridable', function () {
 
     afterEach(function() {
       App.ServiceConfigGroup.deleteTemporaryRecords.restore();
-      App.store.commit.restore();
-      App.store.load.restore();
+      App.store.fastCommit.restore();
+      App.store.safeLoad.restore();
     });
 
     it("App.store.load should be called", function() {
-      expect(App.store.load.calledWith(App.ServiceConfigGroup, {id: 'cg1'})).to.be.true;
+      expect(App.store.safeLoad.calledWith(App.ServiceConfigGroup, {id: 'cg1'})).to.be.true;
     });
 
     it("App.store.commit should be called", function() {
-      expect(App.store.commit.calledOnce).to.be.true;
+      expect(App.store.fastCommit.calledOnce).to.be.true;
     });
 
     it("App.ServiceConfigGroup.deleteTemporaryRecords should be called", function() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/test/models/host_component_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/host_component_test.js b/ambari-web/test/models/host_component_test.js
index d577005..5f4997d 100644
--- a/ambari-web/test/models/host_component_test.js
+++ b/ambari-web/test/models/host_component_test.js
@@ -21,7 +21,7 @@ require('models/host_component');
 
 describe('App.HostComponent', function() {
 
-  App.store.load(App.HostComponent, {
+  App.store.safeLoad(App.HostComponent, {
     id: 'COMP_host',
     component_name: 'COMP1'
   });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/test/models/host_stack_version_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/host_stack_version_test.js b/ambari-web/test/models/host_stack_version_test.js
index 702e284..014992d 100644
--- a/ambari-web/test/models/host_stack_version_test.js
+++ b/ambari-web/test/models/host_stack_version_test.js
@@ -62,7 +62,7 @@ describe('App.HostStackVersion', function () {
     });
     testCases.forEach(function (test) {
       it('status is ' + test.status, function () {
-        App.store.load(App.HostStackVersion, {
+        App.store.safeLoad(App.HostStackVersion, {
           id: 1,
           status: test.status
         });
@@ -99,7 +99,7 @@ describe('App.HostStackVersion', function () {
     });
     testCases.forEach(function (test) {
       it('status is ' + test.status, function () {
-        App.store.load(App.HostStackVersion, {
+        App.store.safeLoad(App.HostStackVersion, {
           id: 1,
           status: test.status
         });
@@ -113,14 +113,14 @@ describe('App.HostStackVersion', function () {
       App.HostStackVersion.find().clear();
     });
     it("status is CURRENT", function () {
-      App.store.load(App.HostStackVersion, {
+      App.store.safeLoad(App.HostStackVersion, {
         id: 1,
         status: 'CURRENT'
       });
       expect(App.HostStackVersion.find(1).get('isCurrent')).to.be.true;
     });
     it("status is not CURRENT", function () {
-      App.store.load(App.HostStackVersion, {
+      App.store.safeLoad(App.HostStackVersion, {
         id: 1,
         status: 'INSTALLED'
       });
@@ -133,14 +133,14 @@ describe('App.HostStackVersion', function () {
       App.HostStackVersion.find().clear();
     });
     it("status is INSTALLING", function () {
-      App.store.load(App.HostStackVersion, {
+      App.store.safeLoad(App.HostStackVersion, {
         id: 1,
         status: 'INSTALLING'
       });
       expect(App.HostStackVersion.find(1).get('isInstalling')).to.be.true;
     });
     it("status is not INSTALLING", function () {
-      App.store.load(App.HostStackVersion, {
+      App.store.safeLoad(App.HostStackVersion, {
         id: 1,
         status: 'INSTALLED'
       });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/test/models/rack_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/rack_test.js b/ambari-web/test/models/rack_test.js
index a63eaba..8c664c8 100644
--- a/ambari-web/test/models/rack_test.js
+++ b/ambari-web/test/models/rack_test.js
@@ -28,7 +28,7 @@ describe('App.Rack', function () {
     name: 'rack1'
   };
 
-  App.store.load(App.Rack, data);
+  App.store.safeLoad(App.Rack, data);
 
   describe('#liveHostsCount', function () {
 
@@ -38,7 +38,7 @@ describe('App.Rack', function () {
     });
 
     it('rack1 has three live hosts', function () {
-      App.store.load(App.Host, {
+      App.store.safeLoad(App.Host, {
         id: 'host3',
         host_name: 'host3',
         health_status: 'HEALTHY'

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/test/models/stack_service_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/stack_service_test.js b/ambari-web/test/models/stack_service_test.js
index bc101e0..2d76a8b 100644
--- a/ambari-web/test/models/stack_service_test.js
+++ b/ambari-web/test/models/stack_service_test.js
@@ -22,7 +22,7 @@ require('models/stack_service');
 
 describe('App.StackService', function () {
 
-  App.store.load(App.StackService, {
+  App.store.safeLoad(App.StackService, {
     id: 'S1'
   });
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a1f23ad4/ambari-web/test/utils/http_client_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/http_client_test.js b/ambari-web/test/utils/http_client_test.js
index 589c44e..48e26a9 100644
--- a/ambari-web/test/utils/http_client_test.js
+++ b/ambari-web/test/utils/http_client_test.js
@@ -272,7 +272,6 @@ describe('App.HttpClient', function () {
 
         beforeEach(function () {
           clock = sinon.useFakeTimers();
-          sinon.stub(App.store, 'commit');
           sinon.spy(xhr, 'abort');
           sinon.spy(mapper, 'map');
           sinon.spy(mock, 'errorHandler');
@@ -280,9 +279,6 @@ describe('App.HttpClient', function () {
           sinon.spy(App.HttpClient, 'onReady');
           xhr.readyState = item.readyState;
           xhr.status = item.status;
-          if (item.isCommitError) {
-            App.store.commit.throws();
-          }
           App.HttpClient.onReady(xhr, null, ajaxOptions, mapper, mock.errorHandler, 'url');
           clock.tick(10);
           xhr.readyState = 4;
@@ -291,7 +287,6 @@ describe('App.HttpClient', function () {
 
         afterEach(function () {
           clock.restore();
-          App.store.commit.restore();
           xhr.abort.restore();
           mapper.map.restore();
           mock.errorHandler.restore();
@@ -299,10 +294,6 @@ describe('App.HttpClient', function () {
           App.HttpClient.onReady.restore();
         });
 
-        it('App.store.commit call', function () {
-          expect(App.store.commit.callCount).to.equal(item.commitCallCount);
-        });
-
         it('mapping data', function () {
           expect(mapper.map.callCount).to.equal(item.mapCallCount);
         });


[04/50] [abbrv] ambari git commit: AMBARI-20019. WFM Dashboard Actions menu items not working properly.(Padma Priya N via gauravn7)

Posted by nc...@apache.org.
AMBARI-20019. WFM Dashboard Actions menu items not working properly.(Padma Priya N via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f91095b5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f91095b5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f91095b5

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: f91095b55e18c79a70ac58937b608f7d4cd5adeb
Parents: 2edfefc
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Wed Feb 15 17:09:08 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Wed Feb 15 17:10:02 2017 +0530

----------------------------------------------------------------------
 .../main/resources/ui/app/components/job-row.js | 26 +++++++++++++++---
 .../ui/app/components/search-create-new-bar.js  | 11 ++++----
 .../resources/ui/app/components/search-table.js |  8 ++++++
 .../ui/app/controllers/design/dashboardtab.js   |  8 +++---
 .../ui/app/routes/design/dashboardtab.js        | 15 ++++++++---
 .../src/main/resources/ui/app/styles/app.less   |  1 +
 .../templates/components/designer-workspace.hbs | 10 +++----
 .../ui/app/templates/components/job-row.hbs     | 28 ++++++++++----------
 .../app/templates/components/search-table.hbs   | 12 ++++++---
 9 files changed, 79 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f91095b5/contrib/views/wfmanager/src/main/resources/ui/app/components/job-row.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-row.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-row.js
index 2a5fc22..1dd580e 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-row.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-row.js
@@ -38,16 +38,33 @@ export default Ember.Component.extend({
     });
     return deferred.promise;
   },
+  jobType : Ember.computed('job', function(){
+    if(this.get('job').bundleJobId){
+      return 'bundle';
+    }else if(this.get('job').coordJobId){
+      return 'coord';
+    }else{
+      return 'wf';
+    }
+
+  }),
   actions : {
     doAction(action, id) {
+      this.set('showError', false);
+      this.set('showLoader', true);
       var deferred = Ember.RSVP.defer();
       deferred.promise.then(function(){
+        this.set('showLoader', false);
         if(action === 'start'){
           this.set('job.status','RUNNING');
-        }else if(action === 'suspend'){
+        }else if(action === 'suspend' && this.get('job.status') === 'RUNNING'){
           this.set('job.status','SUSPENDED');
-        }else if(action === 'resume'){
+        }else if(action === 'suspend' && this.get('job.status') === 'PREP'){
+          this.set('job.status','PREPSUSPENDED');
+        }else if(action === 'resume' && this.get('job.status') === 'SUSPENDED'){
           this.set('job.status','RUNNING');
+        }else if(action === 'resume' && this.get('job.status') === 'PREPSUSPENDED'){
+          this.set('job.status','PREP');
         }else if(action === 'stop'){
           this.set('job.status','STOPPED');
         }else if(action === 'rerun'){
@@ -55,7 +72,10 @@ export default Ember.Component.extend({
         }else if(action === 'kill'){
           this.set('job.status','KILLED');
         }
-      }.bind(this),function(){
+      }.bind(this)).catch(function(e){
+        this.set('showError', true);
+        this.set('showLoader', false);
+        console.error(e);
       }.bind(this));
       if(action === 'rerun' && this.get('job').bundleJobId){
         action = 'bundle-'+action;
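
The new jobType computed property is what lets the row tailor its buttons and actions: it infers the Oozie job type from whichever id field is present on the row object. A standalone sketch of that derivation, assuming a plain job object with optional bundleJobId/coordJobId fields:

    import Ember from 'ember';

    export default Ember.Component.extend({
      jobType: Ember.computed('job', function () {
        var job = this.get('job') || {};
        if (job.bundleJobId) {
          return 'bundle';   // row belongs to a bundle job
        } else if (job.coordJobId) {
          return 'coord';    // row was materialized by a coordinator
        }
        return 'wf';         // plain workflow job
      })
    });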

http://git-wip-us.apache.org/repos/asf/ambari/blob/f91095b5/contrib/views/wfmanager/src/main/resources/ui/app/components/search-create-new-bar.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/search-create-new-bar.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/search-create-new-bar.js
index e9bc44f..e6f7ec8 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/search-create-new-bar.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/search-create-new-bar.js
@@ -88,17 +88,16 @@ export default Ember.Component.extend(Ember.Evented,{
                     'Status:SUSPENDED',
                     'Status:SUCCEEDED',
                     'Status:KILLED',
-                    'Status:FAILED'];
+                    'Status:FAILED',
+                    'Status:PREP'];
       var substringMatcher = function(strs) {
         return function findMatches(q, cb) {
           var searchTerm =  self.$('#search-field').tagsinput('input').val();
           var originalLength = strs.length;
-          if(self.get('jobType') === 'wf'){
-            strs.push('Status:PREP');
+          if(self.get('jobType') && self.get('jobType') !== 'wf'){
+            strs.pushObjects(['Status:PREPSUSPENDED','Status:PREPPAUSED','Status:DONEWITHERROR']);
           }
-          strs.push('Name:'+ searchTerm);
-          strs.push('User:'+ searchTerm);
-          strs.push('Job id:'+ searchTerm);
+          strs.pushObjects(['Name:'+ searchTerm, 'User:'+ searchTerm, 'Job id:'+ searchTerm]);
           var newLength = strs.length;
           var matches, substrRegex;
           matches = [];
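
The suggestion list for the search typeahead is now split in two: 'Status:PREP' is always offered, while the states only coordinators and bundles can reach (PREPSUSPENDED, PREPPAUSED, DONEWITHERROR) are appended only when the active tab is not the workflow one. Simplified into a plain function (abridged status list, plain array instead of an Ember array), the per-keystroke assembly is roughly:

    function buildSuggestions(jobType, searchTerm) {
      // always-available suggestions (abridged; the full list sits above this hunk)
      var strs = ['Status:SUSPENDED', 'Status:SUCCEEDED', 'Status:KILLED',
                  'Status:FAILED', 'Status:PREP'];
      if (jobType && jobType !== 'wf') {
        // states only coordinator and bundle jobs can be in
        strs.push('Status:PREPSUSPENDED', 'Status:PREPPAUSED', 'Status:DONEWITHERROR');
      }
      // free-text suggestions built from whatever the user has typed so far
      strs.push('Name:' + searchTerm, 'User:' + searchTerm, 'Job id:' + searchTerm);
      return strs;
    }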

http://git-wip-us.apache.org/repos/asf/ambari/blob/f91095b5/contrib/views/wfmanager/src/main/resources/ui/app/components/search-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/search-table.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/search-table.js
index 714de66..b2f2a57 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/search-table.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/search-table.js
@@ -20,6 +20,7 @@ import Ember from 'ember';
 export default Ember.Component.extend({
   showBulkAction : false,
   history: Ember.inject.service(),
+  userInfo : Ember.inject.service('user-info'),
   currentPage : Ember.computed('jobs.start',function(){
     if(Ember.isBlank(this.get('jobs.start'))){
       return 1;
@@ -30,6 +31,13 @@ export default Ember.Component.extend({
   rendered : function(){
     this.sendAction('onSearch', this.get('history').getSearchParams());
   }.on('didInsertElement'),
+  isUpdated : function(){
+    if(this.get('showActionError')){
+      this.$('#alert').fadeOut(2500, ()=>{
+        this.set("showActionError", false);
+      });
+    }
+  }.on('didUpdate'),
   actions: {
     selectAll() {
       this.$(".cbox").click();

http://git-wip-us.apache.org/repos/asf/ambari/blob/f91095b5/contrib/views/wfmanager/src/main/resources/ui/app/controllers/design/dashboardtab.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/controllers/design/dashboardtab.js b/contrib/views/wfmanager/src/main/resources/ui/app/controllers/design/dashboardtab.js
index 9760ddb..98dcd70 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/controllers/design/dashboardtab.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/controllers/design/dashboardtab.js
@@ -46,10 +46,10 @@ export default Ember.Controller.extend({
       if(params.action.indexOf('rerun') > -1){
         jobActionParams.data = params.conf;
       }
-      Ember.$.ajax(jobActionParams).done(function(){
-        deferred.resolve();
-      }).fail(function(){
-        deferred.reject();
+      Ember.$.ajax(jobActionParams).done(function(response){
+        deferred.resolve(response);
+      }).fail(function(error){
+        deferred.reject(error);
       });
     },
     onBulkAction : function(params, deferred){
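
The deferred handed to onAction is no longer resolved or rejected empty-handed: the ajax payload now travels back to the row component, which logs the failure and shows its error state. A rough sketch of the hand-off after this change (runJobAction is an illustrative wrapper, not code from the patch; jobActionParams stands for the request options the controller assembles above):

    import Ember from 'ember';

    function runJobAction(jobActionParams) {
      var deferred = Ember.RSVP.defer();
      Ember.$.ajax(jobActionParams)
        .done(function (response) { deferred.resolve(response); })  // success payload
        .fail(function (error) { deferred.reject(error); });        // jqXHR error object
      return deferred.promise;
    }

    // The row component can then update the status optimistically on success and
    // surface the real error on failure, e.g.:
    // runJobAction(params).then(updateStatus).catch(function (e) { console.error(e); });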

http://git-wip-us.apache.org/repos/asf/ambari/blob/f91095b5/contrib/views/wfmanager/src/main/resources/ui/app/routes/design/dashboardtab.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/routes/design/dashboardtab.js b/contrib/views/wfmanager/src/main/resources/ui/app/routes/design/dashboardtab.js
index 95d06fc..0749d98 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/routes/design/dashboardtab.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/routes/design/dashboardtab.js
@@ -19,6 +19,7 @@ import Ember from 'ember';
 
 export default Ember.Route.extend({
   history: Ember.inject.service(),
+  userInfo : Ember.inject.service('user-info'),
   errorMessage : "Error",
   queryParams: {
     jobType: { refreshModel: true },
@@ -85,10 +86,16 @@ export default Ember.Route.extend({
       "&filter=", filter
     ].join(""),
     page = (start - 1) / len + 1;
-    return this.fetchJobs(url).catch(function(){
+    return Ember.RSVP.hash({
+      jobs : this.fetchJobs(url),
+      userName:this.get("userInfo").getUserData()
+    }).catch(function(e){
+      console.error(e);
       this.controllerFor('design.dashboardtab').set('model',{error : "Remote API Failed"});
       Ember.$("#loading").css("display", "none");
-    }.bind(this)).then(function (res) {
+    }.bind(this)).then(function (response) {
+      var res = response.jobs;
+      this.controllerFor('design.dashboardtab').set('userName', response.userName);
       if(!res){
         return;
       }
@@ -145,10 +152,10 @@ export default Ember.Route.extend({
       return res;
     }.bind(this));
   },
-  afterModel: function (model) {
+  afterModel: function () {
     Ember.$("#loading").css("display", "none");
   },
-  model: function (params) {
+  model: function () {
 
   }
 });
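
The route now loads the job listing and the logged-in user's name in parallel with Ember.RSVP.hash, so the dashboard can decide which actions the current user may run; a single catch handles either request failing. A minimal sketch of the pattern, assuming both arguments are promise-returning functions:

    import Ember from 'ember';

    function loadDashboardModel(fetchJobs, getUserData) {
      return Ember.RSVP.hash({
        jobs: fetchJobs(),        // promise for the filtered job listing
        userName: getUserData()   // promise for the current user's name
      }).catch(function (e) {
        // RSVP.hash rejects as soon as any member rejects
        console.error(e);
        return { error: 'Remote API Failed' };
      });
    }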

http://git-wip-us.apache.org/repos/asf/ambari/blob/f91095b5/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
index 3f24000..f4869e4 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/styles/app.less
@@ -1030,6 +1030,7 @@ height: 100vh;
 .PAUSEDWITHERROR,
 .PREPPAUSED,
 .RUNNINGWITHERROR,
+.PREPSUSPENDED,
 .SUSPENDED,
 .SUSPENDEDWITHERROR {
     color: #f0ad4e;

http://git-wip-us.apache.org/repos/asf/ambari/blob/f91095b5/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-workspace.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-workspace.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-workspace.hbs
index a4a0cc2..c8d8dc3 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-workspace.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/designer-workspace.hbs
@@ -26,13 +26,13 @@
         </div>
         <div class="col-sm-8">
           <div class="text-right pull-right paddingtop7">
-              <button {{action 'showDashboard'}} class="backto-dashboard btn btn-default" title="Workflow Dashboard">
+              <button id="dashboard-btn" {{action 'showDashboard'}} class="backto-dashboard btn btn-default" title="Workflow Dashboard">
                 <i class="fa fa-th marginright5"></i>Dashboard
               </button>
             <div class="btn-group">
             <div class="btn-group">
             <div class="dropdown create-wf-menu">
-              <button class="btn btn-default dropdown-toggle borderRightRadiusNone" type="button" data-toggle="dropdown">Create
+              <button id="create-workflows-btn" class="btn btn-default dropdown-toggle borderRightRadiusNone" type="button" data-toggle="dropdown">Create
                 <span class="caret"></span></button>
                 <ul class="dropdown-menu">
                   <li>
@@ -70,7 +70,7 @@
             </div>
             <div class="btn-group">
             <div class="dropdown create-wf-menu">
-              <button class="btn btn-default dropdown-toggle borderRadiusNone" {{action "showTopRecentList"}} type="button" data-toggle="dropdown">Recent
+              <button id="recent-workflows-btn" class="btn btn-default dropdown-toggle borderRadiusNone" {{action "showTopRecentList"}} type="button" data-toggle="dropdown">Recent
                 <span class="caret"></span></button>
                 <ul class="dropdown-menu proj-menu">
                  {{#if projList}}
@@ -88,7 +88,7 @@
                     </li>
                    {{/each}}
                   <li>
-                      <a {{action "showProjectManagerList"}} title="My Workflows" class="pointer">
+                      <a id="more-workflows-btn" {{action "showProjectManagerList"}} title="My Workflows" class="pointer">
                       More...
                       </a>
                   </li>
@@ -100,7 +100,7 @@
                 </ul>
              </div>
              </div>
-              <button {{action "showAssetManager" true}} class="btn btn-default" title="Manage Assets">
+              <button id="manage-assets-btn" {{action "showAssetManager" true}} class="btn btn-default" title="Manage Assets">
                   Manage Assets
               </button>
             </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f91095b5/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/job-row.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/job-row.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/job-row.hbs
index be9310e..4c18d5d 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/job-row.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/job-row.hbs
@@ -46,7 +46,7 @@
       <button title="Start" {{action 'doAction' 'start' job.id}} type="button" class="btn btn-default isOn" data-status="{{job.status}}">
         <i class="fa fa-play">  </i>
       </button>
-      {{else if (eq job.status "SUSPENDED")}}
+      {{else if (or (eq job.status "SUSPENDED")(eq job.status "PREPSUSPENDED"))}}
       <button title="Resume" {{action 'doAction' 'resume' job.id}} type="button" class="btn btn-default isOn" data-status="{{job.status}}">
         <i class="fa fa-play">  </i>
       </button>
@@ -55,38 +55,38 @@
         <i class="fa fa-play">  </i>
       </button>
       {{/if}}
-      {{#if (eq job.status "RUNNING")}}
-      <button {{action 'doAction' 'suspend' job.id}} type="button" class="btn btn-default isOn" data-status="{{job.status}}">
+      {{#if (or (eq job.status "RUNNING")(and (eq job.status "PREP") (eq jobType "coord")))}}
+      <button title="Suspend" {{action 'doAction' 'suspend' job.id}} type="button" class="btn btn-default isOn" data-status="{{job.status}}">
         <i class="fa fa-pause">  </i>
       </button>
       {{else}}
-      <button type="button" class="btn btn-default" data-status="{{job.status}}">
+      <button title="Suspend" type="button" class="btn btn-default" data-status="{{job.status}}">
         <i class="fa fa-pause">  </i>
       </button>
       {{/if}}
-      {{#if (or (eq job.status "FAILED") (eq job.status "KILLED"))}}
-      <button {{action 'doAction' 'rerun' job.id}} type="button" class="btn btn-default isOn">
+      {{#if (or (eq job.status "FAILED") (eq job.status "KILLED") (eq job.status "SUCCEEDED")(eq job.status "DONEWITHERROR"))}}
+      <button title="Rerun" {{action 'doAction' 'rerun' job.id}} type="button" class="btn btn-default isOn">
         <i class="fa fa-repeat">  </i>
       </button>
       {{else}}
-      <button {{action 'doAction' 'rerun' job.id}} type="button" class="btn btn-default">
+      <button title="Rerun" type="button" class="btn btn-default">
         <i class="fa fa-repeat">  </i>
       </button>
       {{/if}}
     </div>
-    {{#if (eq job.status "RUNNING")}}
-    <button {{action 'doAction' 'kill' job.id}} type="button" class="isOn btn btn-default btn-sm btn-kill">
+    {{#if (not (or (eq job.status "SUCCEEDED")(eq job.status "FAILED")(eq job.status "KILLED")))}}
+    <button title="Kill" {{action 'doAction' 'kill' job.id}} type="button" class="isOn btn btn-default btn-sm btn-kill">
       <i class="fa fa-close">  </i> Kill
     </button>
     {{else}}
-    <button {{action 'doAction' 'kill' job.id}} type="button" class="btn btn-default btn-sm btn-kill">
+    <button title="Kill" type="button" class="btn btn-default btn-sm btn-kill">
       <i class="fa fa-close">  </i> Kill
     </button>
     {{/if}}
-    {{#if showLoader}}
+  </div>
+  {{#if showLoader}}
     <div class='loading-container'>
-      {{spin-spinner lines=7 length=3 width=3 radius=3 top=-10 left=150}}
+      {{spin-spinner lines=7 length=3 width=3 radius=3 top=10 left=150}}
     </div>
-    {{/if}}
-  </div>
+  {{/if}}
 </td>
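
Restated as plain predicates, the enablement rules the template now encodes with the eq/or/and/not helpers are the following (illustrative only; the template above remains the source of truth):

    function canSuspend(job, jobType) {
      // a coordinator can be suspended while still in PREP
      return job.status === 'RUNNING' ||
             (job.status === 'PREP' && jobType === 'coord');
    }

    function canRerun(job) {
      return ['FAILED', 'KILLED', 'SUCCEEDED', 'DONEWITHERROR'].indexOf(job.status) > -1;
    }

    function canKill(job) {
      // anything not already in a terminal state can still be killed
      return ['SUCCEEDED', 'FAILED', 'KILLED'].indexOf(job.status) === -1;
    }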

http://git-wip-us.apache.org/repos/asf/ambari/blob/f91095b5/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/search-table.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/search-table.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/search-table.hbs
index eccc95d..5c15493 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/search-table.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/search-table.hbs
@@ -15,7 +15,11 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 }}
-
+{{#if showActionError}}
+<div id="alert"class="alert alert-danger alert-dismissible fade in workflow-error" role="alert">
+  <span>Action could not be completed.</span>
+</div>
+{{/if}}
 <table id="search-table" class="table search-table listing table-striped table-hover table-bordered" cellspacing="0" width="100%">
   <thead>
     <tr>
@@ -49,9 +53,9 @@
   </thead>
   <tbody>
     {{#if jobs.jobs}}
-    {{#each jobs.jobs as |job idx|}}
-    {{#job-row job=job onAction="onAction" showJobDetails="showJobDetails" rowSelected="rowSelected"}}{{/job-row}}
-    {{/each}}
+      {{#each jobs.jobs as |job idx|}}
+        {{#job-row job=job onAction="onAction" showJobDetails="showJobDetails" rowSelected="rowSelected" userName=userName showError=showActionError}}{{/job-row}}
+      {{/each}}
     {{/if}}
   </tbody>
 </table>


[10/50] [abbrv] ambari git commit: AMBARI-20025 Incorrect work of filters on Versions page of Admin View. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-20025 Incorrect work of filters on Versions page of Admin View. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d75756ef
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d75756ef
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d75756ef

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: d75756efa7bcecb09d0ee73db6c29b018ba964fc
Parents: 4584264
Author: ababiichuk <ab...@hortonworks.com>
Authored: Wed Feb 15 17:29:05 2017 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Wed Feb 15 18:41:52 2017 +0200

----------------------------------------------------------------------
 .../stackVersions/StackVersionsListCtrl.js      | 56 +++++++++++++-------
 .../ui/admin-web/app/scripts/services/Stack.js  | 15 ++++--
 .../stackVersions/StackversionsListCtrl_test.js | 38 +++++++++----
 3 files changed, 76 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d75756ef/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsListCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsListCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsListCtrl.js
index 1de8817..2990cef 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsListCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsListCtrl.js
@@ -77,29 +77,47 @@ angular.module('ambariAdminConsole')
       $scope.resetPagination();
     };
 
-    $scope.fetchRepoClusterStatus = function () {
-      var clusterName = ($scope.clusters && $scope.clusters.length > 0) ? $scope.clusters[0].Clusters.cluster_name : null; // only support one cluster at the moment
-      if (clusterName) {
-        angular.forEach($scope.repos, function (repo) {
-          Cluster.getRepoVersionStatus(clusterName, repo.id).then(function (response) {
-            repo.status = response.status;
-            repo.totalHosts = response.totalHosts;
-            repo.currentHosts = response.currentHosts;
-            repo.installedHosts = response.installedHosts;
-            repo.stackVersionId = response.stackVersionId;
-            repo.cluster = (repo.status == 'current' || repo.status == 'installed') ? clusterName : '';
+    $scope.fetchRepoClusterStatus = function (allRepos) {
+      if (allRepos && allRepos.length) {
+        var clusterName = ($scope.clusters && $scope.clusters.length > 0) ? $scope.clusters[0].Clusters.cluster_name : null, // only support one cluster at the moment
+          repos = [],
+          processedRepos = 0;
+        if (clusterName) {
+          angular.forEach(allRepos, function (repo) {
+            Cluster.getRepoVersionStatus(clusterName, repo.id).then(function (response) {
+              repo.cluster = (response.status == 'current' || response.status == 'installed') ? clusterName : '';
+              if (!$scope.filter.cluster.current.value || repo.cluster) {
+                repo.status = response.status;
+                repo.totalHosts = response.totalHosts;
+                repo.currentHosts = response.currentHosts;
+                repo.installedHosts = response.installedHosts;
+                repo.stackVersionId = response.stackVersionId;
+                repos.push(repo);
+              }
+              processedRepos++;
+              if (processedRepos === allRepos.length) {
+                var from = ($scope.pagination.currentPage - 1) * $scope.pagination.itemsPerPage;
+                var to = (repos.length - from > $scope.pagination.itemsPerPage) ? from + $scope.pagination.itemsPerPage : repos.length;
+                $scope.repos = repos.slice(from, to);
+                $scope.tableInfo.total = repos.length;
+                $scope.pagination.totalRepos = repos.length;
+                $scope.tableInfo.showed = to - from;
+              }
+            });
           });
-        });
+        }
+      } else {
+        $scope.repos = [];
+        $scope.tableInfo.total = 0;
+        $scope.pagination.totalRepos = 0;
+        $scope.tableInfo.showed = 0;
       }
     };
 
     $scope.fetchRepos = function () {
-      return Stack.allRepos($scope.filter, $scope.pagination).then(function (repos) {
+      return Stack.allRepos($scope.filter).then(function (repos) {
         $scope.isLoading = false;
-        $scope.pagination.totalRepos = repos.itemTotal;
-        $scope.repos = repos.items;
-        $scope.tableInfo.total = repos.itemTotal;
-        $scope.tableInfo.showed = repos.showed;
+        return repos.items;
       });
     };
 
@@ -161,8 +179,8 @@ angular.module('ambariAdminConsole')
         .then(function () {
           return $scope.fetchRepos();
         })
-        .then(function () {
-          $scope.fetchRepoClusterStatus();
+        .then(function (repos) {
+          $scope.fetchRepoClusterStatus(repos);
         });
     };
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d75756ef/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
index 84d7b33..e028906 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Stack.js
@@ -179,11 +179,16 @@ angular.module('ambariAdminConsole')
         // prepare response data with client side pagination
         var response = {};
         response.itemTotal = repos.length;
-        var from = (pagination.currentPage - 1) * pagination.itemsPerPage;
-        var to = (repos.length - from > pagination.itemsPerPage)? from + pagination.itemsPerPage : repos.length;
-        response.items = repos.slice(from, to);
-        response.showed = to - from;
-        deferred.resolve(response)
+        if (pagination) {
+          var from = (pagination.currentPage - 1) * pagination.itemsPerPage;
+          var to = (repos.length - from > pagination.itemsPerPage)? from + pagination.itemsPerPage : repos.length;
+          response.items = repos.slice(from, to);
+          response.showed = to - from;
+        } else {
+          response.items = repos;
+          response.showed = repos.length;
+        }
+        deferred.resolve(response);
       })
       .error(function (data) {
         deferred.reject(data);
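
With the cluster filter applied on the client, Stack.allRepos returns the complete repository list when no pagination object is passed, and the controller slices the filtered list itself once every per-repo status request has resolved. The slice arithmetic is the same in both places; pulled out into a plain function it reads:

    function paginate(repos, pagination) {
      var from = (pagination.currentPage - 1) * pagination.itemsPerPage;
      var to = (repos.length - from > pagination.itemsPerPage)
        ? from + pagination.itemsPerPage
        : repos.length;
      return {
        items: repos.slice(from, to),   // the rows actually rendered
        showed: to - from,              // rows on this page
        total: repos.length             // rows across all pages, after filtering
      };
    }

    // e.g. paginate(filteredRepos, { currentPage: 2, itemsPerPage: 10 })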

http://git-wip-us.apache.org/repos/asf/ambari/blob/d75756ef/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/stackVersions/StackversionsListCtrl_test.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/stackVersions/StackversionsListCtrl_test.js b/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/stackVersions/StackversionsListCtrl_test.js
index 6f168db..e6f2cc1 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/stackVersions/StackversionsListCtrl_test.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/test/unit/controllers/stackVersions/StackversionsListCtrl_test.js
@@ -18,24 +18,44 @@
 
 describe('#Cluster', function () {
   describe('StackVersionsListCtrl', function() {
-    var scope, ctrl, $httpBackend;
+    var scope, ctrl, Stack;
 
     beforeEach(module('ambariAdminConsole', function($provide) {
 
     }));
 
-    beforeEach(inject(function($rootScope, $controller, _$httpBackend_) {
-      scope = $rootScope.$new();
-      ctrl = $controller('StackVersionsListCtrl', {$scope: scope});
-      $httpBackend = _$httpBackend_;
-    }));
+    beforeEach(function () {
+      module('ambariAdminConsole');
+      inject(function($rootScope, $controller) {
+        scope = $rootScope.$new();
+        ctrl = $controller('StackVersionsListCtrl', {$scope: scope});
+      });
+    });
 
     describe('#fetchRepos()', function () {
 
-      it('saves list of stacks', function() {
-        scope.fetchRepos().then(function() {
-          expect(Array.isArray(scope.repos)).toBe(true);
+      var repos;
+
+      beforeEach(inject(function(_Stack_) {
+        Stack = _Stack_;
+        spyOn(Stack, 'allRepos').andReturn({
+          then: function (callback) {
+            repos = callback({
+              items: [{}, {}]
+            });
+          }
         });
+        repos = [];
+        scope.isLoading = true;
+        scope.fetchRepos();
+      }));
+
+      it('saves list of stacks', function() {
+        expect(repos.length).toEqual(2);
+      });
+
+      it('isLoading should be set to false', function() {
+        expect(scope.isLoading).toBe(false);
       });
 
     });
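
The rewritten spec no longer goes through $httpBackend; Stack.allRepos is replaced with a spy returning a hand-rolled thenable, so the controller's then-callback runs synchronously inside the test. In isolation the trick looks like this (Jasmine 1.x spy API, where andReturn is available; Jasmine 2.x spells it and.returnValue):

    spyOn(Stack, 'allRepos').andReturn({
      then: function (callback) {
        // invoke the success handler immediately with canned data,
        // so the spec needs no asynchronous waiting
        return callback({ items: [{}, {}] });
      }
    });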


[32/50] [abbrv] ambari git commit: AMBARI-20045 More than one version of jetty jars found in Ambari rpm (dsen)

Posted by nc...@apache.org.
AMBARI-20045 More than one version of jetty jars found in Ambari rpm (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1f1bfb8e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1f1bfb8e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1f1bfb8e

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 1f1bfb8e7b847300f2ec8b10fc9e2269f042f18b
Parents: 4eaec8e
Author: Dmytro Sen <ds...@apache.org>
Authored: Thu Feb 16 19:21:17 2017 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Thu Feb 16 19:21:17 2017 +0200

----------------------------------------------------------------------
 ambari-server/pom.xml | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1f1bfb8e/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 0508556..a3e945c 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -1486,6 +1486,10 @@
           <groupId>javax.servlet</groupId>
           <artifactId>servlet-api</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>


[50/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-feature-AMBARI-12556

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-12556


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/36620ba8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/36620ba8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/36620ba8

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 36620ba891066619b980814ab02a8de3e3abe5a3
Parents: 341cb12 f2cb1b6
Author: Nate Cole <nc...@hortonworks.com>
Authored: Fri Feb 17 17:05:46 2017 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Feb 17 17:05:46 2017 -0500

----------------------------------------------------------------------
 .../stackVersions/StackVersionsListCtrl.js      |  56 +-
 .../ui/admin-web/app/scripts/services/Stack.js  |  15 +-
 .../stackVersions/StackversionsListCtrl_test.js |  38 +-
 .../src/main/python/ambari_agent/security.py    |  17 +-
 .../test/python/ambari_agent/TestSecurity.py    |   2 +
 .../libraries/functions/constants.py            |   2 +
 .../functions/setup_ranger_plugin_xml.py        |  23 +-
 .../libraries/functions/solr_cloud_util.py      | 110 +++-
 .../org/apache/ambari/logsearch/LogSearch.java  |  51 +-
 .../ambari/logsearch/conf/ApiDocConfig.java     |   1 -
 .../ambari/logsearch/conf/SecurityConfig.java   |   1 -
 .../logsearch/conf/SolrAuditLogPropsConfig.java |   1 -
 .../conf/SolrServiceLogPropsConfig.java         |   3 -
 .../apache/ambari/logsearch/util/WebUtil.java   |  65 ++
 .../LogsearchKRBAuthenticationFilter.java       |   7 +-
 ...LogsearchSecurityContextFormationFilter.java |   1 -
 ...rchUsernamePasswordAuthenticationFilter.java |   2 -
 ambari-server/docs/configuration/index.md       |   2 +-
 ambari-server/pom.xml                           |   4 +
 .../actionmanager/ActionDBAccessorImpl.java     | 108 ++--
 .../server/actionmanager/ActionScheduler.java   |  31 +
 .../ambari/server/actionmanager/Request.java    |   8 +-
 .../ambari/server/actionmanager/Stage.java      |  25 +
 .../ambari/server/checks/CheckDescription.java  |   7 +
 .../checks/DatabaseConsistencyCheckHelper.java  | 320 +++++-----
 .../checks/DatabaseConsistencyCheckResult.java  |  50 ++
 .../checks/DatabaseConsistencyChecker.java      |   6 +-
 .../server/checks/RangerSSLConfigCheck.java     |  81 +++
 .../server/configuration/Configuration.java     |   2 +-
 .../AmbariCustomCommandExecutionHelper.java     |   8 +
 .../ambari/server/controller/AmbariServer.java  |  43 +-
 .../internal/AlertTargetResourceProvider.java   |  11 +-
 .../controller/internal/CalculatedStatus.java   | 390 +++++++++++-
 .../ambari/server/events/TaskCreateEvent.java   |  48 ++
 .../apache/ambari/server/events/TaskEvent.java  |  66 ++
 .../ambari/server/events/TaskUpdateEvent.java   |  35 ++
 .../listeners/tasks/TaskStatusListener.java     | 609 +++++++++++++++++++
 .../events/publishers/TaskEventPublisher.java   |  62 ++
 .../server/orm/dao/HostRoleCommandDAO.java      |  67 +-
 .../ambari/server/orm/dao/RequestDAO.java       |   8 +
 .../apache/ambari/server/orm/dao/StageDAO.java  |  32 +-
 .../orm/entities/HostRoleCommandEntity.java     |   4 +-
 .../server/orm/entities/PermissionEntity.java   |  30 +-
 .../server/orm/entities/RequestEntity.java      |  49 +-
 .../ambari/server/orm/entities/StageEntity.java |  70 ++-
 .../server/orm/entities/StageEntityPK.java      |  12 +
 .../authorization/RoleAuthorization.java        |   1 +
 .../internal/InternalAuthenticationToken.java   |  24 +-
 .../ambari/server/state/ConfigHelper.java       |   2 +
 .../server/state/cluster/ClusterImpl.java       |  45 +-
 .../ambari/server/topology/HostRequest.java     |   2 +-
 .../ambari/server/topology/TopologyManager.java |   6 +
 .../server/upgrade/AbstractUpgradeCatalog.java  |   2 +-
 .../server/upgrade/UpgradeCatalog250.java       |  53 ++
 .../server/upgrade/UpgradeCatalog300.java       |  70 +++
 ambari-server/src/main/python/ambari-server.py  |  13 +-
 .../main/python/ambari_server/serverSetup.py    |  11 +-
 .../src/main/python/ambari_server_main.py       |  24 +-
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |  10 +-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |  10 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |  10 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |  10 +-
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |  10 +-
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |  10 +-
 .../configuration/infra-solr-security-json.xml  |  82 ++-
 .../0.1.0/package/scripts/params.py             |   9 +-
 .../0.1.0/package/scripts/setup_infra_solr.py   |  17 +-
 .../templates/infra-solr-security.json.j2       |  68 +++
 .../properties/infra-solr-security.json.j2      |  68 ---
 .../0.1.0/package/scripts/ams_service.py        |   4 +
 .../ATLAS/0.1.0.2.3/package/scripts/metadata.py |  20 +
 .../ATLAS/0.1.0.2.3/package/scripts/params.py   |   3 +
 .../ATLAS/0.7.0.2.5/kerberos.json               |   3 +
 .../FLUME/1.4.0.2.0/package/scripts/flume.py    |   4 +-
 .../common-services/HDFS/2.1.0.2.0/widgets.json |  12 +-
 .../common-services/HDFS/3.0.0.3.0/widgets.json |  48 +-
 .../package/scripts/hive_server_interactive.py  |  37 +-
 .../hadoop-metrics2-hivemetastore.properties.j2 |   1 +
 .../hadoop-metrics2-hiveserver2.properties.j2   |   1 +
 .../templates/hadoop-metrics2-llapdaemon.j2     |   1 +
 .../hadoop-metrics2-llaptaskscheduler.j2        |   1 +
 .../LOGSEARCH/0.5.0/kerberos.json               |  39 +-
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |   5 +
 .../0.5.0/package/scripts/setup_logsearch.py    |  22 +-
 .../RANGER/0.4.0/package/scripts/params.py      |  21 +
 .../0.4.0/package/scripts/setup_ranger_xml.py   | 108 +++-
 .../0.5.0/configuration/ranger-admin-site.xml   |  12 +
 .../common-services/RANGER/0.6.0/kerberos.json  |   3 +
 .../0.7.0/configuration/ranger-admin-site.xml   |  31 +
 .../0.5.0.2.3/configuration/kms-env.xml         |  10 +
 .../RANGER_KMS/0.5.0.2.3/package/scripts/kms.py |  51 +-
 .../0.5.0.2.3/package/scripts/params.py         |  10 +
 .../SPARK/1.2.1/package/scripts/setup_livy.py   |   2 +-
 .../SPARK2/2.0.0/package/scripts/setup_livy2.py |   2 +-
 .../YARN/2.1.0.2.0/YARN_widgets.json            |  18 +-
 .../YARN/3.0.0.3.0/YARN_widgets.json            |  18 +-
 .../YARN/3.0.0.3.0/service_advisor.py           |  14 +-
 .../scripts/post-user-creation-hook.sh          |   2 +-
 .../HDP/2.0.6/configuration/cluster-env.xml     |  11 +
 .../HDP/2.0.6/properties/stack_features.json    |  10 +
 .../stacks/HDP/2.2/services/stack_advisor.py    |   1 -
 .../stacks/HDP/2.3/services/HDFS/widgets.json   |  48 +-
 .../HDP/2.3/services/YARN/YARN_widgets.json     |  18 +-
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |   7 +
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml |   5 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |   2 +
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |   7 +
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml |   5 +
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |   2 +
 .../HIVE/configuration/llap-cli-log4j2.xml      |  22 +-
 .../stacks/HDP/2.5/services/stack_advisor.py    |  13 +-
 .../stacks/HDP/2.5/upgrades/config-upgrade.xml  |   7 +
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml |   2 +
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml |  16 +-
 .../stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |   2 +
 .../ATLAS/configuration/atlas-log4j.xml         |  17 +-
 .../stacks/HDP/2.6/services/DRUID/kerberos.json |  12 +-
 .../services/HBASE/configuration/hbase-site.xml |  43 ++
 .../configuration/ranger-kms-site.xml           |  68 +++
 .../stacks/HDP/2.6/services/stack_advisor.py    |  20 +-
 .../HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml |   2 +
 .../actionmanager/TestActionDBAccessorImpl.java |   3 +-
 .../actionmanager/TestActionScheduler.java      |  71 ++-
 .../alerts/AmbariPerformanceRunnableTest.java   |   7 +-
 .../DatabaseConsistencyCheckHelperTest.java     | 143 ++++-
 .../server/checks/RangerSSLConfigCheckTest.java | 150 +++++
 .../AmbariManagementControllerTest.java         | 121 ++--
 .../internal/UpgradeResourceProviderTest.java   |   1 -
 .../UpgradeSummaryResourceProviderTest.java     |   1 -
 .../listeners/tasks/TaskStatusListenerTest.java | 164 +++++
 .../security/TestAuthenticationFactory.java     |  44 +-
 .../authorization/AuthorizationHelperTest.java  |  24 +-
 .../ambari/server/state/ConfigHelperTest.java   |   2 +
 .../cluster/ClusterEffectiveVersionTest.java    |   5 +-
 .../server/state/cluster/ClusterTest.java       |  78 ++-
 .../services/RetryUpgradeActionServiceTest.java |   1 -
 .../server/topology/TopologyManagerTest.java    |  35 +-
 .../server/upgrade/UpgradeCatalog242Test.java   |  27 +-
 .../server/upgrade/UpgradeCatalog250Test.java   |  71 ++-
 .../server/upgrade/UpgradeCatalog300Test.java   |   7 +
 .../src/test/python/TestAmbariServer.py         |  32 +-
 .../AMBARI_METRICS/test_metrics_collector.py    |   8 +
 .../python/stacks/2.0.6/FLUME/test_flume.py     |   4 +-
 .../stacks/2.3/ATLAS/test_metadata_server.py    |  10 +
 .../test/python/stacks/2.3/configs/secure.json  |   7 +-
 .../stacks/2.4/AMBARI_INFRA/test_infra_solr.py  |   4 +-
 .../stacks/2.4/LOGSEARCH/test_logsearch.py      |   3 +-
 .../stacks/2.5/RANGER/test_ranger_admin.py      |  27 +-
 .../stacks/2.5/RANGER/test_ranger_usersync.py   |   8 +-
 .../stacks/2.5/RANGER_KMS/test_kms_server.py    |  70 ++-
 .../python/stacks/2.5/SPARK/test_spark_livy.py  |   2 +-
 .../stacks/2.6/RANGER/test_ranger_admin.py      |  49 +-
 .../stacks/2.6/RANGER/test_ranger_tagsync.py    |  19 +-
 .../stacks/2.6/SPARK2/test_spark_livy2.py       |   2 +-
 .../stacks/2.6/common/test_stack_advisor.py     |  69 +++
 .../2.6/configs/ranger-admin-default.json       |   6 +-
 ambari-web/app/assets/test/tests.js             |   1 +
 ambari-web/app/controllers/main.js              |   5 +
 .../controllers/main/service/add_controller.js  |   2 +-
 .../controllers/main/service/info/configs.js    |  37 +-
 .../service/manage_config_groups_controller.js  |  14 +-
 .../app/controllers/wizard/step7_controller.js  |   6 +-
 ambari-web/app/data/dashboard_widgets.js        | 196 ++++++
 .../mappers/alert_definition_summary_mapper.js  |   4 -
 .../app/mappers/alert_definitions_mapper.js     |  15 +-
 ambari-web/app/mappers/alert_groups_mapper.js   |   3 +-
 .../app/mappers/alert_instances_mapper.js       |   2 +-
 .../app/mappers/alert_notification_mapper.js    |   2 +-
 ambari-web/app/mappers/cluster_mapper.js        |   2 +-
 .../app/mappers/components_state_mapper.js      |   6 +-
 .../app/mappers/configs/config_groups_mapper.js |   4 +-
 .../configs/service_config_version_mapper.js    |   3 +-
 ambari-web/app/mappers/configs/themes_mapper.js |  25 +-
 ambari-web/app/mappers/hosts_mapper.js          |  10 +-
 ambari-web/app/mappers/quicklinks_mapper.js     |   3 +-
 .../app/mappers/repository_version_mapper.js    |   9 +-
 ambari-web/app/mappers/root_service_mapper.js   |   5 +-
 ambari-web/app/mappers/server_data_mapper.js    |  40 +-
 ambari-web/app/mappers/service_mapper.js        |   3 +-
 .../app/mappers/service_metrics_mapper.js       |  20 +-
 ambari-web/app/mappers/stack_mapper.js          |   9 +-
 ambari-web/app/mappers/stack_service_mapper.js  |   6 +-
 .../app/mappers/stack_upgrade_history_mapper.js |   3 +-
 ambari-web/app/mappers/stack_version_mapper.js  |   3 +-
 ambari-web/app/mappers/target_cluster_mapper.js |   2 +-
 ambari-web/app/mappers/users_mapper.js          |   2 +-
 ambari-web/app/mappers/widget_mapper.js         |   3 +-
 ambari-web/app/messages.js                      |   2 +-
 .../configs/widgets/unique/num_llap_nodes.js    |   7 +-
 .../app/mixins/common/track_request_mixin.js    |  14 +-
 .../mixins/main/dashboard/widgets/editable.js   |  91 +--
 .../dashboard/widgets/editable_with_limit.js    | 106 +---
 .../widgets/single_numeric_threshold.js         | 127 +---
 .../main/service/configs/config_overridable.js  |   8 +-
 .../app/mixins/main/service/groups_mapping.js   |   1 -
 ambari-web/app/models/user.js                   |   2 +-
 ambari-web/app/routes/main.js                   |   4 +
 ambari-web/app/routes/view.js                   |   7 +
 ambari-web/app/styles/application.less          |   2 +-
 .../common/configs/config_history_flow.hbs      |   6 +-
 .../common/configs/service_config_category.hbs  |   4 +-
 .../main/dashboard/edit_widget_popup.hbs        |  20 +-
 .../edit_widget_popup_single_threshold.hbs      |  12 +-
 ambari-web/app/utils/date/date.js               |  26 +-
 ambari-web/app/utils/http_client.js             |   5 -
 ambari-web/app/views.js                         |   1 +
 .../views/common/configs/config_history_flow.js |  15 +-
 .../app/views/common/configs/controls_view.js   |   2 +-
 .../modal_popups/edit_dashboard_widget_popup.js | 436 +++++++++++++
 .../app/views/main/admin/service_auto_start.js  |   2 +-
 ambari-web/app/views/main/dashboard/widget.js   | 173 ++----
 ambari-web/app/views/main/dashboard/widgets.js  | 266 +-------
 .../main/dashboard/widgets/supervisor_live.js   |   3 +-
 .../views/main/dashboard/widgets/text_widget.js |  23 +-
 .../dashboard/widgets/uptime_text_widget.js     |   9 +-
 .../app/views/main/host/combo_search_box.js     |   1 +
 ambari-web/test/controllers/main/admin_test.js  |   8 +-
 .../test/controllers/main/host/details_test.js  |   4 +-
 .../main/service/add_controller_test.js         |   4 +-
 .../main/service/info/config_test.js            |  71 ++-
 .../test/controllers/main/service/item_test.js  |   2 +-
 .../test/mappers/alert_groups_mapper_test.js    |   4 +-
 ambari-web/test/mappers/service_mapper_test.js  |   2 +-
 .../service/configs/config_overridable_test.js  |  12 +-
 ambari-web/test/models/host_component_test.js   |   2 +-
 .../test/models/host_stack_version_test.js      |  12 +-
 ambari-web/test/models/rack_test.js             |   4 +-
 ambari-web/test/models/stack_service_test.js    |   2 +-
 ambari-web/test/utils/date/date_test.js         |  30 +-
 ambari-web/test/utils/http_client_test.js       |   9 -
 .../edit_dashboard_widget_popup_test.js         | 214 +++++++
 .../stack_upgrade/upgrade_history_view_test.js  |   8 +-
 .../test/views/main/dashboard/widget_test.js    | 112 +---
 .../widgets/hbase_master_uptime_test.js         |   4 +-
 .../dashboard/widgets/namenode_uptime_test.js   |   4 +-
 .../widgets/resource_manager_uptime_test.js     |   4 +-
 .../widgets/uptime_text_widget_test.js          |   4 +-
 .../test/views/main/dashboard/widgets_test.js   |  10 +-
 .../views/main/host/combo_search_box_test.js    |   6 +-
 contrib/views/commons/pom.xml                   |   8 -
 contrib/views/files/pom.xml                     |   8 -
 contrib/views/hive-next/pom.xml                 |  26 -
 .../app/controllers/index/history-query/logs.js |   1 +
 contrib/views/hive20/pom.xml                    |  62 +-
 .../resources/ui/app/components/radio-button.js |   1 -
 .../resources/ui/app/components/upload-table.js |   4 +-
 .../resources/ui/app/locales/en/translations.js |  14 +-
 .../main/resources/ui/app/mixins/ui-logger.js   |  18 +
 .../src/main/resources/ui/app/models/column.js  |   7 +-
 .../databases/database/tables/upload-table.js   |  79 +--
 .../templates/components/csv-format-params.hbs  |  18 +-
 .../templates/databases/database/tables/new.hbs |   2 +-
 .../HiveHistoryMigrationUtility.java            |  26 +-
 ...HiveHistoryQueryMigrationImplementation.java |   2 +-
 .../HiveSavedQueryMigrationImplementation.java  |   4 +-
 .../HiveSavedQueryMigrationUtility.java         |  29 +-
 .../pigjob/PigJobMigrationImplementation.java   |   5 +-
 .../pig/pigjob/PigJobMigrationUtility.java      | 377 ++++++------
 .../PigScriptMigrationImplementation.java       |   5 +-
 .../pigscript/PigScriptMigrationUtility.java    |  27 +-
 .../pigudf/PigUdfMigrationImplementation.java   |   5 +-
 .../pig/pigudf/PigUdfMigrationUtility.java      | 286 ++++-----
 .../scripts/models/MigrationResponse.java       |   6 +
 .../app/models/checkprogress.js                 |   3 +-
 .../app/routes/home-page/hive-history.js        |   7 +-
 .../app/routes/home-page/hive-saved-query.js    |   8 +-
 .../app/routes/home-page/pig-job.js             |   7 +-
 .../app/routes/home-page/pig-script.js          |   8 +-
 .../app/routes/home-page/pig-udf.js             |   8 +-
 .../app/templates/home-page/hive-history.hbs    |   8 +
 .../templates/home-page/hive-saved-query.hbs    |   8 +
 .../app/templates/home-page/pig-job.hbs         |   8 +
 .../app/templates/home-page/pig-script.hbs      |   8 +
 .../app/templates/home-page/pig-udf.hbs         | 187 +++---
 contrib/views/pig/pom.xml                       |   8 -
 contrib/views/pom.xml                           |  11 +
 contrib/views/utils/pom.xml                     |  16 -
 contrib/views/wfmanager/pom.xml                 |   8 -
 .../ui/app/components/bundle-config.js          |   8 +-
 .../resources/ui/app/components/coord-config.js |  37 +-
 .../ui/app/components/decision-add-branch.js    |   5 -
 .../ui/app/components/decision-config.js        |   2 +
 .../ui/app/components/designer-workspace.js     |  17 +-
 .../ui/app/components/flow-designer.js          |  52 +-
 .../resources/ui/app/components/java-action.js  |  21 +-
 .../resources/ui/app/components/job-config.js   |  17 +-
 .../resources/ui/app/components/job-details.js  |  20 +-
 .../main/resources/ui/app/components/job-row.js |  26 +-
 .../ui/app/components/preview-dialog.js         |   4 +
 .../main/resources/ui/app/components/save-wf.js |   2 +-
 .../ui/app/components/search-create-new-bar.js  |  11 +-
 .../resources/ui/app/components/search-table.js |   8 +
 .../resources/ui/app/components/sqoop-action.js |   8 +-
 .../resources/ui/app/components/ssh-action.js   |  29 +-
 .../ui/app/controllers/design/dashboardtab.js   |   8 +-
 .../ui/app/domain/actionjob_hanlder.js          |   4 +-
 .../coordinator/coordinator-xml-generator.js    |  12 +-
 .../coordinator/coordinator-xml-importer.js     |  43 +-
 .../ui/app/domain/cytoscape-flow-renderer.js    |  66 +-
 .../resources/ui/app/domain/cytoscape-style.js  |  30 +-
 .../resources/ui/app/domain/mapping-utils.js    |   2 +-
 .../ui/app/domain/workflow-json-importer.js     |   2 +-
 .../main/resources/ui/app/domain/workflow.js    |  20 +-
 .../src/main/resources/ui/app/routes/design.js  |   6 +-
 .../ui/app/routes/design/dashboardtab.js        |  15 +-
 .../src/main/resources/ui/app/styles/app.less   |  30 +-
 .../templates/components/designer-workspace.hbs |  10 +-
 .../app/templates/components/flow-designer.hbs  |  16 +-
 .../ui/app/templates/components/job-config.hbs  |   4 +-
 .../ui/app/templates/components/job-details.hbs |  46 +-
 .../ui/app/templates/components/job-row.hbs     |  28 +-
 .../app/templates/components/preview-dialog.hbs |   2 +-
 .../ui/app/templates/components/save-wf.hbs     |   6 +-
 .../components/search-create-new-bar.hbs        |   2 +-
 .../app/templates/components/search-table.hbs   |  14 +-
 .../app/templates/components/shell-action.hbs   |  36 +-
 .../app/templates/components/sqoop-action.hbs   |   2 +-
 .../ui/app/templates/components/ssh-action.hbs  |  12 +-
 .../components/workflow-action-editor.hbs       |   2 +-
 .../templates/components/workflow-actions.hbs   |   4 +-
 .../components/workflow-job-details.hbs         |  14 +-
 .../main/resources/ui/app/utils/common-utils.js |   3 +
 .../main/resources/ui/app/utils/constants.js    |   3 +-
 .../app/validators/duplicate-data-node-name.js  |   2 +-
 .../wfmanager/src/main/resources/ui/bower.json  |   3 +-
 .../src/main/resources/ui/ember-cli-build.js    |   1 +
 docs/pom.xml                                    |  34 +-
 327 files changed, 6758 insertions(+), 2924 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/36620ba8/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
index 3190390,0267a5e..483362b
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
@@@ -19,12 -19,25 +19,26 @@@ package org.apache.ambari.server.upgrad
  
  
  import java.sql.SQLException;
+ import java.util.ArrayList;
+ import java.util.Collection;
+ import java.util.List;
  import java.util.Map;
  
+ import javax.persistence.EntityManager;
+ 
  import org.apache.ambari.server.AmbariException;
+ import org.apache.ambari.server.actionmanager.HostRoleCommand;
+ import org.apache.ambari.server.actionmanager.HostRoleStatus;
+ import org.apache.ambari.server.actionmanager.Stage;
+ import org.apache.ambari.server.actionmanager.StageFactory;
  import org.apache.ambari.server.controller.AmbariManagementController;
 +import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
+ import org.apache.ambari.server.controller.internal.CalculatedStatus;
+ import org.apache.ambari.server.orm.DBAccessor;
  import org.apache.ambari.server.orm.dao.DaoUtils;
+ import org.apache.ambari.server.orm.dao.RequestDAO;
+ import org.apache.ambari.server.orm.entities.RequestEntity;
+ import org.apache.ambari.server.orm.entities.StageEntity;
  import org.apache.ambari.server.state.Cluster;
  import org.apache.ambari.server.state.Clusters;
  import org.apache.ambari.server.state.Config;
@@@ -86,7 -103,16 +106,17 @@@ public class UpgradeCatalog300 extends 
     */
    @Override
    protected void executeDDLUpdates() throws AmbariException, SQLException {
 +    addServiceComponentColumn();
+     updateStageTable();
+   }
+ 
+   protected void updateStageTable() throws SQLException {
+     dbAccessor.addColumn(STAGE_TABLE,
+         new DBAccessor.DBColumnInfo(STAGE_STATUS_COLUMN, String.class, 255, HostRoleStatus.PENDING, false));
+     dbAccessor.addColumn(STAGE_TABLE,
+         new DBAccessor.DBColumnInfo(STAGE_DISPLAY_STATUS_COLUMN, String.class, 255, HostRoleStatus.PENDING, false));
+     dbAccessor.addColumn(REQUEST_TABLE,
+         new DBAccessor.DBColumnInfo(REQUEST_DISPLAY_STATUS_COLUMN, String.class, 255, HostRoleStatus.PENDING, false));
    }
  
    /**
@@@ -126,14 -153,43 +157,53 @@@
  
    }
  
 +  /**
 +   * Updates the {@code servicecomponentdesiredstate} table.
 +   *
 +   * @throws SQLException
 +   */
 +  protected void addServiceComponentColumn() throws SQLException {
 +    dbAccessor.addColumn(UpgradeCatalog250.COMPONENT_TABLE,
 +        new DBColumnInfo("repo_state", String.class, 255, RepositoryVersionState.INIT.name(), false));
 +
 +  }
+   protected void setStatusOfStagesAndRequests() {
+     executeInTransaction(new Runnable() {
+       @Override
+       public void run() {
+         try {
+           RequestDAO requestDAO = injector.getInstance(RequestDAO.class);
+           StageFactory stageFactory = injector.getInstance(StageFactory.class);
+           EntityManager em = getEntityManagerProvider().get();
+           List<RequestEntity> requestEntities= requestDAO.findAll();
+           for (RequestEntity requestEntity: requestEntities) {
+             Collection<StageEntity> stageEntities= requestEntity.getStages();
+             List <HostRoleStatus> stageDisplayStatuses = new ArrayList<>();
+             List <HostRoleStatus> stageStatuses = new ArrayList<>();
+             for (StageEntity stageEntity: stageEntities) {
+               Stage stage = stageFactory.createExisting(stageEntity);
+               List<HostRoleCommand> hostRoleCommands = stage.getOrderedHostRoleCommands();
+               Map<HostRoleStatus, Integer> statusCount = CalculatedStatus.calculateStatusCountsForTasks(hostRoleCommands);
+               HostRoleStatus stageDisplayStatus = CalculatedStatus.calculateSummaryDisplayStatus(statusCount, hostRoleCommands.size(), stage.isSkippable());
+               HostRoleStatus stageStatus = CalculatedStatus.calculateStageStatus(hostRoleCommands, statusCount, stage.getSuccessFactors(), stage.isSkippable());
+               stageEntity.setStatus(stageStatus);
+               stageStatuses.add(stageStatus);
+               stageEntity.setDisplayStatus(stageDisplayStatus);
+               stageDisplayStatuses.add(stageDisplayStatus);
+               em.merge(stageEntity);
+             }
+             HostRoleStatus requestStatus = CalculatedStatus.getOverallStatusForRequest(stageStatuses);
+             requestEntity.setStatus(requestStatus);
+             HostRoleStatus requestDisplayStatus = CalculatedStatus.getOverallDisplayStatusForRequest(stageDisplayStatuses);
+             requestEntity.setDisplayStatus(requestDisplayStatus);
+             em.merge(requestEntity);
+           }
+         } catch (Exception e) {
+           LOG.warn("Setting status for stages and Requests threw exception. ", e);
+         }
+       }
+     });
+ 
+   }
+ 
  }
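
The merge above adds status and display_status columns to the stage and request tables (updateStageTable) and back-fills them in setStatusOfStagesAndRequests() by rolling task statuses up to a stage status and stage statuses up to a request status. The Python sketch below is only a rough rendering of that roll-up flow; the precedence list is a toy stand-in, not the real CalculatedStatus rules.

# Simplified illustration; NOT Ambari's CalculatedStatus logic.
# Later entries in this toy precedence list win when rolling statuses up.
PRECEDENCE = ["COMPLETED", "PENDING", "IN_PROGRESS", "TIMEDOUT", "ABORTED", "FAILED"]

def summary_status(statuses):
    """Placeholder roll-up: pick the dominant status from the toy precedence."""
    if not statuses:
        return "COMPLETED"
    return max(statuses, key=PRECEDENCE.index)

def backfill(requests):
    """Tasks -> stage status -> request status, as in setStatusOfStagesAndRequests()."""
    for request in requests:                                # requestDAO.findAll()
        stage_statuses = []
        for stage in request["stages"]:                     # requestEntity.getStages()
            status = summary_status(stage["task_statuses"])
            stage["status"] = status                        # stageEntity.setStatus(...)
            stage_statuses.append(status)
        request["status"] = summary_status(stage_statuses)  # requestEntity.setStatus(...)
    return requests

demo = [{"stages": [{"task_statuses": ["COMPLETED", "FAILED"]},
                    {"task_statuses": ["COMPLETED"]}]}]
print(backfill(demo)[0]["status"])  # FAILED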

http://git-wip-us.apache.org/repos/asf/ambari/blob/36620ba8/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/36620ba8/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/36620ba8/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/36620ba8/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/36620ba8/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/36620ba8/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/36620ba8/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
index f5e0bfe,ec001ec..3c933d9
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
@@@ -40,7 -42,9 +42,8 @@@ public class UpgradeCatalog300Test 
  
      upgradeCatalog300.addNewConfigurationsFromXml();
      upgradeCatalog300.showHcatDeletedUserMessage();
+     upgradeCatalog300.setStatusOfStagesAndRequests();
  
 -
      replay(upgradeCatalog300);
  
      upgradeCatalog300.executeDMLUpdates();
@@@ -50,16 -54,13 +53,20 @@@
  
    @Test
    public void testExecuteDDLUpdates() throws Exception {
 +
+     Method updateStageTable = UpgradeCatalog300.class.getDeclaredMethod("updateStageTable");
 +    Method addServiceComponentColumn = UpgradeCatalog300.class
 +        .getDeclaredMethod("addServiceComponentColumn");
 +
      UpgradeCatalog300 upgradeCatalog300 = createMockBuilder(UpgradeCatalog300.class)
 +        .addMockedMethod(addServiceComponentColumn)
+         .addMockedMethod(updateStageTable)
          .createMock();
  
 +    upgradeCatalog300.addServiceComponentColumn();
 +
+     upgradeCatalog300.updateStageTable();
+ 
      replay(upgradeCatalog300);
  
      upgradeCatalog300.executeDDLUpdates();
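
The test change above uses EasyMock's partial-mock pattern: only addServiceComponentColumn() and updateStageTable() are mocked, the expected calls are recorded, and executeDDLUpdates() is then exercised against the replayed mock. For readers more used to the Python side of Ambari, a rough unittest.mock analogue of the same idea follows; the class and method names are stand-ins, not Ambari code.

# Hypothetical analogue of the partial-mock pattern in UpgradeCatalog300Test:
# mock only the DDL helpers, run the entry point, then verify the helpers ran.
from unittest import mock

class FakeUpgradeCatalog:
    def add_service_component_column(self): ...
    def update_stage_table(self): ...
    def execute_ddl_updates(self):
        self.add_service_component_column()
        self.update_stage_table()

catalog = FakeUpgradeCatalog()
with mock.patch.object(catalog, "add_service_component_column") as add_column, \
     mock.patch.object(catalog, "update_stage_table") as update_stage:
    catalog.execute_ddl_updates()
    add_column.assert_called_once_with()
    update_stage.assert_called_once_with()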


[28/50] [abbrv] ambari git commit: AMBARI-20041. Custom job.properties are not retained in the workflow designer.(Venkata Sairam via gauravn7)

Posted by nc...@apache.org.
AMBARI-20041. Custom job.properties are not retained in the workflow designer.(Venkata Sairam via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ea82a59d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ea82a59d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ea82a59d

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: ea82a59de4b170a10178eb1cc6385a8010e774fc
Parents: fb322e2
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Thu Feb 16 18:33:24 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Thu Feb 16 18:33:54 2017 +0530

----------------------------------------------------------------------
 .../ui/app/components/designer-workspace.js          | 15 ++++++++++++---
 .../main/resources/ui/app/components/job-config.js   | 11 +++++++++--
 .../main/resources/ui/app/components/job-details.js  |  4 +++-
 .../src/main/resources/ui/app/routes/design.js       |  6 +++---
 .../ui/app/templates/components/flow-designer.hbs    |  2 +-
 5 files changed, 28 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ea82a59d/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
index f93e1b8..74de3b7 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/designer-workspace.js
@@ -53,13 +53,13 @@ export default Ember.Component.extend({
         this.get('tabs').forEach((tab)=>{
           this.get('tabCounter').set(tab.type, (this.get('tabCounter').get(tab.type)) + 1);
         }, this);
-        Ember.getOwner(this).lookup('route:design').on('openNewTab', function(path, type){
+        Ember.getOwner(this).lookup('route:design').on('openNewTab', function(path, type, isImportedFromDesigner, configuration){
           if(type === 'COORDINATOR'){
             this.createNewTab('coord', path);
           }else if(type === 'BUNDLE'){
             this.createNewTab('bundle', path);
           }else{
-            this.createNewTab('wf', path);
+            this.createNewTab('wf', path, isImportedFromDesigner, configuration);
           }
         }.bind(this));
 
@@ -103,9 +103,17 @@ export default Ember.Component.extend({
       }.bind(this));
     }, 1000);
   },
-  createNewTab : function(type, path){
+  setWFConfigProperties(tab ,isImportedFromDesigner, configuration){
+    if(isImportedFromDesigner) {
+      tab.isImportedFromDesigner = true;
+      tab.configuration = configuration;
+    }
+    return tab;
+  },
+  createNewTab : function(type, path, isImportedFromDesigner, configuration){
     var existingTab = this.get('tabs').findBy("filePath", path);
     if(existingTab && path){
+      existingTab = this.setWFConfigProperties(existingTab, isImportedFromDesigner, configuration);
       this.$('.nav-tabs a[href="#' + existingTab.id + '"]').tab("show");
       return;
     }
@@ -117,6 +125,7 @@ export default Ember.Component.extend({
     if(path){
       tab.path = path;
     }
+    tab = this.setWFConfigProperties(tab, isImportedFromDesigner, configuration);
     this.$('.nav-tabs li').removeClass('active');
     this.$('.tab-content .tab-pane').removeClass('active');
     this.get('tabs').pushObject(tab);

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea82a59d/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
index f364e30..00dedbb 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-config.js
@@ -112,7 +112,7 @@ export default Ember.Component.extend(Validations, {
 
   extractJobProperties(){
     var jobProperties = [];
-    var jobParams = this.get("jobConfigs").params;
+    var jobParams = this.get("jobConfigs").params, self = this;
     this.get("jobProps").forEach(function(value) {
       if (value!== Constants.defaultNameNodeValue && value!==Constants.rmDefaultValue){
         var propName = value.trim().substring(2, value.length-1);
@@ -125,9 +125,16 @@ export default Ember.Component.extend(Validations, {
             isRequired = true;
           }
         }
+        let val = null, tabData = self.get("tabInfo");
+        if(tabData && tabData.isImportedFromDesigner && tabData.configuration && tabData.configuration.settings && tabData.configuration.settings.configuration && tabData.configuration.settings.configuration.property) {
+          let propVal = tabData.configuration.settings.configuration.property.findBy('name', propName);
+          if(propVal) {
+            val = propVal.value
+          }
+        }
         var prop= Ember.Object.create({
           name: propName,
-          value: null,
+          value: val,
           isRequired : isRequired
         });
         jobProperties.push(prop);

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea82a59d/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
index fe60793..f659317 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/job-details.js
@@ -480,7 +480,9 @@ export default Ember.Component.extend({
         this.sendAction('showCoord', coordId);
       },
       editWorkflow(path){
-        this.sendAction('editWorkflow', path);
+        var x2js = new X2JS();
+        var configurationObj  = x2js.xml_str2json(this.get('model.conf'));
+        this.sendAction('editWorkflow', path, null, true, {"settings":configurationObj});
       }
     }
   });
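
editWorkflow() now runs the job's XML configuration through X2JS and passes the result along as {"settings": configurationObj}; job-config.js (above) then walks configuration.settings.configuration.property to pre-fill previously saved values. A small Python sketch of that assumed structure and lookup follows; the property names and values are invented for illustration.

# Assumed shape of the imported tab configuration after the X2JS conversion;
# the nested keys mirror the path job-config.js reads, the entries are made up.
imported_tab_configuration = {
    "settings": {
        "configuration": {
            "property": [
                {"name": "queueName", "value": "default"},
                {"name": "oozie.use.system.libpath", "value": "true"},
            ]
        }
    }
}

def lookup_saved_value(tab_configuration, prop_name):
    """Python rendering of the findBy('name', propName) lookup in job-config.js."""
    props = (tab_configuration.get("settings", {})
                              .get("configuration", {})
                              .get("property", []))
    match = next((p for p in props if p.get("name") == prop_name), None)
    return match["value"] if match else None

print(lookup_saved_value(imported_tab_configuration, "queueName"))  # default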

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea82a59d/contrib/views/wfmanager/src/main/resources/ui/app/routes/design.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/routes/design.js b/contrib/views/wfmanager/src/main/resources/ui/app/routes/design.js
index 25a3266..b5f0fad 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/routes/design.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/routes/design.js
@@ -71,8 +71,8 @@ export default Ember.Route.extend(Ember.Evented, {
       this.set('failedSchemaVersions', true);
       transition.retry();
     },
-    editWorkflow(path, type){
-      this.trigger('openNewTab', path, type);
+    editWorkflow(path, type, isImportedFromDesigner, configuration){
+      this.trigger('openNewTab', path, type, isImportedFromDesigner, configuration);
     },
     showDashboard(){
       this.controller.set('dashboardShown', true);
@@ -90,4 +90,4 @@ export default Ember.Route.extend(Ember.Evented, {
       this.transitionTo('design');
     }
   }
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea82a59d/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
index ff92f7d..0356e9c 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
@@ -284,7 +284,7 @@
   {{save-wf type='wf' close="closeSaveWorkflow" jobFilePath=workflowFilePath openFileBrowser="openFileBrowser" closeFileBrowser="closeFileBrowser" jobConfigs=configForSave}}
 {{/if}}
 {{#if showingWorkflowConfigProps}}
-  {{job-config type='wf' closeJobConfigs="closeWorkflowSubmitConfigs" jobFilePath=workflowFilePath openFileBrowser="openFileBrowser" closeFileBrowser="closeFileBrowser" jobConfigs=workflowSubmitConfigs isDryrun=dryrun}}
+  {{job-config type='wf' closeJobConfigs="closeWorkflowSubmitConfigs" jobFilePath=workflowFilePath tabInfo=tabInfo openFileBrowser="openFileBrowser" closeFileBrowser="closeFileBrowser" jobConfigs=workflowSubmitConfigs isDryrun=dryrun}}
 {{/if}}
 {{#if showGlobalConfig}}
   {{#global-config closeGlobalConfig="closeWorkflowGlobalProps" saveGlobalConfig="saveGlobalConfig" actionModel=globalConfig}}{{/global-config}}


[40/50] [abbrv] ambari git commit: AMBARI-19915 Add Ranger KMS SSL properties in ambari stack (mugdha)

Posted by nc...@apache.org.
AMBARI-19915 Add Ranger KMS SSL properties in ambari stack (mugdha)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b5014253
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b5014253
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b5014253

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: b5014253706c85cba77c781672443d94de3e5ef4
Parents: c395f69
Author: Mugdha Varadkar <mu...@apache.org>
Authored: Fri Feb 17 16:13:49 2017 +0530
Committer: Mugdha Varadkar <mu...@apache.org>
Committed: Fri Feb 17 16:26:25 2017 +0530

----------------------------------------------------------------------
 .../libraries/functions/constants.py            |  1 +
 .../0.5.0.2.3/configuration/kms-env.xml         | 10 +++
 .../RANGER_KMS/0.5.0.2.3/package/scripts/kms.py | 22 ++++++-
 .../0.5.0.2.3/package/scripts/params.py         |  8 ++-
 .../HDP/2.0.6/properties/stack_features.json    |  5 ++
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |  7 ++
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml |  4 ++
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |  1 +
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |  7 ++
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml |  4 ++
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |  1 +
 .../stacks/HDP/2.5/upgrades/config-upgrade.xml  |  7 ++
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml |  4 ++
 .../stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |  1 +
 .../configuration/ranger-kms-site.xml           | 68 +++++++++++++++++++
 .../stacks/HDP/2.6/services/stack_advisor.py    | 20 +++++-
 .../stacks/2.5/RANGER_KMS/test_kms_server.py    | 20 ++++++
 .../stacks/2.6/common/test_stack_advisor.py     | 69 ++++++++++++++++++++
 18 files changed, 256 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
index c31b883..f553ad2 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
@@ -117,3 +117,4 @@ class StackFeature:
   ATLAS_HDFS_SITE_ON_NAMENODE_HA='atlas_hdfs_site_on_namenode_ha'
   HIVE_INTERACTIVE_GA_SUPPORT='hive_interactive_ga'
   SECURE_RANGER_SSL_PASSWORD = "secure_ranger_ssl_password"
+  RANGER_KMS_SSL = "ranger_kms_ssl"

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-env.xml b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-env.xml
index 7dea07f..e5c0673 100644
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-env.xml
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/kms-env.xml
@@ -58,6 +58,16 @@
     <value>9292</value>
     <description/>
     <on-ambari-upgrade add="true"/>
+    <depends-on>
+      <property>
+        <type>ranger-kms-site</type>
+        <name>ranger.service.https.port</name>
+      </property>
+      <property>
+        <type>ranger-kms-site</type>
+        <name>ranger.service.https.attrib.ssl.enabled</name>
+      </property>
+    </depends-on>
   </property>
   <property>
     <name>create_db_user</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms.py
index 536ba76..1afe136 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms.py
@@ -140,6 +140,16 @@ def kms(upgrade_type=None):
       create_parents = True
     )
 
+    Directory("/etc/security/serverKeys",
+      create_parents = True,
+      cd_access = "a"
+    )
+
+    Directory("/etc/ranger/kms",
+      create_parents = True,
+      cd_access = "a"
+    )
+
     copy_jdbc_connector()
 
     File(format("/usr/lib/ambari-agent/{check_db_connection_jar_name}"),
@@ -270,6 +280,8 @@ def kms(upgrade_type=None):
     do_keystore_setup(params.credential_provider_path, params.masterkey_alias, params.kms_master_key_password)
     if params.stack_support_kms_hsm and params.enable_kms_hsm:
       do_keystore_setup(params.credential_provider_path, params.hms_partition_alias, unicode(params.hms_partition_passwd))
+    if params.stack_supports_ranger_kms_ssl and params.ranger_kms_ssl_enabled:
+      do_keystore_setup(params.ranger_kms_cred_ssl_path, params.ranger_kms_ssl_keystore_alias, params.ranger_kms_ssl_passwd)
 
     # remove plain-text password from xml configs
     dbks_site_copy = {}
@@ -288,9 +300,17 @@ def kms(upgrade_type=None):
       mode=0644
     )
 
+    ranger_kms_site_copy = {}
+    ranger_kms_site_copy.update(params.config['configurations']['ranger-kms-site'])
+    if params.stack_supports_ranger_kms_ssl:
+      # remove plain-text password from xml configs
+      for prop in params.ranger_kms_site_password_properties:
+        if prop in ranger_kms_site_copy:
+          ranger_kms_site_copy[prop] = "_"
+
     XmlConfig("ranger-kms-site.xml",
       conf_dir=params.kms_conf_dir,
-      configurations=params.config['configurations']['ranger-kms-site'],
+      configurations=ranger_kms_site_copy,
       configuration_attributes=params.config['configuration_attributes']['ranger-kms-site'],
       owner=params.kms_user,
       group=params.kms_group,
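
The hunk above copies ranger-kms-site and blanks its password property before XmlConfig renders ranger-kms-site.xml, the same mask-before-render pattern already applied to dbks-site. A minimal standalone sketch of that pattern, with an illustrative helper name and sample values:

# Mask-before-render sketch; the real property list comes from params.py.
ranger_kms_site_password_properties = ["ranger.service.https.attrib.keystore.pass"]

def mask_passwords(config, password_props, mask="_"):
    """Return a copy of the config dict with password properties blanked out."""
    masked = dict(config)
    for prop in password_props:
        if prop in masked:
            masked[prop] = mask
    return masked

ranger_kms_site = {
    "ranger.service.https.attrib.ssl.enabled": "true",
    "ranger.service.https.attrib.keystore.pass": "rangerkms",
}
print(mask_passwords(ranger_kms_site, ranger_kms_site_password_properties))
# {'ranger.service.https.attrib.ssl.enabled': 'true', 'ranger.service.https.attrib.keystore.pass': '_'}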

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
index 8473160..dc830d5 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
@@ -48,6 +48,7 @@ stack_support_kms_hsm = check_stack_feature(StackFeature.RANGER_KMS_HSM_SUPPORT,
 stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
 stack_supports_pid = check_stack_feature(StackFeature.RANGER_KMS_PID_SUPPORT, version_for_stack_feature_checks)
 stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
+stack_supports_ranger_kms_ssl = check_stack_feature(StackFeature.RANGER_KMS_SSL, version_for_stack_feature_checks)
 
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 security_enabled = config['configurations']['cluster-env']['security_enabled']
@@ -279,4 +280,9 @@ if security_enabled:
 
 plugin_audit_password_property = 'xasecure.audit.destination.db.password'
 kms_plugin_password_properties = ['xasecure.policymgr.clientssl.keystore.password', 'xasecure.policymgr.clientssl.truststore.password']
-dbks_site_password_properties = ['ranger.db.encrypt.key.password', 'ranger.ks.jpa.jdbc.password', 'ranger.ks.hsm.partition.password']
\ No newline at end of file
+dbks_site_password_properties = ['ranger.db.encrypt.key.password', 'ranger.ks.jpa.jdbc.password', 'ranger.ks.hsm.partition.password']
+ranger_kms_site_password_properties = ['ranger.service.https.attrib.keystore.pass']
+ranger_kms_cred_ssl_path = config['configurations']['ranger-kms-site']['ranger.credential.provider.path']
+ranger_kms_ssl_keystore_alias = config['configurations']['ranger-kms-site']['ranger.service.https.attrib.keystore.credential.alias']
+ranger_kms_ssl_passwd = config['configurations']['ranger-kms-site']['ranger.service.https.attrib.keystore.pass']
+ranger_kms_ssl_enabled = config['configurations']['ranger-kms-site']['ranger.service.https.attrib.ssl.enabled']
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
index 5e173b7..bc2e7a7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
@@ -387,6 +387,11 @@
       "name": "secure_ranger_ssl_password",
       "description": "Securing Ranger Admin and Usersync SSL and Trustore related passwords in jceks",
       "min_version": "2.6.0.0"
+    },
+    {
+      "name": "ranger_kms_ssl",
+      "description": "Ranger KMS SSL properties in ambari stack",
+      "min_version": "2.6.0.0"
     }
   ]
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
index 478f9b4..ba155ab 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
@@ -338,6 +338,13 @@
             <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.kms-audit.MaxFileSize = {{ranger_kms_audit_log_maxfilesize}}MB"/>
             <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.kms-audit.MaxBackupIndex = {{ranger_kms_audit_log_maxbackupindex}}"/>
           </definition>
+          <definition xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl">
+            <type>ranger-kms-site</type>
+            <transfer operation="delete" delete-key="ranger.https.attrib.keystore.file"
+              if-type="ranger-kms-site" if-key="ranger.service.https.attrib.keystore.file" if-key-state="present"/>
+            <transfer operation="delete" delete-key="ranger.service.https.attrib.clientAuth"
+              if-type="ranger-kms-site" if-key="ranger.service.https.attrib.client.auth" if-key-state="present"/>
+          </definition>
         </changes>
       </component>
     </service>
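
As read here, the new hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl definition drops each legacy duplicate key only when the corresponding canonical ranger-kms-site key is already present. A small Python sketch of that conditional delete over a flat property dict; this illustrates the rule as written and is not Ambari's upgrade engine.

# Toy model of one <transfer operation="delete" ... if-key-state="present"/> rule.
def apply_delete_transfer(config, delete_key, if_key, if_key_state="present"):
    """Delete delete_key when if_key is (or is not) present, per if_key_state."""
    condition_met = (if_key in config) if if_key_state == "present" else (if_key not in config)
    if condition_met:
        config.pop(delete_key, None)
    return config

ranger_kms_site = {
    "ranger.service.https.attrib.keystore.file": "/etc/security/serverKeys/ranger-kms-keystore.jks",
    "ranger.https.attrib.keystore.file": "/etc/security/serverKeys/ranger-kms-keystore.jks",
}
apply_delete_transfer(ranger_kms_site,
                      delete_key="ranger.https.attrib.keystore.file",
                      if_key="ranger.service.https.attrib.keystore.file")
print(sorted(ranger_kms_site))  # only the ranger.service.https.* key remains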

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
index d5ddeeb..e6a3c94 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
@@ -556,6 +556,10 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Apply config changes for Ranger KMS">
+        <task xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl"/>
+      </execute-stage>
+
       <!-- KNOX -->
       <execute-stage service="KNOX" component="KNOX_GATEWAY" title="Apply config changes for Knox Gateway">
         <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_knox_audit_db"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
index 88486e6..7d8438f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
@@ -654,6 +654,7 @@
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kms_audit_db" />
           <task xsi:type="configure" id="kms_log4j_parameterize" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl" />
           <task xsi:type="execute" hosts="any" sequential="true">
             <summary>Upgrading Ranger KMS database schema</summary>
             <script>scripts/kms_server.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index 18f5fa1..d19bb76 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -229,6 +229,13 @@
             <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.kms-audit.MaxFileSize = {{ranger_kms_audit_log_maxfilesize}}MB"/>
             <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.kms-audit.MaxBackupIndex = {{ranger_kms_audit_log_maxbackupindex}}"/>
           </definition>
+          <definition xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl">
+            <type>ranger-kms-site</type>
+            <transfer operation="delete" delete-key="ranger.https.attrib.keystore.file"
+              if-type="ranger-kms-site" if-key="ranger.service.https.attrib.keystore.file" if-key-state="present"/>
+            <transfer operation="delete" delete-key="ranger.service.https.attrib.clientAuth"
+              if-type="ranger-kms-site" if-key="ranger.service.https.attrib.client.auth" if-key-state="present"/>
+          </definition>
         </changes>
       </component>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
index 94b19c6..69a894c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
@@ -563,6 +563,10 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Apply config changes for Ranger KMS">
+        <task xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl"/>
+      </execute-stage>
+
       <!--ATLAS-->
       <execute-stage service="ATLAS" component="ATLAS_SERVER" title="Parameterizing Atlas Log4J Properties">
         <task xsi:type="configure" id="atlas_log4j_parameterize">

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
index 626bc63..0c9a8ea 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
@@ -643,6 +643,7 @@
         <pre-upgrade>
           <task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_kms_audit_db" />
           <task xsi:type="configure" id="kms_log4j_parameterize" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl" />
           <task xsi:type="execute" hosts="any" sequential="true">
             <summary>Upgrading Ranger KMS database schema</summary>
             <script>scripts/kms_server.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
index 15837df..7236186 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
@@ -270,6 +270,13 @@
         <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.kms-audit.MaxFileSize = {{ranger_kms_audit_log_maxfilesize}}MB"/>
         <replace key="content" find="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.kms-audit=org.apache.log4j.DailyRollingFileAppender&#xA;log4j.appender.kms-audit.MaxBackupIndex = {{ranger_kms_audit_log_maxbackupindex}}"/>
       </definition>
+      <definition xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl">
+        <type>ranger-kms-site</type>
+        <transfer operation="delete" delete-key="ranger.https.attrib.keystore.file"
+          if-type="ranger-kms-site" if-key="ranger.service.https.attrib.keystore.file" if-key-state="present"/>
+        <transfer operation="delete" delete-key="ranger.service.https.attrib.clientAuth"
+          if-type="ranger-kms-site" if-key="ranger.service.https.attrib.client.auth" if-key-state="present"/>
+      </definition>
     </changes>
     </component>
     </service>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index 62991e6..4fb68ed 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -402,6 +402,10 @@
         </task>
       </execute-stage>
 
+      <execute-stage service="RANGER_KMS" component="RANGER_KMS_SERVER" title="Apply config changes for Ranger KMS">
+        <task xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl"/>
+      </execute-stage>
+
       <!--ATLAS-->
       <execute-stage service="ATLAS" component="ATLAS_SERVER" title="Parameterizing Atlas Log4J Properties">
         <task xsi:type="configure" id="atlas_log4j_parameterize">

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index 818a6c0..0a02734 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -553,6 +553,7 @@
       <component name="RANGER_KMS_SERVER">
         <pre-upgrade>
           <task xsi:type="configure" id="kms_log4j_parameterize" />
+          <task xsi:type="configure" id="hdp_2_6_0_0_remove_ranger_kms_duplicate_ssl" />
           <task xsi:type="execute" hosts="any" sequential="true">
             <summary>Upgrading Ranger KMS database schema</summary>
             <script>scripts/kms_server.py</script>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.6/services/RANGER_KMS/configuration/ranger-kms-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/RANGER_KMS/configuration/ranger-kms-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/RANGER_KMS/configuration/ranger-kms-site.xml
new file mode 100644
index 0000000..cd63e6e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/RANGER_KMS/configuration/ranger-kms-site.xml
@@ -0,0 +1,68 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+  <property>
+    <name>ranger.service.https.attrib.keystore.file</name>
+    <value>/etc/security/serverKeys/ranger-kms-keystore.jks</value>
+    <on-ambari-upgrade add="false"/>
+    <description/>
+  </property>
+  <property>
+    <name>ranger.service.https.attrib.client.auth</name>
+    <value>want</value>
+    <on-ambari-upgrade add="false"/>
+    <description/>
+  </property>
+  <property>
+    <name>ranger.service.https.attrib.keystore.keyalias</name>
+    <value>rangerkms</value>
+    <on-ambari-upgrade add="false"/>
+    <description/>
+  </property>
+  <property>
+    <name>ranger.service.https.attrib.keystore.pass</name>
+    <value>rangerkms</value>
+    <property-type>PASSWORD</property-type>
+    <value-attributes>
+      <type>password</type>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+    <description/>
+  </property>
+  <property>
+    <name>ranger.credential.provider.path</name>
+    <value>/etc/ranger/kms/rangerkms.jceks</value>
+    <on-ambari-upgrade add="false"/>
+    <description/>
+  </property>
+  <property>
+    <name>ranger.service.https.attrib.keystore.credential.alias</name>
+    <value>keyStoreCredentialAlias</value>
+    <on-ambari-upgrade add="false"/>
+    <description/>
+  </property>
+  <property>
+    <name>ajp.enabled</name>
+    <value>false</value>
+    <on-ambari-upgrade add="false"/>
+    <description/>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
index 969c3dd..d8413b6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/stack_advisor.py
@@ -33,7 +33,8 @@ class HDP26StackAdvisor(HDP25StackAdvisor):
           "DRUID": self.recommendDruidConfigurations,
           "ATLAS": self.recommendAtlasConfigurations,
           "TEZ": self.recommendTezConfigurations,
-          "RANGER": self.recommendRangerConfigurations
+          "RANGER": self.recommendRangerConfigurations,
+          "RANGER_KMS": self.recommendRangerKMSConfigurations
       }
       parentRecommendConfDict.update(childRecommendConfDict)
       return parentRecommendConfDict
@@ -301,3 +302,20 @@ class HDP26StackAdvisor(HDP25StackAdvisor):
                             "Need to set ranger.usersync.group.searchenabled as true, as ranger.usersync.ldap.deltasync is enabled")})
 
     return self.toConfigurationValidationProblems(validationItems, "ranger-ugsync-site")
+
+  def recommendRangerKMSConfigurations(self, configurations, clusterData, services, hosts):
+    super(HDP26StackAdvisor, self).recommendRangerKMSConfigurations(configurations, clusterData, services, hosts)
+    putRangerKmsEnvProperty = self.putProperty(configurations, "kms-env", services)
+
+    ranger_kms_ssl_enabled = False
+    ranger_kms_ssl_port = "9393"
+    if 'ranger-kms-site' in services['configurations'] and 'ranger.service.https.attrib.ssl.enabled' in services['configurations']['ranger-kms-site']['properties']:
+      ranger_kms_ssl_enabled = services['configurations']['ranger-kms-site']['properties']['ranger.service.https.attrib.ssl.enabled'].lower() == "true"
+
+    if 'ranger-kms-site' in services['configurations'] and 'ranger.service.https.port' in services['configurations']['ranger-kms-site']['properties']:
+      ranger_kms_ssl_port = services['configurations']['ranger-kms-site']['properties']['ranger.service.https.port']
+
+    if ranger_kms_ssl_enabled:
+      putRangerKmsEnvProperty("kms_port", ranger_kms_ssl_port)
+    else:
+      putRangerKmsEnvProperty("kms_port", "9292")
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py b/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
index c2fc270..6f41b6d 100644
--- a/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
+++ b/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
@@ -208,6 +208,16 @@ class TestRangerKMS(RMFTestCase):
       create_parents = True
     )
 
+    self.assertResourceCalled('Directory', '/etc/security/serverKeys',
+      create_parents = True,
+      cd_access = "a",
+    )
+
+    self.assertResourceCalled('Directory', '/etc/ranger/kms',
+      create_parents = True,
+      cd_access = "a",
+    )
+
     self.assertResourceCalled('File', '/usr/hdp/current/ranger-kms/ews/webapp/lib/mysql-connector-java-old.jar',
         action = ['delete'],
     )
@@ -559,6 +569,16 @@ class TestRangerKMS(RMFTestCase):
       create_parents = True
     )
 
+    self.assertResourceCalled('Directory', '/etc/security/serverKeys',
+      create_parents = True,
+      cd_access = "a",
+    )
+
+    self.assertResourceCalled('Directory', '/etc/ranger/kms',
+      create_parents = True,
+      cd_access = "a",
+    )
+
     self.assertResourceCalled('File', '/usr/hdp/current/ranger-kms/ews/webapp/lib/mysql-connector-java-old.jar',
         action = ['delete'],
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5014253/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
index b7f8cbb..c15eaf1 100644
--- a/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.6/common/test_stack_advisor.py
@@ -780,6 +780,75 @@ class TestHDP26StackAdvisor(TestCase):
     self.stackAdvisor.recommendRangerConfigurations(recommendedConfigurations, clusterData, services, None)
     self.assertEquals(recommendedConfigurations, expected)
 
+  def test_recommendRangerKMSConfigurations(self):
+    clusterData = {}
+    services = {
+      "ambari-server-properties": {
+        "ambari-server.user": "root"
+        },
+      "Versions": {
+        "stack_version" : "2.6",
+        },
+      "services": [
+        {
+          "StackServices": {
+            "service_name": "RANGER_KMS",
+            "service_version": "0.7.0.2.6"
+          },
+          "components": [
+            {
+              "StackServiceComponents": {
+                "component_name": "RANGER_KMS_SERVER",
+                "hostnames": ["host1"]
+              }
+            }
+          ]
+        }
+      ],
+      "configurations": {
+        'ranger-kms-site': {
+          'properties': {
+            "ranger.service.https.attrib.ssl.enabled": "true",
+            "ranger.service.https.port": "9393"
+          }
+        }
+      }
+    }
+
+    expected = {
+      'kms-site': {
+        'properties': {},
+        'property_attributes': {
+          'hadoop.kms.proxyuser.HTTP.users': {'delete': 'true'},
+          'hadoop.kms.proxyuser.root.hosts': {'delete': 'true'},
+          'hadoop.kms.proxyuser.root.users': {'delete': 'true'},
+          'hadoop.kms.proxyuser.HTTP.hosts': {'delete': 'true'}
+        }
+      },
+      'core-site': {
+        'properties': {}
+      },
+      'kms-properties': {
+        'properties': {}
+      },
+      'ranger-kms-audit': {
+        'properties': {}
+      },
+      'kms-env': {
+        'properties': {
+          'kms_port': '9393'
+        }
+      },
+      'dbks-site': {
+        'properties': {}
+      }
+    }
+
+    recommendedConfigurations = {}
+
+    self.stackAdvisor.recommendRangerKMSConfigurations(recommendedConfigurations, clusterData, services, None)
+    self.assertEquals(recommendedConfigurations, expected)
+
 def load_json(self, filename):
   file = os.path.join(self.testDirectory, filename)
   with open(file, 'rb') as f:


[26/50] [abbrv] ambari git commit: AMBARI-19957. Implement new DB checks for Postgres to prevent cross-schema confusion. (Balazs Bence Sari via stoader)

Posted by nc...@apache.org.
AMBARI-19957. Implement new DB checks for Postgres to prevent cross-schema confusion. (Balazs Bence Sari via stoader)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fa527360
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fa527360
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fa527360

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: fa527360b28e956e3e931449e8740ed07ed407ed
Parents: 8448d5a
Author: Balazs Bence Sari <bs...@hortonworks.com>
Authored: Thu Feb 16 13:08:46 2017 +0100
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Thu Feb 16 13:08:46 2017 +0100

----------------------------------------------------------------------
 .../checks/DatabaseConsistencyCheckHelper.java  | 303 ++++++++++---------
 .../checks/DatabaseConsistencyCheckResult.java  |  50 +++
 .../checks/DatabaseConsistencyChecker.java      |   6 +-
 .../ambari/server/controller/AmbariServer.java  |  43 +--
 .../src/main/python/ambari_server_main.py       |  19 +-
 .../DatabaseConsistencyCheckHelperTest.java     | 143 ++++++++-
 6 files changed, 379 insertions(+), 185 deletions(-)
----------------------------------------------------------------------
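
The diff below replaces the errorsFound/warningsFound flags with a single result value that can only escalate in severity (see setCheckResult and its ordinal comparison). A compact Python sketch of that escalate-only pattern, assuming a three-level result enum like the new DatabaseConsistencyCheckResult; the ordinal values here are assumed.

# Escalate-only result tracking, mirroring setCheckResult's ordinal comparison.
from enum import IntEnum

class CheckResult(IntEnum):
    DB_CHECK_SUCCESS = 0
    DB_CHECK_WARNING = 1
    DB_CHECK_ERROR = 2

check_result = CheckResult.DB_CHECK_SUCCESS

def set_check_result(new_result):
    """Record new_result only if it is more severe than what we have seen so far."""
    global check_result
    if new_result > check_result:
        check_result = new_result

set_check_result(CheckResult.DB_CHECK_WARNING)
set_check_result(CheckResult.DB_CHECK_SUCCESS)   # ignored: less severe
assert check_result is CheckResult.DB_CHECK_WARNING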


http://git-wip-us.apache.org/repos/asf/ambari/blob/fa527360/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
index 7aa8652..926ec65 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
@@ -20,7 +20,6 @@ package org.apache.ambari.server.checks;
 import java.io.File;
 import java.io.IOException;
 import java.sql.Connection;
-import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
@@ -31,9 +30,11 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Scanner;
 import java.util.Set;
 
+import javax.annotation.Nullable;
 import javax.inject.Provider;
 import javax.persistence.EntityManager;
 import javax.persistence.TypedQuery;
@@ -59,7 +60,9 @@ import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Splitter;
 import com.google.common.collect.HashMultimap;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Multimap;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
@@ -77,21 +80,54 @@ public class DatabaseConsistencyCheckHelper {
   private static AmbariMetaInfo ambariMetaInfo;
   private static DBAccessor dbAccessor;
 
+  private static DatabaseConsistencyCheckResult checkResult = DatabaseConsistencyCheckResult.DB_CHECK_SUCCESS;
 
-  private static boolean errorsFound = false;
-  private static boolean warningsFound = false;
+  /**
+   * @return The result of the DB checks run so far.
+   */
+  public static DatabaseConsistencyCheckResult getLastCheckResult() {
+    return checkResult;
+  }
 
-  public static boolean ifErrorsFound() {
-    return errorsFound;
+  /**
+   * Reset check result to {@link DatabaseConsistencyCheckResult#DB_CHECK_SUCCESS}.
+   */
+  public static void resetCheckResult() {
+    checkResult = DatabaseConsistencyCheckResult.DB_CHECK_SUCCESS;
   }
 
-  public static boolean ifWarningsFound() {
-    return warningsFound;
+  /**
+   * Called internally to set the result of the DB checks. The new result is only recorded if it is more severe than
+   * the existing result.
+   *
+   * @param newResult the new result
+   */
+  private static void setCheckResult(DatabaseConsistencyCheckResult newResult) {
+    if (newResult.ordinal() > checkResult.ordinal()) {
+      checkResult = newResult;
+    }
   }
 
-  public static void resetErrorWarningFlags() {
-    errorsFound = false;
-    warningsFound = false;
+  /**
+   * Called to indicate a warning during checks
+   *
+   * @param messageTemplate Message template (log4j format)
+   * @param messageParams Message params
+   */
+  private static void warning(String messageTemplate, Object... messageParams) {
+    LOG.warn(messageTemplate, messageParams);
+    setCheckResult(DatabaseConsistencyCheckResult.DB_CHECK_WARNING);
+  }
+
+  /**
+   * Called to indicate an error during checks
+   *
+   * @param messageTemplate Message template (log4j format)
+   * @param messageParams Message params
+   */
+  private static void error(String messageTemplate, Object... messageParams) {
+    LOG.error(messageTemplate, messageParams);
+    setCheckResult(DatabaseConsistencyCheckResult.DB_CHECK_ERROR);
   }
 
   public static void setInjector(Injector injector) {
@@ -120,25 +156,30 @@ public class DatabaseConsistencyCheckHelper {
       LOG.error("Exception occurred during connection close procedure: ", e);
     }
   }
-
-
-  public static void fixDatabaseConsistency() {
-    fixHostComponentStatesCountEqualsHostComponentsDesiredStates();
-    fixClusterConfigsNotMappedToAnyService();
-  }
-
-  public static void runAllDBChecks() {
+  
+  public static DatabaseConsistencyCheckResult runAllDBChecks(boolean fixIssues) throws Throwable {
     LOG.info("******************************* Check database started *******************************");
-    checkSchemaName();
-    checkMySQLEngine();
-    checkForConfigsNotMappedToService();
-    checkForNotMappedConfigsToCluster();
-    checkForConfigsSelectedMoreThanOnce();
-    checkForHostsWithoutState();
-    checkHostComponentStates();
-    checkServiceConfigs();
-    checkTopologyTables();
-    LOG.info("******************************* Check database completed *******************************");
+    try {
+      if (fixIssues) {
+        fixHostComponentStatesCountEqualsHostComponentsDesiredStates();
+        fixClusterConfigsNotMappedToAnyService();
+      }
+      checkSchemaName();
+      checkMySQLEngine();
+      checkForConfigsNotMappedToService();
+      checkForNotMappedConfigsToCluster();
+      checkForConfigsSelectedMoreThanOnce();
+      checkForHostsWithoutState();
+      checkHostComponentStates();
+      checkServiceConfigs();
+      checkTopologyTables();
+      LOG.info("******************************* Check database completed *******************************");
+      return checkResult;
+    }
+    catch (Throwable ex) {
+      LOG.error("An error occurred during database consistency check.", ex);
+      throw ex;
+    }
   }
 
   public static void checkDBVersionCompatible() throws AmbariException {
@@ -180,7 +221,7 @@ public class DatabaseConsistencyCheckHelper {
     LOG.info("DB store version is compatible");
   }
 
-  public static void checkForNotMappedConfigsToCluster() {
+  static void checkForNotMappedConfigsToCluster() {
     LOG.info("Checking for configs not mapped to any cluster");
 
     String GET_NOT_MAPPED_CONFIGS_QUERY = "select type_name from clusterconfig where type_name not in (select type_name from clusterconfigmapping)";
@@ -188,12 +229,7 @@ public class DatabaseConsistencyCheckHelper {
     ResultSet rs = null;
     Statement statement = null;
 
-    if (connection == null) {
-      if (dbAccessor == null) {
-        dbAccessor = injector.getInstance(DBAccessor.class);
-      }
-      connection = dbAccessor.getConnection();
-    }
+    ensureConnection();
 
     try {
       statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE);
@@ -204,8 +240,8 @@ public class DatabaseConsistencyCheckHelper {
         }
       }
       if (!nonSelectedConfigs.isEmpty()) {
-        LOG.warn("You have config(s): {} that is(are) not mapped (in clusterconfigmapping table) to any cluster!", StringUtils.join(nonSelectedConfigs, ","));
-        warningsFound = true;
+        warning("You have config(s): {} that is(are) not mapped (in clusterconfigmapping table) to any cluster!",
+            nonSelectedConfigs);
       }
     } catch (SQLException e) {
       LOG.error("Exception occurred during check for not mapped configs to cluster procedure: ", e);
@@ -234,7 +270,7 @@ public class DatabaseConsistencyCheckHelper {
   * it means that this version of config is actual. So, if any config type has more
   * than one selected version it's a bug and we are showing error message for user.
   * */
-  public static void checkForConfigsSelectedMoreThanOnce() {
+  static void checkForConfigsSelectedMoreThanOnce() {
     LOG.info("Checking for configs selected more than once");
 
     String GET_CONFIGS_SELECTED_MORE_THAN_ONCE_QUERY = "select c.cluster_name, ccm.type_name from clusterconfigmapping ccm " +
@@ -245,12 +281,7 @@ public class DatabaseConsistencyCheckHelper {
     ResultSet rs = null;
     Statement statement = null;
 
-    if (connection == null) {
-      if (dbAccessor == null) {
-        dbAccessor = injector.getInstance(DBAccessor.class);
-      }
-      connection = dbAccessor.getConnection();
-    }
+    ensureConnection();
 
     try {
       statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE);
@@ -261,9 +292,8 @@ public class DatabaseConsistencyCheckHelper {
         }
 
         for (String clusterName : clusterConfigTypeMap.keySet()) {
-          LOG.error("You have config(s), in cluster {}, that is(are) selected more than once in clusterconfigmapping table: {}",
+          error("You have config(s), in cluster {}, that is(are) selected more than once in clusterconfigmapping table: {}",
                   clusterName ,StringUtils.join(clusterConfigTypeMap.get(clusterName), ","));
-          errorsFound = true;
         }
       }
 
@@ -293,7 +323,7 @@ public class DatabaseConsistencyCheckHelper {
   * has related host state info in hoststate table.
   * If not then we are showing error.
   * */
-  public static void checkForHostsWithoutState() {
+  static void checkForHostsWithoutState() {
     LOG.info("Checking for hosts without state");
 
     String GET_HOSTS_WITHOUT_STATUS_QUERY = "select host_name from hosts where host_id not in (select host_id from hoststate)";
@@ -301,12 +331,7 @@ public class DatabaseConsistencyCheckHelper {
     ResultSet rs = null;
     Statement statement = null;
 
-    if (connection == null) {
-      if (dbAccessor == null) {
-        dbAccessor = injector.getInstance(DBAccessor.class);
-      }
-      connection = dbAccessor.getConnection();
-    }
+    ensureConnection();
 
     try {
       statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE);
@@ -317,8 +342,7 @@ public class DatabaseConsistencyCheckHelper {
         }
 
         if (!hostsWithoutStatus.isEmpty()) {
-          LOG.error("You have host(s) without state (in hoststate table): " + StringUtils.join(hostsWithoutStatus, ","));
-          errorsFound = true;
+          error("You have host(s) without state (in hoststate table): " + StringUtils.join(hostsWithoutStatus, ","));
         }
       }
 
@@ -348,7 +372,7 @@ public class DatabaseConsistencyCheckHelper {
    * This method checks that for each row in topology_request there is at least one row in topology_logical_request,
    * topology_host_request, topology_host_task, topology_logical_task.
    * */
-  public static void checkTopologyTables() {
+  static void checkTopologyTables() {
     LOG.info("Checking Topology tables");
 
     String SELECT_REQUEST_COUNT_QUERY = "select count(tpr.id) from topology_request tpr";
@@ -389,10 +413,9 @@ public class DatabaseConsistencyCheckHelper {
       }
 
       if (topologyRequestCount != topologyRequestTablesJoinedCount) {
-        LOG.error("Your topology request hierarchy is not complete for each row in topology_request should exist " +
+        error("Your topology request hierarchy is not complete: for each row in topology_request there should exist " +
          "at least one row in topology_logical_request, topology_host_request, topology_host_task, " +
           "topology_logical_task.");
-        errorsFound = true;
       }
 
 
@@ -429,7 +452,7 @@ public class DatabaseConsistencyCheckHelper {
   * One more, we are checking if all components has only one host
   * component state. If some component has more, it can cause issues
   * */
-  public static void checkHostComponentStates() {
+  static void checkHostComponentStates() {
     LOG.info("Checking host component states count equals host component desired states count");
 
     String GET_HOST_COMPONENT_STATE_COUNT_QUERY = "select count(*) from hostcomponentstate";
@@ -444,12 +467,7 @@ public class DatabaseConsistencyCheckHelper {
     ResultSet rs = null;
     Statement statement = null;
 
-    if (connection == null) {
-      if (dbAccessor == null) {
-        dbAccessor = injector.getInstance(DBAccessor.class);
-      }
-      connection = dbAccessor.getConnection();
-    }
+    ensureConnection();
 
     try {
       statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE);
@@ -476,8 +494,7 @@ public class DatabaseConsistencyCheckHelper {
       }
 
       if (hostComponentStateCount != hostComponentDesiredStateCount || hostComponentStateCount != mergedCount) {
-        LOG.error("Your host component states (hostcomponentstate table) count not equals host component desired states (hostcomponentdesiredstate table) count!");
-        errorsFound = true;
+        error("Your host component states (hostcomponentstate table) count does not equal the host component desired states (hostcomponentdesiredstate table) count!");
       }
 
 
@@ -489,8 +506,7 @@ public class DatabaseConsistencyCheckHelper {
       }
 
       for (Map.Entry<String, String> component : hostComponentStateDuplicates.entrySet()) {
-        LOG.error("Component {} on host with id {}, has more than one host component state (hostcomponentstate table)!", component.getKey(), component.getValue());
-        errorsFound = true;
+        error("Component {} on host with id {}, has more than one host component state (hostcomponentstate table)!", component.getKey(), component.getValue());
       }
 
     } catch (SQLException e) {
@@ -519,7 +535,7 @@ public class DatabaseConsistencyCheckHelper {
   * Remove configs that are not mapped to any service.
   */
   @Transactional
-  public static void fixClusterConfigsNotMappedToAnyService() {
+  static void fixClusterConfigsNotMappedToAnyService() {
     LOG.info("Checking for configs not mapped to any Service");
     ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
     List<ClusterConfigEntity> notMappedClusterConfigs = getNotMappedClusterConfigsToService();
@@ -553,7 +569,7 @@ public class DatabaseConsistencyCheckHelper {
   /**
    * Look for configs that are not mapped to any service.
    */
-  public static void checkForConfigsNotMappedToService() {
+  static void checkForConfigsNotMappedToService() {
     LOG.info("Checking for configs that are not mapped to any service");
     List<ClusterConfigEntity> notMappedClasterConfigs = getNotMappedClusterConfigsToService();
 
@@ -562,8 +578,7 @@ public class DatabaseConsistencyCheckHelper {
       nonMappedConfigs.add(clusterConfigEntity.getType() + '-' + clusterConfigEntity.getTag());
     }
     if (!notMappedClasterConfigs.isEmpty()){
-      LOG.warn("You have config(s): {} that is(are) not mapped (in serviceconfigmapping table) to any service!", StringUtils.join(nonMappedConfigs, ","));
-      warningsFound = true;
+      warning("You have config(s): {} that is(are) not mapped (in serviceconfigmapping table) to any service!", StringUtils.join(nonMappedConfigs, ","));
     }
   }
 
@@ -574,7 +589,7 @@ public class DatabaseConsistencyCheckHelper {
   * adding missed host components.
   */
   @Transactional
-  public static void fixHostComponentStatesCountEqualsHostComponentsDesiredStates() {
+  static void fixHostComponentStatesCountEqualsHostComponentsDesiredStates() {
     LOG.info("Checking that there are the same number of actual and desired host components");
 
     HostComponentStateDAO hostComponentStateDAO = injector.getInstance(HostComponentStateDAO.class);
@@ -636,69 +651,80 @@ public class DatabaseConsistencyCheckHelper {
   }
 
   /**
-  * This method checks db schema name for Postgres.
-  * */
-  public static void checkSchemaName () {
+   * This performs the following checks for Postgres:
+   * <ol>
+   *   <li>Check if the connection's schema (first item on search path) is the one set in ambari.properties</li>
+   *   <li>Check if the connection's schema is present in the DB</li>
+   *   <li>Check if the ambari tables exist in the schema configured in ambari.properties</li>
+   *   <li>Check if ambari tables don't exist in other schemas</li>
+   * </ol>
+   * The purpose of these checks is to avoid tables and constraints in ambari's schema being confused with tables and
+   * constraints in other schemas on the DB user's search path. This can happen after an improperly performed DB
+   * restore and can cause issues during upgrade.
+   */
+  static void checkSchemaName () {
     Configuration conf = injector.getInstance(Configuration.class);
-    if(conf.getDatabaseType()!=Configuration.DatabaseType.POSTGRES) {
-      return;
-    }
-    LOG.info("Ensuring that the schema set for Postgres is correct");
-    if (connection == null) {
-      if (dbAccessor == null) {
-        dbAccessor = injector.getInstance(DBAccessor.class);
+    if(conf.getDatabaseType() == Configuration.DatabaseType.POSTGRES) {
+      LOG.info("Ensuring that the schema set for Postgres is correct");
+
+      ensureConnection();
+
+      try (ResultSet schemaRs = connection.getMetaData().getSchemas();
+           ResultSet searchPathRs = connection.createStatement().executeQuery("show search_path");
+           ResultSet ambariTablesRs = connection.createStatement().executeQuery(
+               "select table_schema from information_schema.tables where table_name = 'hostcomponentdesiredstate'")) {
+        // Check if ambari's schema exists
+        final boolean ambariSchemaExists = getResultSetColumn(schemaRs, "TABLE_SCHEM").contains(conf.getDatabaseSchema());
+        if ( !ambariSchemaExists ) {
+          warning("The schema [{}] defined for Ambari in ambari.properties has not been found in the database. " +
+              "Storing Ambari tables under a different schema can lead to problems.", conf.getDatabaseSchema());
+        }
+        // Check if the right schema is first on the search path
+        List<Object> searchPathResultColumn = getResultSetColumn(searchPathRs, "search_path");
+        List<String> searchPath = searchPathResultColumn.isEmpty() ? ImmutableList.<String>of() :
+            ImmutableList.copyOf(Splitter.on(",").trimResults().split(String.valueOf(searchPathResultColumn.get(0))));
+        String firstSearchPathItem = searchPath.isEmpty() ? null : searchPath.get(0);
+        if (!Objects.equals(firstSearchPathItem, conf.getDatabaseSchema())) {
+          warning("The schema [{}] defined for Ambari in ambari.properties is not first on the search path:" +
+              " {}. This can lead to problems.", conf.getDatabaseSchema(), searchPath);
+        }
+        // Check schemas with Ambari tables.
+        ArrayList<Object> schemasWithAmbariTables = getResultSetColumn(ambariTablesRs, "table_schema");
+        if ( ambariSchemaExists && !schemasWithAmbariTables.contains(conf.getDatabaseSchema()) ) {
+          warning("The schema [{}] defined for Ambari in ambari.properties does not contain the Ambari tables. " +
+              "Storing Ambari tables under a different schema can lead to problems.", conf.getDatabaseSchema());
+        }
+        if ( schemasWithAmbariTables.size() > 1 ) {
+          warning("Multiple schemas contain the Ambari tables: {}. This can lead to problems.", schemasWithAmbariTables);
+        }
+      }
+      catch (SQLException e) {
+        warning("Exception occurred during checking db schema name: ", e);
       }
-      connection = dbAccessor.getConnection();
     }
-    ResultSet rs = null;
-    try {
-      DatabaseMetaData databaseMetaData = connection.getMetaData();
-
-      rs = databaseMetaData.getSchemas();
+  }
 
-      boolean ambariSchemaPresent = false;
-      if (rs != null) {
-        while (rs.next()) {
-          if(StringUtils.equals(rs.getString("TABLE_SCHEM"),conf.getDatabaseSchema())){
-            ambariSchemaPresent = true;
-            break;
-          }
-        }
-      }
-      if (!ambariSchemaPresent){
-        LOG.error("The schema %s defined for Ambari from ambari.properties has not been found in the database. " +
-          "This means that the Ambari tables are stored under the public schema which can lead to problems.", conf.getDatabaseSchema());
-        warningsFound = true;
-      }
-
-    } catch (SQLException e) {
-      LOG.error("Exception occurred during checking db schema name.: ", e);
-    } finally {
-      if (rs != null) {
-        try {
-          rs.close();
-        } catch (SQLException e) {
-          LOG.error("Exception occurred during result set closing procedure: ", e);
-        }
+  private static ArrayList<Object> getResultSetColumn(@Nullable ResultSet rs, String columnName) throws SQLException {
+    ArrayList<Object> values = new ArrayList<>();
+    if (null != rs) {
+      while (rs.next()) {
+        values.add(rs.getObject(columnName));
       }
     }
+    return values;
   }
 
   /**
   * This method checks tables engine type to be innodb for MySQL.
   * */
-  public static void checkMySQLEngine () {
+  static void checkMySQLEngine () {
     Configuration conf = injector.getInstance(Configuration.class);
     if(conf.getDatabaseType()!=Configuration.DatabaseType.MYSQL) {
       return;
     }
     LOG.info("Checking to ensure that the MySQL DB engine type is set to InnoDB");
-    if (connection == null) {
-      if (dbAccessor == null) {
-        dbAccessor = injector.getInstance(DBAccessor.class);
-      }
-      connection = dbAccessor.getConnection();
-    }
+
+    ensureConnection();
 
     String GET_INNODB_ENGINE_SUPPORT = "select TABLE_NAME, ENGINE from information_schema.tables where TABLE_SCHEMA = '%s' and LOWER(ENGINE) != 'innodb';";
 
@@ -711,11 +737,10 @@ public class DatabaseConsistencyCheckHelper {
       if (rs != null) {
         List<String> tablesInfo = new ArrayList<>();
         while (rs.next()) {
-          errorsFound = true;
           tablesInfo.add(rs.getString("TABLE_NAME"));
         }
         if (!tablesInfo.isEmpty()){
-          LOG.error("Found tables with engine type that is not InnoDB : %s", StringUtils.join(tablesInfo, ','));
+          error("Found tables with engine type that is not InnoDB : {}", tablesInfo);
         }
       }
     } catch (SQLException e) {
@@ -739,7 +764,7 @@ public class DatabaseConsistencyCheckHelper {
   * 4) Check if service has config which is not selected(has no actual config version) in clusterconfigmapping table.
   * If any issue was discovered, we are showing error message for user.
   * */
-  public static void checkServiceConfigs()  {
+  static void checkServiceConfigs()  {
     LOG.info("Checking services and their configs");
 
     String GET_SERVICES_WITHOUT_CONFIGS_QUERY = "select c.cluster_name, service_name from clusterservices cs " +
@@ -773,12 +798,7 @@ public class DatabaseConsistencyCheckHelper {
     ResultSet rs = null;
     Statement statement = null;
 
-    if (connection == null) {
-      if (dbAccessor == null) {
-        dbAccessor = injector.getInstance(DBAccessor.class);
-      }
-      connection = dbAccessor.getConnection();
-    }
+    ensureConnection();
 
     LOG.info("Getting ambari metainfo instance");
     if (ambariMetaInfo == null) {
@@ -796,8 +816,7 @@ public class DatabaseConsistencyCheckHelper {
         }
 
         for (String clusterName : clusterServiceMap.keySet()) {
-          LOG.warn("Service(s): {}, from cluster {} has no config(s) in serviceconfig table!", StringUtils.join(clusterServiceMap.get(clusterName), ","), clusterName);
-          warningsFound = true;
+          warning("Service(s): {}, from cluster {} has no config(s) in serviceconfig table!", StringUtils.join(clusterServiceMap.get(clusterName), ","), clusterName);
         }
 
       }
@@ -823,8 +842,7 @@ public class DatabaseConsistencyCheckHelper {
         for (String clName : clusterServiceVersionMap.keySet()) {
           Multimap<String, String> serviceVersion = clusterServiceVersionMap.get(clName);
           for (String servName : serviceVersion.keySet()) {
-            LOG.error("In cluster {}, service config mapping is unavailable (in table serviceconfigmapping) for service {} with version(s) {}! ", clName, servName, StringUtils.join(serviceVersion.get(servName), ","));
-            errorsFound = true;
+            error("In cluster {}, service config mapping is unavailable (in table serviceconfigmapping) for service {} with version(s) {}! ", clName, servName, StringUtils.join(serviceVersion.get(servName), ","));
           }
         }
 
@@ -899,9 +917,8 @@ public class DatabaseConsistencyCheckHelper {
               stackServiceConfigs.put(serviceName, configType);
             }
           } else {
-            LOG.warn("Service {} is not available for stack {} in cluster {}",
+            warning("Service {} is not available for stack {} in cluster {}",
                     serviceName, stackName + "-" + stackVersion, clusterName);
-            warningsFound = true;
           }
         }
 
@@ -918,9 +935,8 @@ public class DatabaseConsistencyCheckHelper {
                 if (serviceConfigsFromDB != null && serviceConfigsFromStack != null) {
                   serviceConfigsFromStack.removeAll(serviceConfigsFromDB);
                   if (!serviceConfigsFromStack.isEmpty()) {
-                    LOG.error("Required config(s): {} is(are) not available for service {} with service config version {} in cluster {}",
+                    error("Required config(s): {} is(are) not available for service {} with service config version {} in cluster {}",
                             StringUtils.join(serviceConfigsFromStack, ","), serviceName, Integer.toString(serviceVersion), clusterName);
-                    errorsFound = true;
                   }
                 }
               }
@@ -957,8 +973,7 @@ public class DatabaseConsistencyCheckHelper {
       for (String clusterName : clusterServiceConfigType.keySet()) {
         Multimap<String, String> serviceConfig = clusterServiceConfigType.get(clusterName);
         for (String serviceName : serviceConfig.keySet()) {
-          LOG.error("You have non selected configs: {} for service {} from cluster {}!", StringUtils.join(serviceConfig.get(serviceName), ","), serviceName, clusterName);
-          errorsFound = true;
+          error("You have non selected configs: {} for service {} from cluster {}!", StringUtils.join(serviceConfig.get(serviceName), ","), serviceName, clusterName);
         }
       }
     } catch (SQLException e) {
@@ -985,5 +1000,13 @@ public class DatabaseConsistencyCheckHelper {
 
   }
 
+  private static void ensureConnection() {
+    if (connection == null) {
+      if (dbAccessor == null) {
+        dbAccessor = injector.getInstance(DBAccessor.class);
+      }
+      connection = dbAccessor.getConnection();
+    }
+  }
 
 }
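
The refactor above funnels every finding through warning(...) and error(...) helpers and exposes the overall outcome
through getLastCheckResult(), replacing the old errorsFound/warningsFound booleans. The helpers themselves are outside
this excerpt; a minimal sketch of what they could look like, assuming a static checkResult field initialised to
DB_CHECK_SUCCESS (names inferred from the calls above, not taken from the committed code):

  // Illustrative sketch only, not the committed implementation.
  private static void warning(String messageTemplate, Object... messageParams) {
    LOG.warn(messageTemplate, messageParams);
    if (checkResult.compareTo(DatabaseConsistencyCheckResult.DB_CHECK_WARNING) < 0) {
      checkResult = DatabaseConsistencyCheckResult.DB_CHECK_WARNING;   // never downgrade an existing error
    }
  }

  private static void error(String messageTemplate, Object... messageParams) {
    LOG.error(messageTemplate, messageParams);
    checkResult = DatabaseConsistencyCheckResult.DB_CHECK_ERROR;       // error is the highest severity
  }

  static DatabaseConsistencyCheckResult getLastCheckResult() {
    return checkResult;
  }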

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa527360/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckResult.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckResult.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckResult.java
new file mode 100644
index 0000000..7291d5d
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckResult.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.checks;
+
+/**
+ * Enum representing the possible outcomes of the on-start database consistency check.
+ *
+ * <p><b>IMPORTANT:</b></p>
+ * <ul>
+ *   <li>Outcomes are ordered by severity; the program relies on this.</li>
+ *   <li>The check result is logged to the standard output and the server startup python script relies
+ *       on these values. When changing the values, please make sure the startup scripts are changed accordingly!</li>
+ * </ul>
+ *
+ */
+public enum DatabaseConsistencyCheckResult {
+  DB_CHECK_SUCCESS,
+  DB_CHECK_WARNING,
+  DB_CHECK_ERROR;
+
+  /**
+   * @return a boolean indicating that the result has at least warning severity
+   */
+  public boolean isErrorOrWarning() {
+    return this == DB_CHECK_WARNING || this == DB_CHECK_ERROR;
+  }
+
+  /**
+   * @return a boolean indicating that the result is an error
+   */
+  public boolean isError() {
+    return this == DB_CHECK_ERROR;
+  }
+
+}
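
Condensing the call sites in the following diffs, the enum is consumed roughly like this (illustrative only; the real
callers are DatabaseConsistencyChecker and AmbariServer below, and fixIssues is just a local flag here):

  DatabaseConsistencyCheckResult result = DatabaseConsistencyCheckHelper.runAllDBChecks(fixIssues);
  if (result.isError()) {
    System.exit(1);                       // refuse to start on errors
  } else if (result.isErrorOrWarning()) {
    // warnings only: keep starting, but point the operator at ambari-server-check-database.log
  }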

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa527360/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyChecker.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyChecker.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyChecker.java
index 2aaaadd..8fd1d18 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyChecker.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyChecker.java
@@ -103,7 +103,7 @@ public class DatabaseConsistencyChecker {
 
       databaseConsistencyChecker.startPersistenceService();
 
-      DatabaseConsistencyCheckHelper.runAllDBChecks();
+      DatabaseConsistencyCheckHelper.runAllDBChecks(false);
       DatabaseConsistencyCheckHelper.checkHostComponentStates();
 
       DatabaseConsistencyCheckHelper.checkServiceConfigs();
@@ -120,7 +120,7 @@ public class DatabaseConsistencyChecker {
       }
     } finally {
         DatabaseConsistencyCheckHelper.closeConnection();
-        if (DatabaseConsistencyCheckHelper.ifErrorsFound() || DatabaseConsistencyCheckHelper.ifWarningsFound()) {
+        if (DatabaseConsistencyCheckHelper.getLastCheckResult().isErrorOrWarning()) {
           String ambariDBConsistencyCheckLog = "ambari-server-check-database.log";
           if (LOG instanceof Log4jLoggerAdapter) {
             org.apache.log4j.Logger dbConsistencyCheckHelperLogger = org.apache.log4j.Logger.getLogger(DatabaseConsistencyCheckHelper.class);
@@ -135,7 +135,7 @@ public class DatabaseConsistencyChecker {
           }
           ambariDBConsistencyCheckLog = ambariDBConsistencyCheckLog.replace("//", "/");
 
-          if (DatabaseConsistencyCheckHelper.ifErrorsFound()) {
+          if (DatabaseConsistencyCheckHelper.getLastCheckResult().isError()) {
             System.out.print(String.format("DB configs consistency check failed. Run \"ambari-server start --skip-database-check\" to skip. " +
                   "You may try --auto-fix-database flag to attempt to fix issues automatically. " +
                   "If you use this \"--skip-database-check\" option, do not make any changes to your cluster topology " +

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa527360/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index 1704546..9540ca3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -57,6 +57,7 @@ import org.apache.ambari.server.audit.AuditLoggerModule;
 import org.apache.ambari.server.audit.request.RequestAuditLogger;
 import org.apache.ambari.server.bootstrap.BootStrapImpl;
 import org.apache.ambari.server.checks.DatabaseConsistencyCheckHelper;
+import org.apache.ambari.server.checks.DatabaseConsistencyCheckResult;
 import org.apache.ambari.server.configuration.ComponentSSLConfiguration;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.internal.AbstractControllerResourceProvider;
@@ -678,36 +679,20 @@ public class AmbariServer {
    */
   protected void runDatabaseConsistencyCheck() throws Exception {
     if (System.getProperty("skipDatabaseConsistencyCheck") == null) {
-      System.out.println("Database consistency check started");
-      Logger DB_CHECK_LOG = LoggerFactory.getLogger(DatabaseConsistencyCheckHelper.class);
-      try{
-        if (System.getProperty("fixDatabaseConsistency") != null ){
-          DatabaseConsistencyCheckHelper.fixDatabaseConsistency();
-        }
-        DatabaseConsistencyCheckHelper.runAllDBChecks();
-      } catch(Throwable e) {
-        System.out.println("Database consistency check: failed");
-        if (e instanceof AmbariException) {
-          DB_CHECK_LOG.error("Exception occurred during database check:", e);
-          System.out.println("Exception occurred during database check: " + e.getMessage());
-          e.printStackTrace();
-          throw (AmbariException)e;
-        } else {
-          DB_CHECK_LOG.error("Unexpected error, database check failed", e);
-          System.out.println("Unexpected error, database check failed: " + e.getMessage());
-          e.printStackTrace();
-          throw new Exception("Unexpected error, database check failed", e);
-        }
-      } finally {
-        if (DatabaseConsistencyCheckHelper.ifErrorsFound()) {
-          System.out.println("Database consistency check: failed");
+      boolean fixIssues = (System.getProperty("fixDatabaseConsistency") != null);
+      try {
+        DatabaseConsistencyCheckResult checkResult = DatabaseConsistencyCheckHelper.runAllDBChecks(fixIssues);
+        // Writing explicitly to the console is necessary as the python start script expects it.
+        System.out.println("Database consistency check result: " + checkResult);
+        if (checkResult.isError()) {
           System.exit(1);
-        } else if (DatabaseConsistencyCheckHelper.ifWarningsFound()) {
-          System.out.println("Database consistency check: warning");
-        } else {
-          System.out.println("Database consistency check: successful");
         }
       }
+      catch (Throwable ex) {
+        // Writing explicitly to the console is necessary as the python start script expects it.
+        System.out.println("Database consistency check result: " + DatabaseConsistencyCheckResult.DB_CHECK_ERROR);
+        throw new Exception(ex);
+      }
     }
   }
 
@@ -1075,7 +1060,9 @@ public class AmbariServer {
       setupProxyAuth();
 
       injector.getInstance(GuiceJpaInitializer.class);
+
       DatabaseConsistencyCheckHelper.checkDBVersionCompatible();
+
       server = injector.getInstance(AmbariServer.class);
       injector.getInstance(UpdateActiveRepoVersionOnStartup.class).process();
       CertificateManager certMan = injector.getInstance(CertificateManager.class);
@@ -1092,4 +1079,6 @@ public class AmbariServer {
       System.exit(-1);
     }
   }
+
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa527360/ambari-server/src/main/python/ambari_server_main.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server_main.py b/ambari-server/src/main/python/ambari_server_main.py
index 4667899..66678b0 100644
--- a/ambari-server/src/main/python/ambari_server_main.py
+++ b/ambari-server/src/main/python/ambari_server_main.py
@@ -239,16 +239,19 @@ def wait_for_server_start(pidFile, scmStatus):
     exception = FatalException(-1, AMBARI_SERVER_NOT_STARTED_MSG)
 
   if os.path.isfile(configDefaults.SERVER_OUT_FILE):
-    if 'Database consistency check: failed' in open(configDefaults.SERVER_OUT_FILE).read():
-      print "DB configs consistency check failed. Run \"ambari-server start --skip-database-check\" to skip. " \
-      "You may try --auto-fix-database flag to attempt to fix issues automatically. " \
+    if 'DB_CHECK_ERROR' in open(configDefaults.SERVER_OUT_FILE).read():
+      print "\nDB configs consistency check failed. Run \"ambari-server start --skip-database-check\" to skip. " \
+        "You may try --auto-fix-database flag to attempt to fix issues automatically. " \
         "If you use this \"--skip-database-check\" option, do not make any changes to your cluster topology " \
         "or perform a cluster upgrade until you correct the database consistency issues. See " + \
-          configDefaults.DB_CHECK_LOG + " for more details on the consistency issues."
-    elif 'Database consistency check: warning' in open(configDefaults.SERVER_OUT_FILE).read():
-      print "DB configs consistency check found warnings. See " + configDefaults.DB_CHECK_LOG + " for more details."
-    else:
-      print "DB configs consistency check: no errors and warnings were found."
+        configDefaults.DB_CHECK_LOG + " for more details on the consistency issues."
+    elif 'DB_CHECK_WARNING' in open(configDefaults.SERVER_OUT_FILE).read():
+      print "\nDB configs consistency check found warnings. See " + configDefaults.DB_CHECK_LOG + " for more details."
+    # Only presume that the DB check was successful if it explicitly appears in the log. An unexpected error may
+    # prevent the consistency check from running at all, so a missing error/warning message in the log does not imply
+    # the check was successful.
+    elif 'DB_CHECK_SUCCESS' in open(configDefaults.SERVER_OUT_FILE).read():
+      print "\nDB configs consistency check: no errors and warnings were found."
   else:
     sys.stdout.write(configDefaults.SERVER_OUT_FILE + " does not exist")
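
The DB_CHECK_* tokens this script greps for are simply the enum constant names: Enum.toString() returns the constant's
name, so the single line AmbariServer writes to stdout ("Database consistency check result: ...") contains exactly the
marker matched here. A tiny illustration (hypothetical demo class, not part of the patch):

  // Assumes: import org.apache.ambari.server.checks.DatabaseConsistencyCheckResult;
  public class DbCheckMarkerDemo {
    public static void main(String[] args) {
      String line = "Database consistency check result: " + DatabaseConsistencyCheckResult.DB_CHECK_WARNING;
      System.out.println(line.contains("DB_CHECK_WARNING"));   // prints: true
    }
  }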
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa527360/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
index f73562d..fddbb6a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
@@ -18,30 +18,42 @@
 package org.apache.ambari.server.checks;
 
 
+import static com.google.common.collect.Lists.newArrayList;
+import static org.easymock.EasyMock.anyString;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
 import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import java.sql.Connection;
+import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
+import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 import javax.persistence.EntityManager;
 
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.stack.StackManagerFactory;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.EasyMockSupport;
+import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.collect.Lists;
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
 
-import junit.framework.Assert;
 
 public class DatabaseConsistencyCheckHelperTest {
 
@@ -174,13 +186,13 @@ public class DatabaseConsistencyCheckHelperTest {
   @Test
   public void testCheckTopologyTablesAreConsistent() throws Exception {
     testCheckTopologyTablesConsistent(2);
-    Assert.assertTrue(!DatabaseConsistencyCheckHelper.ifErrorsFound());
+    Assert.assertFalse(DatabaseConsistencyCheckHelper.getLastCheckResult().isError());
   }
 
   @Test
   public void testCheckTopologyTablesAreNotConsistent() throws Exception {
     testCheckTopologyTablesConsistent(1);
-    Assert.assertTrue(DatabaseConsistencyCheckHelper.ifErrorsFound());
+    Assert.assertTrue(DatabaseConsistencyCheckHelper.getLastCheckResult().isError());
   }
 
   private void testCheckTopologyTablesConsistent(int resultCount) throws Exception {
@@ -356,6 +368,123 @@ public class DatabaseConsistencyCheckHelperTest {
   }
 
   @Test
+  public void testSchemaName_NoIssues() throws Exception {
+    setupMocksForTestSchemaName("ambari", "ambari, public", newArrayList("ambari", "public"), newArrayList("ambari"));
+    DatabaseConsistencyCheckHelper.checkSchemaName();
+    assertFalse("No warnings were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult() ==
+        DatabaseConsistencyCheckResult.DB_CHECK_WARNING);
+    assertFalse("No errors were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult().isError());
+  }
+
+  @Test
+  public void testSchemaName_WrongSearchPathOrder() throws Exception {
+    setupMocksForTestSchemaName("ambari", "public, ambari", newArrayList("ambari", "public"), newArrayList("ambari"));
+    DatabaseConsistencyCheckHelper.checkSchemaName();
+    assertTrue("Warnings were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult() ==
+        DatabaseConsistencyCheckResult.DB_CHECK_WARNING);
+    assertFalse("No errors were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult().isError());
+  }
+
+  @Test
+  public void testSchemaName_NoSearchPath() throws Exception {
+    setupMocksForTestSchemaName("ambari", null, newArrayList("ambari", "public"), newArrayList("ambari"));
+    DatabaseConsistencyCheckHelper.checkSchemaName();
+    assertTrue("Warnings were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult() ==
+        DatabaseConsistencyCheckResult.DB_CHECK_WARNING);
+    assertFalse("No errors were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult().isError());
+  }
+
+
+  @Test
+  public void testSchemaName_NoAmbariSchema() throws Exception {
+    setupMocksForTestSchemaName("ambari", null, newArrayList("public"), Lists.<String>newArrayList());
+    DatabaseConsistencyCheckHelper.checkSchemaName();
+    assertTrue("Warnings were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult() ==
+        DatabaseConsistencyCheckResult.DB_CHECK_WARNING);
+    assertFalse("No errors were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult().isError());
+  }
+
+  @Test
+  public void testSchemaName_NoTablesInAmbariSchema() throws Exception {
+    setupMocksForTestSchemaName("ambari", "ambari", newArrayList("ambari", "public"), newArrayList("public"));
+    DatabaseConsistencyCheckHelper.checkSchemaName();
+    assertTrue("Warnings were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult() ==
+        DatabaseConsistencyCheckResult.DB_CHECK_WARNING);
+    assertFalse("No errors were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult().isError());
+  }
+
+  @Test
+  public void testSchemaName_AmbariTablesInMultipleSchemas() throws Exception {
+    setupMocksForTestSchemaName("ambari", "ambari", newArrayList("ambari", "public"), newArrayList("ambari", "public"));
+    DatabaseConsistencyCheckHelper.checkSchemaName();
+    assertTrue("Warnings were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult() ==
+        DatabaseConsistencyCheckResult.DB_CHECK_WARNING);
+    assertFalse("No errors were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult().isError());
+  }
+
+  @Test
+  public void testSchemaName_NullsAreTolerated() throws Exception {
+    setupMocksForTestSchemaName(null, null, null, null);
+    DatabaseConsistencyCheckHelper.checkSchemaName();
+    assertTrue("Warnings were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult() ==
+        DatabaseConsistencyCheckResult.DB_CHECK_WARNING);
+    assertFalse("No errors were expected.", DatabaseConsistencyCheckHelper.getLastCheckResult().isError());
+  }
+
+  private void setupMocksForTestSchemaName(String configuredSchema, String searchPath, List<String> schemas,
+                                           List<String> schemasWithAmbariTables) throws Exception {
+    final Configuration config = createNiceMock(Configuration.class);
+    final OsFamily osFamily = createNiceMock(OsFamily.class);
+    final Connection connection = createNiceMock(Connection.class);
+    final DBAccessor dbAccessor = createStrictMock(DBAccessor.class);
+    final Statement searchPathStatement = createStrictMock(Statement.class);
+    final Statement getTablesStatement = createStrictMock(Statement.class);
+    final DatabaseMetaData dbMetaData = createStrictMock(DatabaseMetaData.class);
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(DBAccessor.class).toInstance(dbAccessor);
+        bind(OsFamily.class).toInstance(osFamily);
+        bind(Configuration.class).toInstance(config);
+      }
+    });
+    expect(config.getDatabaseSchema()).andReturn(configuredSchema).anyTimes();
+    expect(config.getDatabaseType()).andReturn(Configuration.DatabaseType.POSTGRES);
+    expect(dbAccessor.getConnection()).andReturn(connection);
+    expect(connection.getMetaData()).andReturn(dbMetaData);
+    expect(connection.createStatement()).andReturn(searchPathStatement);
+    expect(connection.createStatement()).andReturn(getTablesStatement);
+    expect(dbMetaData.getSchemas()).andReturn(resultSet("TABLE_SCHEM", schemas));
+    expect(searchPathStatement.executeQuery(anyString())).andReturn(
+        resultSet("search_path", newArrayList(searchPath)));
+    expect(getTablesStatement.executeQuery(anyString())).andReturn(
+        resultSet("table_schema", schemasWithAmbariTables));
+    replay(config, connection, dbAccessor, dbMetaData, getTablesStatement, osFamily, searchPathStatement);
+    DatabaseConsistencyCheckHelper.setInjector(mockInjector);
+    DatabaseConsistencyCheckHelper.setConnection(null);
+    DatabaseConsistencyCheckHelper.resetCheckResult();
+  }
+
+  private ResultSet resultSet(final String columnName, final List<? extends Object> columnData) throws SQLException {
+    if (null == columnData) {
+      return null;
+    }
+    else {
+      ResultSet rs = createNiceMock(ResultSet.class);
+      if ( !columnData.isEmpty() ) {
+        expect(rs.next()).andReturn(true).times(columnData.size());
+      }
+      expect(rs.next()).andReturn(false);
+      for(Object item: columnData) {
+        expect(rs.getObject(columnName)).andReturn(item);
+      }
+      replay(rs);
+      return rs;
+    }
+  }
+
+
+  @Test
   public void testCheckServiceConfigs_missingServiceConfigGeneratesWarning() throws Exception {
     EasyMockSupport easyMockSupport = new EasyMockSupport();
     final AmbariMetaInfo mockAmbariMetainfo = easyMockSupport.createNiceMock(AmbariMetaInfo.class);
@@ -434,16 +563,16 @@ public class DatabaseConsistencyCheckHelperTest {
 
     mockAmbariMetainfo.init();
 
-    DatabaseConsistencyCheckHelper.resetErrorWarningFlags();
+    DatabaseConsistencyCheckHelper.resetCheckResult();
     DatabaseConsistencyCheckHelper.checkServiceConfigs();
 
     easyMockSupport.verifyAll();
 
     Assert.assertTrue("Missing service config for OPENSOFT R should have triggered a warning.",
-        DatabaseConsistencyCheckHelper.ifWarningsFound());
-    Assert.assertFalse("No errors should have been triggered.", DatabaseConsistencyCheckHelper.ifErrorsFound());
+        DatabaseConsistencyCheckHelper.getLastCheckResult() == DatabaseConsistencyCheckResult.DB_CHECK_WARNING);
+    Assert.assertFalse("No errors should have been triggered.",
+        DatabaseConsistencyCheckHelper.getLastCheckResult().isError());
   }
 
 
 }
-


[22/50] [abbrv] ambari git commit: AMBARI-19823. If by any case Migration Is hampered Then No message is shown in UI Hue-Ambari Migration view. (Ishan Bhatt via gauravn7)

Posted by nc...@apache.org.
AMBARI-19823. If by any case Migration Is hampered Then No message is shown in UI Hue-Ambari Migration view. (Ishan Bhatt via gauravn7)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4aa0f623
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4aa0f623
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4aa0f623

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 4aa0f6234e42b2328e2b0b3f435a1b13d27412a9
Parents: d4c1ace
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Thu Feb 16 10:47:22 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Thu Feb 16 10:47:22 2017 +0530

----------------------------------------------------------------------
 .../HiveHistoryMigrationUtility.java            |  26 +-
 ...HiveHistoryQueryMigrationImplementation.java |   2 +-
 .../HiveSavedQueryMigrationImplementation.java  |   4 +-
 .../HiveSavedQueryMigrationUtility.java         |  29 +-
 .../pigjob/PigJobMigrationImplementation.java   |   5 +-
 .../pig/pigjob/PigJobMigrationUtility.java      | 377 ++++++++++---------
 .../PigScriptMigrationImplementation.java       |   5 +-
 .../pigscript/PigScriptMigrationUtility.java    |  27 +-
 .../pigudf/PigUdfMigrationImplementation.java   |   5 +-
 .../pig/pigudf/PigUdfMigrationUtility.java      | 286 +++++++-------
 .../scripts/models/MigrationResponse.java       |   6 +
 .../app/models/checkprogress.js                 |   3 +-
 .../app/routes/home-page/hive-history.js        |   7 +-
 .../app/routes/home-page/hive-saved-query.js    |   8 +-
 .../app/routes/home-page/pig-job.js             |   7 +-
 .../app/routes/home-page/pig-script.js          |   8 +-
 .../app/routes/home-page/pig-udf.js             |   8 +-
 .../app/templates/home-page/hive-history.hbs    |   8 +
 .../templates/home-page/hive-saved-query.hbs    |   8 +
 .../app/templates/home-page/pig-job.hbs         |   8 +
 .../app/templates/home-page/pig-script.hbs      |   8 +
 .../app/templates/home-page/pig-udf.hbs         | 187 ++++-----
 22 files changed, 578 insertions(+), 454 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
index bab2084..7c3b2c2 100755
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
@@ -69,6 +69,7 @@ public class HiveHistoryMigrationUtility {
     Connection connectionHuedb = null;
     Connection connectionAmbaridb = null;
 
+
     logger.info(System.getProperty("java.class.path"));
 
     logger.info("--------------------------------------");
@@ -121,7 +122,7 @@ public class HiveHistoryMigrationUtility {
     try {
       String[] usernames = username.split(",");
       int totalQueries = 0;
-      for(int k=0; k<usernames.length; k++) {
+      for (int k = 0; k < usernames.length; k++) {
         connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();
         username = usernames[k];
         migrationresult.setProgressPercentage(0);
@@ -137,7 +138,7 @@ public class HiveHistoryMigrationUtility {
              /* if No migration query selected from Hue Database according to our search criteria */
 
         if (dbpojoHiveHistoryQuery.size() == 0) {
-          logger.info("No queries has been selected for the user " + username + " between dates: " + startDate +" - "+endDate);
+          logger.info("No queries have been selected for the user " + username + " between dates: " + startDate + " - " + endDate);
 
         } else {
           /* If hive queries are selected based on our search criteria */
@@ -158,7 +159,7 @@ public class HiveHistoryMigrationUtility {
             float calc = ((float) (i + 1)) / dbpojoHiveHistoryQuery.size() * 100;
             int progressPercentage = Math.round(calc);
             migrationresult.setProgressPercentage(progressPercentage);
-            migrationresult.setNumberOfQueryTransfered(i+1);
+            migrationresult.setNumberOfQueryTransfered(i + 1);
             getResourceManager(view).update(migrationresult, jobid);
 
             logger.info("_____________________");
@@ -182,7 +183,7 @@ public class HiveHistoryMigrationUtility {
 
             epochTime = hiveHistoryQueryImpl.getEpochTime();// getting system time as epoch format
 
-            if(usernames[k].equals("all")) {
+            if (usernames[k].equals("all")) {
               username = dbpojoHiveHistoryQuery.get(i).getOwnerName();
             }
 
@@ -219,7 +220,7 @@ public class HiveHistoryMigrationUtility {
       }
       logger.info("Migration Completed");
       migrationresult.setFlag(1);
-      if(totalQueries==0) {
+      if (totalQueries == 0) {
         migrationresult.setNumberOfQueryTransfered(0);
         migrationresult.setTotalNoQuery(0);
       } else {
@@ -230,30 +231,41 @@ public class HiveHistoryMigrationUtility {
       getResourceManager(view).update(migrationresult, jobid);
     } catch (SQLException e) {
       logger.error("Sql exception in ambari database: ", e);
+      migrationresult.setError("SQL Exception: " + e.getMessage());
       try {
         connectionAmbaridb.rollback();
         model.setIfSuccess(false);
         logger.error("Sql statement are Rolledback");
       } catch (SQLException e1) {
         logger.error("Sql rollback exception in ambari database",
-          e1);
+                e1);
       }
     } catch (ClassNotFoundException e) {
       logger.error("Class not found :- ", e);
+      migrationresult.setError("Class Not Found: " + e.getMessage());
     } catch (ParseException e) {
       logger.error("Parse Exception : ", e);
+      migrationresult.setError("Parse Exception: " + e.getMessage());
     } catch (URISyntaxException e) {
       logger.error("URI Syntax Exception: ", e);
+      migrationresult.setError("URI Syntax Exception: " + e.getMessage());
     } catch (PropertyVetoException e) {
       logger.error("PropertyVetoException: ", e);
+      migrationresult.setError("Property Veto Exception: " + e.getMessage());
     } catch (ItemNotFound itemNotFound) {
       itemNotFound.printStackTrace();
+      migrationresult.setError("Item Not Found: " + itemNotFound.getMessage());
+    } catch (Exception e) {
+      logger.error("Generic Exception: ", e);
+      migrationresult.setError("Exception: " + e.getMessage());
     } finally {
       if (connectionAmbaridb != null) try {
         connectionAmbaridb.close();
       } catch (SQLException e) {
         logger.error("Exception in closing the connection :", e);
+        migrationresult.setError("Exception in closing the connection: " + e.getMessage());
       }
+      getResourceManager(view).update(migrationresult, jobid);
     }
    //deleting the temporary files that are created during execution
     hiveHistoryQueryImpl.deleteFileQueryhql(ConfigurationCheckImplementation.getHomeDir());
@@ -274,4 +286,4 @@ public class HiveHistoryMigrationUtility {
 
   }
 
-}
+}
\ No newline at end of file
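
The setError(...) calls added above depend on an error property on MigrationResponse; that model change is only
visible in the diffstat (scripts/models/MigrationResponse.java, +6 lines), not in this excerpt. A plausible sketch of
the accessor pair, assuming a plain bean-style field (the real field name and any annotations may differ):

  // Hypothetical sketch of the MigrationResponse addition.
  private String error;

  public String getError() {
    return error;
  }

  public void setError(String error) {
    this.error = error;
  }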

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
index 33d57b4..1a5b742 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
@@ -260,7 +260,7 @@ public class HiveHistoryQueryMigrationImplementation {
 
   }
 
-  public ArrayList<HiveModel> fetchFromHue(String username, String startdate, String endtime, Connection connection, QuerySetHueDB huedatabase) throws ClassNotFoundException, SQLException {
+  public ArrayList<HiveModel> fetchFromHue(String username, String startdate, String endtime, Connection connection, QuerySetHueDB huedatabase) throws ClassNotFoundException, SQLException, IOException {
     int id = 0;
     int i = 0;
     ArrayList<HiveModel> hiveArrayList = new ArrayList<HiveModel>();

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
index c08455d..af493ba 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
@@ -351,7 +351,7 @@ public class HiveSavedQueryMigrationImplementation {
   }
 
   public ArrayList<HiveModel> fetchFromHuedb(String username, String startdate, String endtime, Connection connection, QuerySetHueDb huedatabase)
-    throws ClassNotFoundException, IOException {
+    throws ClassNotFoundException, SQLException, IOException {
     int id = 0;
     int i = 0;
     String[] query = new String[100];
@@ -474,9 +474,11 @@ public class HiveSavedQueryMigrationImplementation {
         hiveArrayList.add(hivepojo);
         i++;
       }
+      connection.commit();
 
     } catch (SQLException e2) {
       e2.printStackTrace();
+      connection.rollback();
     } finally
 
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
index 5fc171c..ddfff5e 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
@@ -73,7 +73,7 @@ public class HiveSavedQueryMigrationUtility {
     Connection connectionAmbaridb = null;
     Connection connectionHuedb = null;
 
-    int i = 0, j=0;
+    int i = 0, j = 0;
     String sequenceName = "";
 
     logger.info("-------------------------------------");
@@ -117,7 +117,7 @@ public class HiveSavedQueryMigrationUtility {
       logger.info("Ambari database is Oracle");
     }
 
-    int maxCountforFileResourceAmbaridb=0, maxCountforUdfAmbaridb=0, maxCountforSavequeryAmbaridb = 0;
+    int maxCountforFileResourceAmbaridb = 0, maxCountforUdfAmbaridb = 0, maxCountforSavequeryAmbaridb = 0;
     String time = null;
     Long epochtime = null;
     String dirNameforHiveSavedquery;
@@ -128,7 +128,7 @@ public class HiveSavedQueryMigrationUtility {
     try {
       String[] usernames = username.split(",");
       int totalQueries = 0;
-      for(int l=0; l<usernames.length; l++) {
+      for (int l = 0; l < usernames.length; l++) {
         connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); /* fetching connection to hue DB */
         logger.info("Hue database connection successful");
 
@@ -148,7 +148,7 @@ public class HiveSavedQueryMigrationUtility {
 
         if (dbpojoHiveSavedQuery.size() == 0) /* if no data has been fetched from hue db according to search criteria */ {
 
-          logger.info("No queries has been selected for the user " + username + " between dates: " + startDate +" - "+endDate);
+          logger.info("No queries have been selected for the user " + username + " between dates: " + startDate + " - " + endDate);
         } else {
 
           connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();/* connecting to ambari DB */
@@ -191,7 +191,7 @@ public class HiveSavedQueryMigrationUtility {
 
             time = hivesavedqueryimpl.getTime();/* getting system time */
 
-            if(usernames[l].equals("all")) {
+            if (usernames[l].equals("all")) {
               username = dbpojoHiveSavedQuery.get(i).getOwnerName();
             }
 
@@ -251,7 +251,7 @@ public class HiveSavedQueryMigrationUtility {
       }
       logger.info("Migration Completed");
       migrationresult.setFlag(1);
-      if(totalQueries==0) {
+      if (totalQueries == 0) {
         migrationresult.setNumberOfQueryTransfered(0);
         migrationresult.setTotalNoQuery(0);
       } else {
@@ -261,8 +261,8 @@ public class HiveSavedQueryMigrationUtility {
       }
       getResourceManager(view).update(migrationresult, jobid);
     } catch (SQLException e) {
-
       logger.error("SQL exception: ", e);
+      migrationresult.setError("SQL Exception: " + e.getMessage());
       try {
         connectionAmbaridb.rollback();
         logger.info("roll back done");
@@ -272,19 +272,28 @@ public class HiveSavedQueryMigrationUtility {
       }
     } catch (ClassNotFoundException e1) {
       logger.error("Class not found : ", e1);
+      migrationresult.setError("Class not found Exception: " + e1.getMessage());
     } catch (ParseException e) {
       logger.error("ParseException: ", e);
+      migrationresult.setError("Parse Exception: " + e.getMessage());
     } catch (URISyntaxException e) {
       logger.error("URISyntaxException: ", e);
+      migrationresult.setError("URI Syntax Exception: " + e.getMessage());
     } catch (PropertyVetoException e) {
       logger.error("PropertyVetoException:", e);
+      migrationresult.setError("Property Veto Exception: " + e.getMessage());
+    } catch (Exception e) {
+      logger.error("Generic Exception: ", e);
+      migrationresult.setError("Exception: " + e.getMessage());
     } finally {
       if (null != connectionAmbaridb)
         try {
           connectionAmbaridb.close();
         } catch (SQLException e) {
           logger.error("Error in connection close", e);
+          migrationresult.setError("Error in closing connection: " + e.getMessage());
         }
+      getResourceManager(view).update(migrationresult, jobid);
     }
 
 
@@ -308,8 +317,4 @@ public class HiveSavedQueryMigrationUtility {
     return model;
 
   }
-}
-
-
-
-
+}
\ No newline at end of file
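
The hunk above applies the error-reporting pattern that recurs in every migration utility touched by this patch: each catch block now records the failure message on the MigrationResponse via setError(), an added generic catch (Exception e) surfaces anything the specific handlers miss, and the finally block pushes one last update so the recorded error reaches the UI even when the migration aborts. A minimal, self-contained sketch of that shape — the classes and the simulated failure below are hypothetical; only the setError()/catch-all/finally structure mirrors the patch:

import java.sql.SQLException;

public class ErrorReportingSketch {

  // stand-in for the patch's MigrationResponse
  static class ResultSketch {
    private String error;
    void setError(String error) { this.error = error; }
    String getError() { return error; }
  }

  // stand-in for the real migration work; always fails so the demo exercises the catch
  static void runMigration() throws SQLException {
    throw new SQLException("simulated failure");
  }

  public static void main(String[] args) {
    ResultSketch result = new ResultSketch();
    try {
      runMigration();
    } catch (SQLException e) {
      result.setError("SQL Exception: " + e.getMessage());
    } catch (Exception e) {
      // catch-all mirroring the new generic handler in the patch
      result.setError("Exception: " + e.getMessage());
    } finally {
      // the patch calls getResourceManager(view).update(migrationresult, jobid) here
      System.out.println("final update pushed, error = " + result.getError());
    }
  }
}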

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
index 1cb0471..9767deb 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
@@ -261,7 +261,7 @@ public class PigJobMigrationImplementation {
     return strDate;
   }
 
-  public ArrayList<PigModel> fetchFromHueDB(String username, String startdate, String endtime, Connection connection, QuerySetHueDb huedatabase) throws ClassNotFoundException, IOException {
+  public ArrayList<PigModel> fetchFromHueDB(String username, String startdate, String endtime, Connection connection, QuerySetHueDb huedatabase) throws ClassNotFoundException, SQLException, IOException {
     int id = 0;
     int i = 0;
     String[] query = new String[100];
@@ -359,10 +359,11 @@ public class PigJobMigrationImplementation {
 
         i++;
       }
-
+      connection.commit();
 
     } catch (SQLException e) {
       logger.error("Sqlexception: ", e);
+      connection.rollback();
     } finally {
       try {
         if (connection != null)
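
fetchFromHueDB now pairs the read loop with an explicit connection.commit() and rolls back in the catch block if the query fails; since rollback() can itself throw, the method signature presumably had to widen to declare SQLException. A compilable sketch of that transaction handling, assuming a caller-supplied JDBC connection with auto-commit disabled and a hypothetical example_jobs table (the patch logs the failure rather than rethrowing it):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;

public class FetchSketch {

  public ArrayList<String> fetchTitles(Connection connection) throws SQLException {
    ArrayList<String> titles = new ArrayList<String>();
    try (Statement stmt = connection.createStatement();
         ResultSet rs = stmt.executeQuery("SELECT title FROM example_jobs")) {
      while (rs.next()) {
        titles.add(rs.getString("title"));
      }
      connection.commit();    // commit after a successful read, as the hunk does
    } catch (SQLException e) {
      // rollback() may throw SQLException, which is why the broadened
      // throws clause is needed in the first place
      connection.rollback();
      throw e;
    }
    return titles;
  }
}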

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
index 7bd36ea..b935bad 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
@@ -1,230 +1,239 @@
-  /**
-   * Licensed to the Apache Software Foundation (ASF) under one
-   * or more contributor license agreements.  See the NOTICE file
-   * distributed with this work for additional information
-   * regarding copyright ownership.  The ASF licenses this file
-   * to you under the Apache License, Version 2.0 (the
-   * "License"); you may not use this file except in compliance
-   * with the License.  You may obtain a copy of the License at
-   * <p/>
-   * http://www.apache.org/licenses/LICENSE-2.0
-   * <p/>
-   * Unless required by applicable law or agreed to in writing, software
-   * distributed under the License is distributed on an "AS IS" BASIS,
-   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   * See the License for the specific language governing permissions and
-   * limitations under the License.
-   */
-
-
-  package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
-
-  import java.beans.PropertyVetoException;
-  import java.io.IOException;
-  import java.net.URISyntaxException;
-  import java.sql.Connection;
-  import java.sql.SQLException;
-  import java.text.ParseException;
-  import java.util.ArrayList;
-
-  import org.apache.ambari.view.ViewContext;
-
-  import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-  import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-  import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-  import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-  import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
-  import org.apache.log4j.Logger;
-
-  import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-  import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-  import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.*;
-  import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
-
-  public class PigJobMigrationUtility {
-
-    protected MigrationResourceManager resourceManager = null;
-
-    public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
-      if (resourceManager == null) {
-        resourceManager = new MigrationResourceManager(view);
-      }
-      return resourceManager;
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.util.ArrayList;
+
+import org.apache.ambari.view.ViewContext;
+
+import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
+import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
+import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
+import org.apache.log4j.Logger;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.*;
+import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
+
+public class PigJobMigrationUtility {
+
+  protected MigrationResourceManager resourceManager = null;
+
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
     }
+    return resourceManager;
+  }
 
-    public void pigJobMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
+  public void pigJobMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
 
-      long startTime = System.currentTimeMillis();
+    long startTime = System.currentTimeMillis();
 
-      final Logger logger = Logger.getLogger(PigJobMigrationUtility.class);
-      Connection connectionHuedb = null;
-      Connection connectionAmbaridb = null;
+    final Logger logger = Logger.getLogger(PigJobMigrationUtility.class);
+    Connection connectionHuedb = null;
+    Connection connectionAmbaridb = null;
 
-      logger.info("------------------------------");
-      logger.info("pig Jobs Migration started");
-      logger.info("------------------------------");
-      logger.info("start date: " + startDate);
-      logger.info("enddate date: " + endDate);
-      logger.info("instance is: " + username);
-      logger.info("hue username is : " + instance);
+    logger.info("------------------------------");
+    logger.info("pig Jobs Migration started");
+    logger.info("------------------------------");
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    logger.info("instance is: " + username);
+    logger.info("hue username is : " + instance);
 
-      PigJobMigrationImplementation pigjobimpl = new PigJobMigrationImplementation();// creating the implementation object
+    PigJobMigrationImplementation pigjobimpl = new PigJobMigrationImplementation();// creating the implementation object
 
-      QuerySetHueDb huedatabase = null;
+    QuerySetHueDb huedatabase = null;
 
-      if (view.getProperties().get("huedrivername").contains("mysql")) {
-        huedatabase = new MysqlQuerySetHueDb();
-      } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
-        huedatabase = new PostgressQuerySetHueDb();
-      } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
-        huedatabase = new SqliteQuerySetHueDb();
-      } else if (view.getProperties().get("huedrivername").contains("oracle")) {
-        huedatabase = new OracleQuerySetHueDb();
-      }
+    if (view.getProperties().get("huedrivername").contains("mysql")) {
+      huedatabase = new MysqlQuerySetHueDb();
+    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+      huedatabase = new PostgressQuerySetHueDb();
+    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+      huedatabase = new SqliteQuerySetHueDb();
+    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+      huedatabase = new OracleQuerySetHueDb();
+    }
 
-      QuerySetAmbariDB ambaridatabase = null;
+    QuerySetAmbariDB ambaridatabase = null;
 
-      if (view.getProperties().get("ambaridrivername").contains("mysql")) {
-        ambaridatabase = new MysqlQuerySetAmbariDB();
-      } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
-        ambaridatabase = new PostgressQuerySetAmbariDB();
-      } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
-        ambaridatabase = new OracleQuerySetAmbariDB();
-      }
-      int maxCountforPigScript = 0, i = 0;
+    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+      ambaridatabase = new MysqlQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+      ambaridatabase = new PostgressQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+      ambaridatabase = new OracleQuerySetAmbariDB();
+    }
+    int maxCountforPigScript = 0, i = 0;
 
-      String time = null, timeIndorder = null;
-      Long epochtime = null;
-      String pigJobDirName;
-      ArrayList<PigModel> pigJobDbPojo = new ArrayList<PigModel>();
+    String time = null, timeIndorder = null;
+    Long epochtime = null;
+    String pigJobDirName;
+    ArrayList<PigModel> pigJobDbPojo = new ArrayList<PigModel>();
 
-      try {
+    try {
 
-        String[] usernames = username.split(",");
-        int totalQueries = 0;
-        for(int k=0; k<usernames.length; k++) {
+      String[] usernames = username.split(",");
+      int totalQueries = 0;
+      for (int k = 0; k < usernames.length; k++) {
 
-          connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connecting to hue database
-          username = usernames[k];
-          migrationresult.setProgressPercentage(0);
-          logger.info("Migration started for user " + username);
-          pigJobDbPojo = pigjobimpl.fetchFromHueDB(username, startDate, endDate, connectionHuedb, huedatabase);// fetching the PigJobs details from hue
-          totalQueries += pigJobDbPojo.size();
-          for (int j = 0; j < pigJobDbPojo.size(); j++) {
-            logger.info("the query fetched from hue=" + pigJobDbPojo.get(i).getScript());
+        connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connecting to hue database
+        username = usernames[k];
+        migrationresult.setProgressPercentage(0);
+        logger.info("Migration started for user " + username);
+        pigJobDbPojo = pigjobimpl.fetchFromHueDB(username, startDate, endDate, connectionHuedb, huedatabase);// fetching the PigJobs details from hue
+        totalQueries += pigJobDbPojo.size();
+        for (int j = 0; j < pigJobDbPojo.size(); j++) {
+          logger.info("the query fetched from hue=" + pigJobDbPojo.get(i).getScript());
 
-          }
+        }
 
                 /*No pig Job details has been fetched accordring to search criteria*/
-          if (pigJobDbPojo.size() == 0) {
+        if (pigJobDbPojo.size() == 0) {
 
-            logger.info("No queries has been selected for the user " + username + " between dates: " + startDate +" - "+endDate);
-          } else {
+          logger.info("No queries has been selected for the user " + username + " between dates: " + startDate + " - " + endDate);
+        } else {
 
-            connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
-            connectionAmbaridb.setAutoCommit(false);
+          connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+          connectionAmbaridb.setAutoCommit(false);
 
-            int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase);
-            int sequence = pigjobimpl.fetchSequenceno(connectionAmbaridb, fetchPigTablenameInstance, ambaridatabase);
+          int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase);
+          int sequence = pigjobimpl.fetchSequenceno(connectionAmbaridb, fetchPigTablenameInstance, ambaridatabase);
 
-            for (i = 0; i < pigJobDbPojo.size(); i++) {
+          for (i = 0; i < pigJobDbPojo.size(); i++) {
 
-              float calc = ((float) (i + 1)) / pigJobDbPojo.size() * 100;
-              int progressPercentage = Math.round(calc);
+            float calc = ((float) (i + 1)) / pigJobDbPojo.size() * 100;
+            int progressPercentage = Math.round(calc);
 
-              migrationresult.setProgressPercentage(progressPercentage);
-              migrationresult.setNumberOfQueryTransfered(i+1);
-              getResourceManager(view).update(migrationresult, jobid);
+            migrationresult.setProgressPercentage(progressPercentage);
+            migrationresult.setNumberOfQueryTransfered(i + 1);
+            getResourceManager(view).update(migrationresult, jobid);
 
-              logger.info("Loop No." + (i + 1));
-              logger.info("________________");
-              logger.info("the title of script " + pigJobDbPojo.get(i).getTitle());
+            logger.info("Loop No." + (i + 1));
+            logger.info("________________");
+            logger.info("the title of script " + pigJobDbPojo.get(i).getTitle());
 
-              maxCountforPigScript = i + sequence + 1;
+            maxCountforPigScript = i + sequence + 1;
 
-              time = pigjobimpl.getTime();
-              timeIndorder = pigjobimpl.getTimeInorder();
-              epochtime = pigjobimpl.getEpochTime();
+            time = pigjobimpl.getTime();
+            timeIndorder = pigjobimpl.getTimeInorder();
+            epochtime = pigjobimpl.getEpochTime();
 
-              if(usernames[k].equals("all")) {
-                username = pigJobDbPojo.get(i).getUserName();
-              }
+            if (usernames[k].equals("all")) {
+              username = pigJobDbPojo.get(i).getUserName();
+            }
 
-              pigJobDirName = "/user/" + username + "/pig/jobs/" + pigJobDbPojo.get(i).getTitle() + "_" + time + "/";
+            pigJobDirName = "/user/" + username + "/pig/jobs/" + pigJobDbPojo.get(i).getTitle() + "_" + time + "/";
 
-              pigjobimpl.insertRowPigJob(pigJobDirName, maxCountforPigScript, time, timeIndorder, epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance, pigJobDbPojo.get(i).getStatus(), instance, i, ambaridatabase, username);
+            pigjobimpl.insertRowPigJob(pigJobDirName, maxCountforPigScript, time, timeIndorder, epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance, pigJobDbPojo.get(i).getStatus(), instance, i, ambaridatabase, username);
 
-              if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-                pigjobimpl.createDirPigJobSecured(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), username, view.getProperties().get("PrincipalUserName"));
-                pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username, view.getProperties().get("PrincipalUserName"));
-                pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username, view.getProperties().get("PrincipalUserName"));
-                pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username, view.getProperties().get("PrincipalUserName"));
+            if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+              pigjobimpl.createDirPigJobSecured(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), username, view.getProperties().get("PrincipalUserName"));
+              pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username, view.getProperties().get("PrincipalUserName"));
+              pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username, view.getProperties().get("PrincipalUserName"));
+              pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username, view.getProperties().get("PrincipalUserName"));
 
-              } else {
+            } else {
 
-                pigjobimpl.createDirPigJob(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), username);
-                pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username);
-                pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username);
-                pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username);
+              pigjobimpl.createDirPigJob(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), username);
+              pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username);
+              pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username);
+              pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"), username);
 
-              }
+            }
 
-              logger.info(pigJobDbPojo.get(i).getTitle() + "has been migrated to Ambari");
+            logger.info(pigJobDbPojo.get(i).getTitle() + "has been migrated to Ambari");
 
-            }
-            pigjobimpl.updateSequenceno(connectionAmbaridb, maxCountforPigScript, fetchPigTablenameInstance, ambaridatabase);
-            connectionAmbaridb.commit();
           }
-          logger.info("Migration completed for user " + username);
+          pigjobimpl.updateSequenceno(connectionAmbaridb, maxCountforPigScript, fetchPigTablenameInstance, ambaridatabase);
+          connectionAmbaridb.commit();
         }
-        migrationresult.setFlag(1);
-        if(totalQueries==0) {
-          migrationresult.setNumberOfQueryTransfered(0);
-          migrationresult.setTotalNoQuery(0);
-        } else {
-          migrationresult.setNumberOfQueryTransfered(totalQueries);
-          migrationresult.setTotalNoQuery(totalQueries);
-          migrationresult.setProgressPercentage(100);
-        }
-        getResourceManager(view).update(migrationresult, jobid);
-      } catch (SQLException e) {
-        logger.error("sql exception in ambari database:", e);
+        logger.info("Migration completed for user " + username);
+      }
+      migrationresult.setFlag(1);
+      if (totalQueries == 0) {
+        migrationresult.setNumberOfQueryTransfered(0);
+        migrationresult.setTotalNoQuery(0);
+      } else {
+        migrationresult.setNumberOfQueryTransfered(totalQueries);
+        migrationresult.setTotalNoQuery(totalQueries);
+        migrationresult.setProgressPercentage(100);
+      }
+      getResourceManager(view).update(migrationresult, jobid);
+    } catch (SQLException e) {
+      logger.error("sql exception in ambari database:", e);
+      migrationresult.setError("SQL Exception: " + e.getMessage());
+      try {
+        connectionAmbaridb.rollback();
+        logger.info("roll back done");
+      } catch (SQLException e1) {
+        logger.error("roll back  exception:", e1);
+      }
+    } catch (ClassNotFoundException e2) {
+      logger.error("class not found exception:", e2);
+      migrationresult.setError("Class Not Found Exception: " + e2.getMessage());
+    } catch (ParseException e) {
+      logger.error("ParseException: ", e);
+      migrationresult.setError("ParseException: " + e.getMessage());
+    } catch (URISyntaxException e) {
+      logger.error("URISyntaxException", e);
+      migrationresult.setError("URI Syntax Exception: " + e.getMessage());
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException", e);
+      migrationresult.setError("Property Veto Exception: " + e.getMessage());
+    } catch (Exception e) {
+      logger.error("Generic Exception: ", e);
+      migrationresult.setError("Exception: " + e.getMessage());
+    } finally {
+      if (null != connectionAmbaridb)
         try {
-          connectionAmbaridb.rollback();
-          logger.info("roll back done");
-        } catch (SQLException e1) {
-          logger.error("roll back  exception:", e1);
+          connectionAmbaridb.close();
+        } catch (SQLException e) {
+          logger.error("connection closing exception ", e);
+          migrationresult.setError("Error closing connection: " + e.getMessage());
         }
-      } catch (ClassNotFoundException e2) {
-        logger.error("class not found exception:", e2);
-      } catch (ParseException e) {
-        logger.error("ParseException: ", e);
-      } catch (URISyntaxException e) {
-        logger.error("URISyntaxException", e);
-      } catch (PropertyVetoException e) {
-        logger.error("PropertyVetoException", e);
-      } finally {
-        if (null != connectionAmbaridb)
-          try {
-            connectionAmbaridb.close();
-          } catch (SQLException e) {
-            logger.error("connection closing exception ", e);
-          }
-      }
-
-      logger.info("------------------------------");
-      logger.info("pig Job Migration End");
-      logger.info("------------------------------");
+      getResourceManager(view).update(migrationresult, jobid);
+    }
 
-      long stopTime = System.currentTimeMillis();
-      long elapsedTime = stopTime - startTime;
+    logger.info("------------------------------");
+    logger.info("pig Job Migration End");
+    logger.info("------------------------------");
 
-      migrationresult.setJobtype("hivehistoryquerymigration");
-      migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
-      getResourceManager(view).update(migrationresult, jobid);
+    long stopTime = System.currentTimeMillis();
+    long elapsedTime = stopTime - startTime;
 
+    migrationresult.setJobtype("pigjobmigration");
+    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
+    getResourceManager(view).update(migrationresult, jobid);
 
-    }
 
   }
 
+}
\ No newline at end of file
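
Most of this hunk is a re-indentation of PigJobMigrationUtility, but two functional changes ride along: the same setError()/generic-catch/final-update pattern applied in the Hive utility above, and a fix to the copy-pasted job type label, which now reports "pigjobmigration" instead of "hivehistoryquerymigration". One way to keep such labels from drifting between utilities — not something this patch does, purely a hypothetical refactoring — is to centralize them:

public enum MigrationJobType {
  HIVE_HISTORY_QUERY("hivehistoryquerymigration"),
  PIG_JOB("pigjobmigration"),
  PIG_SAVED_SCRIPT("pigsavedscriptmigration"),
  PIG_UDF("pigudfmigration");

  private final String label;

  MigrationJobType(String label) {
    this.label = label;
  }

  public String label() {
    return label;
  }
}

Each utility would then call migrationresult.setJobtype(MigrationJobType.PIG_JOB.label()), and a typo in the label becomes a compile error rather than a wrong status string.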

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
index 82461ca..1325a45 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
@@ -287,7 +287,7 @@ public class PigScriptMigrationImplementation {
   }
 
 
-  public ArrayList<PigModel> fetchFromHueDatabase(String username, String startdate, String endtime, Connection connection, QuerySetHueDb huedatabase) throws ClassNotFoundException, IOException {
+  public ArrayList<PigModel> fetchFromHueDatabase(String username, String startdate, String endtime, Connection connection, QuerySetHueDb huedatabase) throws ClassNotFoundException, SQLException, IOException {
     int id = 0;
     int i = 0;
     ResultSet rs1 = null;
@@ -370,10 +370,11 @@ public class PigScriptMigrationImplementation {
         pigArrayList.add(pojopig);
         i++;
       }
-
+      connection.commit();
 
     } catch (SQLException e) {
       logger.error("SQLException", e);
+      connection.rollback();
     } finally {
       try {
         if (connection != null)

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
index 5e9f0d1..8ed13fb 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
@@ -110,7 +110,7 @@ public class PigScriptMigrationUtility {
     try {
       String[] usernames = username.split(",");
       int totalQueries = 0;
-      for(int k=0; k<usernames.length; k++) {
+      for (int k = 0; k < usernames.length; k++) {
         connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connection to Hue DB
         username = usernames[k];
         migrationresult.setProgressPercentage(0);
@@ -128,7 +128,7 @@ public class PigScriptMigrationUtility {
         /* If No pig Script has been fetched from Hue db according to our search criteria*/
         if (dbpojoPigSavedscript.size() == 0) {
 
-          logger.info("No queries has been selected for the user " + username + " between dates: " + startDate +" - "+endDate);
+          logger.info("No queries has been selected for the user " + username + " between dates: " + startDate + " - " + endDate);
         } else {
 
           connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
@@ -148,7 +148,7 @@ public class PigScriptMigrationUtility {
             float calc = ((float) (i + 1)) / dbpojoPigSavedscript.size() * 100;
             int progressPercentage = Math.round(calc);
             migrationresult.setProgressPercentage(progressPercentage);
-            migrationresult.setNumberOfQueryTransfered(i+1);
+            migrationresult.setNumberOfQueryTransfered(i + 1);
             getResourceManager(view).update(migrationresult, jobid);
 
             logger.info("Loop No." + (i + 1));
@@ -163,7 +163,7 @@ public class PigScriptMigrationUtility {
 
             maxcountforpigsavedscript = i + sequence + 1;
 
-            if(usernames[k].equals("all")) {
+            if (usernames[k].equals("all")) {
               username = dbpojoPigSavedscript.get(i).getUserName();
             }
 
@@ -200,7 +200,7 @@ public class PigScriptMigrationUtility {
       }
       logger.info("Migration Completed");
       migrationresult.setFlag(1);
-      if(totalQueries==0) {
+      if (totalQueries == 0) {
         migrationresult.setNumberOfQueryTransfered(0);
         migrationresult.setTotalNoQuery(0);
       } else {
@@ -211,34 +211,44 @@ public class PigScriptMigrationUtility {
       getResourceManager(view).update(migrationresult, jobid);
     } catch (SQLException e) {
       logger.error("Sql exception in ambari database", e);
+      migrationresult.setError("SQL Exception: " + e.getMessage());
       try {
         connectionAmbaridb.rollback();
         logger.info("rollback done");
       } catch (SQLException e1) {
-        logger.error("Sql exception while doing roll back", e);
+        logger.error("Sql exception while doing roll back", e1);
       }
     } catch (ClassNotFoundException e2) {
       logger.error("class not found exception", e2);
+      migrationresult.setError("Class Not Found Exception: " + e2.getMessage());
     } catch (ParseException e) {
       logger.error("ParseException: ", e);
+      migrationresult.setError("Parse Exception: " + e.getMessage());
     } catch (PropertyVetoException e) {
       logger.error("PropertyVetoException: ", e);
+      migrationresult.setError("Property Veto Exception: " + e.getMessage());
     } catch (URISyntaxException e) {
       e.printStackTrace();
+      migrationresult.setError("URISyntaxException: " + e.getMessage());
+    } catch (Exception e) {
+      logger.error("Generic Exception: ", e);
+      migrationresult.setError("Exception: " + e.getMessage());
     } finally {
       if (null != connectionAmbaridb)
         try {
           connectionAmbaridb.close();
         } catch (SQLException e) {
           logger.error("connection close exception: ", e);
+          migrationresult.setError("Error Closing Connection: " + e.getMessage());
         }
+      getResourceManager(view).update(migrationresult, jobid);
     }
 
     long stopTime = System.currentTimeMillis();
     long elapsedTime = stopTime - startTime;
 
 
-    migrationresult.setJobtype("hivehistoryquerymigration");
+    migrationresult.setJobtype("pigsavedscriptmigration");
     migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
     getResourceManager(view).update(migrationresult, jobid);
 
@@ -249,5 +259,4 @@ public class PigScriptMigrationUtility {
 
   }
 
-
-}
+}
\ No newline at end of file
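
Besides the pattern already described, this hunk fixes the rollback handler in PigScriptMigrationUtility so the inner catch logs the rollback failure itself (e1) rather than the original exception (e), and corrects the job type to "pigsavedscriptmigration". A small hypothetical helper — not part of the patch — showing another way to keep both failures visible when a rollback attempt fails, using suppressed exceptions:

import java.sql.Connection;
import java.sql.SQLException;

public final class RollbackSketch {

  private RollbackSketch() {
  }

  // roll back quietly and attach any rollback failure to the original error
  public static void rollbackQuietly(Connection connection, Exception original) {
    if (connection == null) {
      return;
    }
    try {
      connection.rollback();
    } catch (SQLException rollbackFailure) {
      original.addSuppressed(rollbackFailure);
    }
  }
}

The original exception then carries the rollback failure in its own stack trace, instead of the rollback message being logged against the wrong exception variable as it was before this fix.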

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationImplementation.java
index a7728a8..351d5e1 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationImplementation.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationImplementation.java
@@ -125,7 +125,7 @@ public class PigUdfMigrationImplementation {
     }
 
 
-    public ArrayList<PigModel> fetchFromHueDatabase(String username, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, IOException {
+    public ArrayList<PigModel> fetchFromHueDatabase(String username, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, SQLException, IOException {
         int id = 0;
         int i = 0;
         ResultSet rs1;
@@ -178,10 +178,11 @@ public class PigUdfMigrationImplementation {
                 pigArrayList.add(pojopig);
                 i++;
             }
-
+            connection.commit();
 
         } catch (SQLException e) {
             logger.error("SQLException", e);
+            connection.rollback();
         } finally {
             try {
                 if (connection != null)

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationUtility.java
index d6a69fc..9656ad6 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationUtility.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigudf/PigUdfMigrationUtility.java
@@ -45,191 +45,199 @@ import java.util.ArrayList;
 
 public class PigUdfMigrationUtility {
 
-    protected MigrationResourceManager resourceManager = null;
+  protected MigrationResourceManager resourceManager = null;
 
-    public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
-        if (resourceManager == null) {
-            resourceManager = new MigrationResourceManager(view);
-        }
-        return resourceManager;
+  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
+    if (resourceManager == null) {
+      resourceManager = new MigrationResourceManager(view);
     }
+    return resourceManager;
+  }
 
 
-    public void pigUdfMigration(String username, String instance, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
-
-        long startTime = System.currentTimeMillis();
-
-        final Logger logger = Logger.getLogger(PigUdfMigrationUtility.class);
-        Connection connectionHuedb = null;
-        Connection connectionAmbaridb = null;
+  public void pigUdfMigration(String username, String instance, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
 
-        logger.info("-------------------------------------");
-        logger.info("pig udf Migration started");
-        logger.info("-------------------------------------");
+    long startTime = System.currentTimeMillis();
 
+    final Logger logger = Logger.getLogger(PigUdfMigrationUtility.class);
+    Connection connectionHuedb = null;
+    Connection connectionAmbaridb = null;
 
-        int i = 0;
-        
-        logger.info("instance is: " + username);
-        logger.info("hue username is : " + instance);
+    logger.info("-------------------------------------");
+    logger.info("pig udf Migration started");
+    logger.info("-------------------------------------");
 
-        //Reading the configuration file
-        PigUdfMigrationImplementation pigudfmigration = new PigUdfMigrationImplementation();
 
-        QuerySet huedatabase = null;
+    int i = 0;
 
-        if (view.getProperties().get("huedrivername").contains("mysql")) {
-            huedatabase = new MysqlQuerySet();
-        } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
-            huedatabase = new PostgressQuerySet();
-        } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
+    logger.info("instance is: " + username);
+    logger.info("hue username is : " + instance);
 
-            huedatabase = new SqliteQuerySet();
-        } else if (view.getProperties().get("huedrivername").contains("oracle")) {
-            huedatabase = new OracleQuerySet();
-        }
+    //Reading the configuration file
+    PigUdfMigrationImplementation pigudfmigration = new PigUdfMigrationImplementation();
 
-        QuerySetAmbariDB ambaridatabase = null;
+    QuerySet huedatabase = null;
 
+    if (view.getProperties().get("huedrivername").contains("mysql")) {
+      huedatabase = new MysqlQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
+      huedatabase = new PostgressQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
 
-        if (view.getProperties().get("ambaridrivername").contains("mysql")) {
-            ambaridatabase = new MysqlQuerySetAmbariDB();
-        } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
-            ambaridatabase = new PostgressQuerySetAmbariDB();
-        } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
-            ambaridatabase = new OracleQuerySetAmbariDB();
-        }
+      huedatabase = new SqliteQuerySet();
+    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
+      huedatabase = new OracleQuerySet();
+    }
 
-        int maxcountforpigudf = 0;
-        String dirNameForPigUdf = "";
-        int pigInstanceTableId, sequence;
+    QuerySetAmbariDB ambaridatabase = null;
 
-        ArrayList<PigModel> dbpojoPigUdf = new ArrayList<PigModel>();
 
-        try {
-            String[] usernames = username.split(",");
-            int totalQueries = 0;
-            for(int k=0; k<usernames.length; k++) {
-                connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connection to Hue DB
-                username = usernames[k];
-                migrationresult.setProgressPercentage(0);
-                logger.info("Migration started for user " + username);
-                dbpojoPigUdf = pigudfmigration.fetchFromHueDatabase(username, connectionHuedb, huedatabase);// Fetching pig script details from Hue DB
-                totalQueries += dbpojoPigUdf.size();
+    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
+      ambaridatabase = new MysqlQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
+      ambaridatabase = new PostgressQuerySetAmbariDB();
+    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
+      ambaridatabase = new OracleQuerySetAmbariDB();
+    }
 
-                for (int j = 0; j < dbpojoPigUdf.size(); j++) {
-                    logger.info("jar fetched from hue=" + dbpojoPigUdf.get(j).getFileName());
+    int maxcountforpigudf = 0;
+    String dirNameForPigUdf = "";
+    int pigInstanceTableId, sequence;
 
-                }
+    ArrayList<PigModel> dbpojoPigUdf = new ArrayList<PigModel>();
 
+    try {
+      String[] usernames = username.split(",");
+      int totalQueries = 0;
+      for (int k = 0; k < usernames.length; k++) {
+        connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connection to Hue DB
+        username = usernames[k];
+        migrationresult.setProgressPercentage(0);
+        logger.info("Migration started for user " + username);
+        dbpojoPigUdf = pigudfmigration.fetchFromHueDatabase(username, connectionHuedb, huedatabase);// Fetching pig script details from Hue DB
+        totalQueries += dbpojoPigUdf.size();
 
-          /* If No pig Script has been fetched from Hue db according to our search criteria*/
-                if (dbpojoPigUdf.size() == 0) {
+        for (int j = 0; j < dbpojoPigUdf.size(); j++) {
+          logger.info("jar fetched from hue=" + dbpojoPigUdf.get(j).getFileName());
 
-                    logger.info("No queries has been selected for the user " + username);
-                } else {
+        }
 
-                    connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
-                    connectionAmbaridb.setAutoCommit(false);
 
-                    logger.info("loop will continue for " + dbpojoPigUdf.size() + "times");
+          /* If No pig Script has been fetched from Hue db according to our search criteria*/
+        if (dbpojoPigUdf.size() == 0) {
 
-                    //for each pig udf found in Hue Database
+          logger.info("No queries has been selected for the user " + username);
+        } else {
 
-                    pigInstanceTableId = pigudfmigration.fetchInstanceTablenamePigUdf(connectionAmbaridb, instance, ambaridatabase);// finding the table name in ambari from the given instance
+          connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
+          connectionAmbaridb.setAutoCommit(false);
 
-                    sequence = pigudfmigration.fetchSequenceno(connectionAmbaridb, pigInstanceTableId, ambaridatabase);
+          logger.info("loop will continue for " + dbpojoPigUdf.size() + "times");
 
-                    for (i = 0; i < dbpojoPigUdf.size(); i++) {
+          //for each pig udf found in Hue Database
 
+          pigInstanceTableId = pigudfmigration.fetchInstanceTablenamePigUdf(connectionAmbaridb, instance, ambaridatabase);// finding the table name in ambari from the given instance
 
-                        float calc = ((float) (i + 1)) / dbpojoPigUdf.size() * 100;
-                        int progressPercentage = Math.round(calc);
-                        migrationresult.setProgressPercentage(progressPercentage);
-                        migrationresult.setNumberOfQueryTransfered(i + 1);
-                        getResourceManager(view).update(migrationresult, jobid);
+          sequence = pigudfmigration.fetchSequenceno(connectionAmbaridb, pigInstanceTableId, ambaridatabase);
 
-                        logger.info("Loop No." + (i + 1));
-                        logger.info("________________");
-                        logger.info("jar name:  " + dbpojoPigUdf.get(i).getFileName());
+          for (i = 0; i < dbpojoPigUdf.size(); i++) {
 
-                        maxcountforpigudf = i + sequence + 1;
 
+            float calc = ((float) (i + 1)) / dbpojoPigUdf.size() * 100;
+            int progressPercentage = Math.round(calc);
+            migrationresult.setProgressPercentage(progressPercentage);
+            migrationresult.setNumberOfQueryTransfered(i + 1);
+            getResourceManager(view).update(migrationresult, jobid);
 
-                        String ownerName = dbpojoPigUdf.get(i).getUserName();
-                        String filePath = dbpojoPigUdf.get(i).getUrl();
-                        String fileName = dbpojoPigUdf.get(i).getFileName();
-                        if(usernames[k].equals("all")) {
-                            username = dbpojoPigUdf.get(i).getUserName();
-                        }
-                        dirNameForPigUdf = "/user/" + username + "/pig/udf/";
-                        String ambariNameNodeUri = view.getProperties().get("namenode_URI_Ambari");
-                        String dirAndFileName = ambariNameNodeUri + dirNameForPigUdf + fileName;
+            logger.info("Loop No." + (i + 1));
+            logger.info("________________");
+            logger.info("jar name:  " + dbpojoPigUdf.get(i).getFileName());
 
-                        if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-                            pigudfmigration.createDirPigUdfSecured(dirNameForPigUdf, ambariNameNodeUri, ownerName, view.getProperties().get("PrincipalUserName"));
-                            pigudfmigration.copyFileBetweenHdfsSecured(filePath, dirNameForPigUdf, ambariNameNodeUri, ownerName, view.getProperties().get("PrincipalUserName"));
-                        } else {
-                            pigudfmigration.createDirPigUdf(dirNameForPigUdf, ambariNameNodeUri, ownerName);
-                            pigudfmigration.copyFileBetweenHdfs(filePath, dirNameForPigUdf, ambariNameNodeUri, ownerName);
-                        }
+            maxcountforpigudf = i + sequence + 1;
 
-                        pigudfmigration.insertRowForPigUdf(maxcountforpigudf, dirAndFileName, fileName, connectionAmbaridb, pigInstanceTableId, ambaridatabase, ownerName);
-                        logger.info(dbpojoPigUdf.get(i).getFileName() + "Migrated to Ambari");
 
-                    }
-                    pigudfmigration.updateSequenceno(connectionAmbaridb, maxcountforpigudf, pigInstanceTableId, ambaridatabase);
-                    connectionAmbaridb.commit();
-                }
-                logger.info("Migration completed for user " + username);
+            String ownerName = dbpojoPigUdf.get(i).getUserName();
+            String filePath = dbpojoPigUdf.get(i).getUrl();
+            String fileName = dbpojoPigUdf.get(i).getFileName();
+            if (usernames[k].equals("all")) {
+              username = dbpojoPigUdf.get(i).getUserName();
             }
-            migrationresult.setFlag(1);
-            if(totalQueries==0) {
-                migrationresult.setNumberOfQueryTransfered(0);
-                migrationresult.setTotalNoQuery(0);
+            dirNameForPigUdf = "/user/" + username + "/pig/udf/";
+            String ambariNameNodeUri = view.getProperties().get("namenode_URI_Ambari");
+            String dirAndFileName = ambariNameNodeUri + dirNameForPigUdf + fileName;
+
+            if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+              pigudfmigration.createDirPigUdfSecured(dirNameForPigUdf, ambariNameNodeUri, ownerName, view.getProperties().get("PrincipalUserName"));
+              pigudfmigration.copyFileBetweenHdfsSecured(filePath, dirNameForPigUdf, ambariNameNodeUri, ownerName, view.getProperties().get("PrincipalUserName"));
             } else {
-                migrationresult.setNumberOfQueryTransfered(totalQueries);
-                migrationresult.setTotalNoQuery(totalQueries);
-                migrationresult.setProgressPercentage(100);
+              pigudfmigration.createDirPigUdf(dirNameForPigUdf, ambariNameNodeUri, ownerName);
+              pigudfmigration.copyFileBetweenHdfs(filePath, dirNameForPigUdf, ambariNameNodeUri, ownerName);
             }
-            getResourceManager(view).update(migrationresult, jobid);
+
+            pigudfmigration.insertRowForPigUdf(maxcountforpigudf, dirAndFileName, fileName, connectionAmbaridb, pigInstanceTableId, ambaridatabase, ownerName);
+            logger.info(dbpojoPigUdf.get(i).getFileName() + "Migrated to Ambari");
+
+          }
+          pigudfmigration.updateSequenceno(connectionAmbaridb, maxcountforpigudf, pigInstanceTableId, ambaridatabase);
+          connectionAmbaridb.commit();
+        }
+        logger.info("Migration completed for user " + username);
+      }
+      migrationresult.setFlag(1);
+      if (totalQueries == 0) {
+        migrationresult.setNumberOfQueryTransfered(0);
+        migrationresult.setTotalNoQuery(0);
+      } else {
+        migrationresult.setNumberOfQueryTransfered(totalQueries);
+        migrationresult.setTotalNoQuery(totalQueries);
+        migrationresult.setProgressPercentage(100);
+      }
+      getResourceManager(view).update(migrationresult, jobid);
+    } catch (SQLException e) {
+      logger.error("Sql exception in ambari database", e);
+      migrationresult.setError("SQL Exception: " + e.getMessage());
+      try {
+        connectionAmbaridb.rollback();
+        logger.info("rollback done");
+      } catch (SQLException e1) {
+        logger.error("Sql exception while doing roll back", e1);
+      }
+    } catch (ClassNotFoundException e2) {
+      logger.error("class not found exception", e2);
+      migrationresult.setError("Class Not Found Exception: " + e2.getMessage());
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException: ", e);
+      migrationresult.setError("Property Veto Exception: " + e.getMessage());
+    } catch (URISyntaxException e) {
+      e.printStackTrace();
+      migrationresult.setError("URI Syntax Exception: " + e.getMessage());
+    } catch (Exception e) {
+      logger.error("Generic Exception: ", e);
+      migrationresult.setError("Exception: " + e.getMessage());
+    } finally {
+      if (null != connectionAmbaridb)
+        try {
+          connectionAmbaridb.close();
         } catch (SQLException e) {
-            logger.error("Sql exception in ambari database", e);
-            try {
-                connectionAmbaridb.rollback();
-                logger.info("rollback done");
-            } catch (SQLException e1) {
-                logger.error("Sql exception while doing roll back", e);
-            }
-        } catch (ClassNotFoundException e2) {
-            logger.error("class not found exception", e2);
-
-        } catch (PropertyVetoException e) {
-            logger.error("PropertyVetoException: ", e);
-        } catch (URISyntaxException e) {
-            e.printStackTrace();
-        } finally {
-            if (null != connectionAmbaridb)
-                try {
-                    connectionAmbaridb.close();
-                } catch (SQLException e) {
-                    logger.error("connection close exception: ", e);
-                }
+          logger.error("connection close exception: ", e);
+          migrationresult.setError("Error in closing connection: " + e.getMessage());
         }
+      getResourceManager(view).update(migrationresult, jobid);
+    }
 
-        long stopTime = System.currentTimeMillis();
-        long elapsedTime = stopTime - startTime;
+    long stopTime = System.currentTimeMillis();
+    long elapsedTime = stopTime - startTime;
 
 
-        migrationresult.setJobtype("hivehistoryquerymigration");
-        migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
-        getResourceManager(view).update(migrationresult, jobid);
+    migrationresult.setJobtype("pigudfmigration");
+    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
+    getResourceManager(view).update(migrationresult, jobid);
 
 
-        logger.info("----------------------------------");
-        logger.info("pig udf Migration ends");
-        logger.info("----------------------------------");
+    logger.info("----------------------------------");
+    logger.info("pig udf Migration ends");
+    logger.info("----------------------------------");
 
-    }
+  }
 
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java
index 3581149..612f73b 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java
@@ -45,6 +45,7 @@ public class MigrationResponse implements Serializable,PersonalResource{
   private String jobtype="";
   private String isNoQuerySelected="";
   private int flag;
+  private String error;
 
   public String getTotalTimeTaken() {
     return totalTimeTaken;
@@ -144,4 +145,9 @@ public class MigrationResponse implements Serializable,PersonalResource{
   public int getFlag() { return flag; }
 
   public void setFlag(int flag) { this.flag = flag; }
+
+  public String getError() { return error; }
+
+  public void setError(String error) { this.error = error; }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
index a65e367..877be99 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
@@ -27,5 +27,6 @@ export default Model.extend({
   totalTimeTaken: DS.attr('string'),
   jobtype: DS.attr('string'),
   isNoQuerySelected: DS.attr('string'),
-  flag: DS.attr('string')
+  flag: DS.attr('string'),
+  error: DS.attr('string')
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
index 52716f2..746049d 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
@@ -97,10 +97,15 @@ export default Ember.Route.extend({
         var progressPercentage = progress.get('progressPercentage');
         var numberOfQueryTransfered = progress.get('numberOfQueryTransfered');
         var flagForCompletion = parseInt(progress.get('flag'));
+        var error = progress.get('error');
         console.log("the progress percentage is="+progressPercentage);
         console.log("flag status is "+flagForCompletion);
+        console.log("error is "+error);
+        if(error) {
+          control.set('error', error);
+          control.set('jobstatus', null);
 
-        if (flagForCompletion === 1) {
+        } else if (flagForCompletion === 1) {
           var totalNoQuery = progress.get('totalNoQuery');
           var intanceName = progress.get('intanceName');
           var userNameofhue = progress.get('userNameofhue');

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
index ed90469..43653e7 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
@@ -92,9 +92,15 @@ export default Ember.Route.extend({
         var progressPercentage = progress.get('progressPercentage');
         var numberOfQueryTransfered = progress.get('numberOfQueryTransfered');
         var flagForCompletion = parseInt(progress.get('flag'));
+        var error = progress.get('error');
         console.log("the progress percentage is="+progressPercentage);
+        console.log("flag status is "+flagForCompletion);
+        console.log("error is "+error);
+        if(error) {
+          control.set('error', error);
+          control.set('jobstatus', null);
 
-        if (flagForCompletion === 1) {
+        } else if (flagForCompletion === 1) {
           var totalNoQuery = progress.get('totalNoQuery');
           var intanceName = progress.get('intanceName');
           var userNameofhue = progress.get('userNameofhue');

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
index 225f7ee..a5409da 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
@@ -94,10 +94,15 @@ export default Ember.Route.extend({
         var progressPercentage = progress.get('progressPercentage');
         var numberOfQueryTransfered = progress.get('numberOfQueryTransfered');
         var flagForCompletion = parseInt(progress.get('flag'));
+        var error = progress.get('error');
         console.log("the progress percentage is="+progressPercentage);
         console.log("flag completion status is "+flagForCompletion);
+        console.log("error is "+error);
+        if(error) {
+          control.set('error', error);
+          control.set('jobstatus', null);
 
-        if (flagForCompletion === 1) {
+        } else if (flagForCompletion === 1) {
           var totalNoQuery = progress.get('totalNoQuery');
           var intanceName = progress.get('intanceName');
           var userNameofhue = progress.get('userNameofhue');

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
index cc6c631..b1fa481 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
@@ -94,9 +94,15 @@ export default Ember.Route.extend({
         var progressPercentage = progress.get('progressPercentage');
         var numberOfQueryTransfered = progress.get('numberOfQueryTransfered');
         var flagForCompletion = parseInt(progress.get('flag'));
+        var error = progress.get('error');
         console.log("the progress percentage is="+progressPercentage);
+        console.log("flag status is "+flagForCompletion);
+        console.log("error is "+error);
+        if(error) {
+          control.set('error', error);
+          control.set('jobstatus', null);
 
-        if (flagForCompletion === 1) {
+        } else if (flagForCompletion === 1) {
           var totalNoQuery = progress.get('totalNoQuery');
           var intanceName = progress.get('intanceName');
           var userNameofhue = progress.get('userNameofhue');

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-udf.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-udf.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-udf.js
index 83e316f..0894702 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-udf.js
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-udf.js
@@ -90,9 +90,15 @@ export default Ember.Route.extend({
         var progressPercentage = progress.get('progressPercentage');
         var numberOfQueryTransfered = progress.get('numberOfQueryTransfered');
         var flagForCompletion = parseInt(progress.get('flag'));
+        var error = progress.get('error');
         console.log("the progress percentage is="+progressPercentage);
+        console.log("flag status is "+flagForCompletion);
+        console.log("error is "+error);
+        if(error) {
+          control.set('error', error);
+          control.set('jobstatus', null);
 
-        if (flagForCompletion === 1) {
+        } else if (flagForCompletion === 1) {
           var totalNoQuery = progress.get('totalNoQuery');
           var intanceName = progress.get('intanceName');
           var userNameofhue = progress.get('userNameofhue');

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs
index 7a0fd6c..4b63d68 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs
@@ -86,6 +86,14 @@ limitations under the License.
     </div>
     <div class="row">
       <div class="col-sm-9">
+        {{#if error}}
+          <h3>Error Occurred during migration</h3>
+          <p><span class="alert-danger">{{error}}</span></p>
+        {{/if}}
+      </div>
+    </div>
+    <div class="row">
+      <div class="col-sm-9">
         {{#if completionStatus}}
           <h2>Migration Report
           </h2>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-saved-query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-saved-query.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-saved-query.hbs
index 3b285e7..28963aa 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-saved-query.hbs
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-saved-query.hbs
@@ -86,6 +86,14 @@ limitations under the License.
     </div>
     <div class="row">
       <div class="col-sm-9">
+        {{#if error}}
+          <h3>Error Occurred during migration</h3>
+          <p><span class="alert-danger">{{error}}</span></p>
+        {{/if}}
+      </div>
+    </div>
+    <div class="row">
+      <div class="col-sm-9">
         {{#if completionStatus}}
           <h2>Migration Report
           </h2>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-job.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-job.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-job.hbs
index b273995..1f5cce0 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-job.hbs
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-job.hbs
@@ -87,6 +87,14 @@ limitations under the License.
     </div>
     <div class="row">
       <div class="col-sm-9">
+        {{#if error}}
+          <h3>Error Occurred during migration</h3>
+          <p><span class="alert-danger">{{error}}</span></p>
+        {{/if}}
+      </div>
+    </div>
+    <div class="row">
+      <div class="col-sm-9">
         {{#if completionStatus}}
           <h2>Migration Report
           </h2>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4aa0f623/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-script.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-script.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-script.hbs
index d88ab21..113ad6e 100644
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-script.hbs
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-script.hbs
@@ -87,6 +87,14 @@ limitations under the License.
     </div>
     <div class="row">
       <div class="col-sm-9">
+        {{#if error}}
+          <h3>Error Occurred during migration</h3>
+          <p><span class="alert-danger">{{error}}</span></p>
+        {{/if}}
+      </div>
+    </div>
+    <div class="row">
+      <div class="col-sm-9">
         {{#if completionStatus}}
           <h2>Migration Report
           </h2>